Compare commits
73 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 27f2d265de | |||
| af3e15e922 | |||
| b44624f2e7 | |||
| 847e679e92 | |||
| ddf5023ecb | |||
| e1d28bc10a | |||
| 2f3d67f9e3 | |||
| 6304953234 | |||
| 8d84620bc4 | |||
| efd6f04e63 | |||
| 97ce9db28e | |||
| 362b4c106e | |||
| 3efe385952 | |||
| f6886f172d | |||
| 81d6273346 | |||
| 7e6cf5f046 | |||
| 89cf7dca04 | |||
| 9639a64437 | |||
| 48305ebb6a | |||
| 485c0a3855 | |||
| adc828d9bb | |||
| fff1d39338 | |||
| 5afbe6ccbc | |||
| 9de17a428d | |||
| c9985102c3 | |||
| 73f98c1c3f | |||
| ae93e6f146 | |||
| 2abaeee500 | |||
| 0538ba2586 | |||
| a451779724 | |||
| cd3246d659 | |||
| d37ffd7177 | |||
| a69b613087 | |||
| 1ea186d233 | |||
| f5e7d43cf3 | |||
| d80faa044a | |||
| 64062e5c43 | |||
| bd22844280 | |||
| 366c4a0bc2 | |||
| 0d3b10bd00 | |||
| a41e3d5d2c | |||
| c45cff89de | |||
| 7bb43ad478 | |||
| 8dcaf1c631 | |||
| 422761806d | |||
| 31360240a9 | |||
| e338ee584f | |||
| 31d2e18830 | |||
| a162ddabbb | |||
| 5dfa1d72aa | |||
| 7074a19a7f | |||
| 5774fb4da2 | |||
| be45ce765d | |||
| 2a250b8823 | |||
| 9a436cb4be | |||
| 86782c39dd | |||
| fba3e9d2b0 | |||
| cc37f70185 | |||
| dbc1a1ba18 | |||
| ff57f8a322 | |||
| 968e67330d | |||
| 935ee20e83 | |||
| c205180991 | |||
| 4a53bc4abc | |||
| a86fb3bb8e | |||
| b187000ae4 | |||
| c715adfd6c | |||
| 7b9ebfdacb | |||
| 05b170cbac | |||
| b320af0b61 | |||
| 49e1ee1f39 | |||
| cef31cf1ff | |||
| 74ecdde1ac |
2
.gitignore
vendored
2
.gitignore
vendored
@@ -17,3 +17,5 @@ dist/
|
|||||||
dist_*/
|
dist_*/
|
||||||
|
|
||||||
#------# custom
|
#------# custom
|
||||||
|
.serena
|
||||||
|
test-output.json
|
||||||
|
|||||||
@@ -19,4 +19,8 @@ node_modules/
|
|||||||
dist/
|
dist/
|
||||||
dist_*/
|
dist_*/
|
||||||
|
|
||||||
|
# AI
|
||||||
|
.claude/
|
||||||
|
.serena/
|
||||||
|
|
||||||
#------# custom
|
#------# custom
|
||||||
9
assets/templates/multienv/deno.json
Normal file
9
assets/templates/multienv/deno.json
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"experimentalDecorators": true,
|
||||||
|
"lib": ["ES2022", "DOM"],
|
||||||
|
"target": "ES2022",
|
||||||
|
"checkJs": true
|
||||||
|
},
|
||||||
|
"nodeModulesDir": true
|
||||||
|
}
|
||||||
@@ -17,12 +17,10 @@ fileName: package.json
|
|||||||
"buildDocs": "(tsdoc)"
|
"buildDocs": "(tsdoc)"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.1.25",
|
"@git.zone/tsbuild": "^3.1.2",
|
||||||
"@git.zone/tsbundle": "^2.0.5",
|
"@git.zone/tsrun": "^2.0.0",
|
||||||
"@git.zone/tsrun": "^1.2.46",
|
"@git.zone/tstest": "^3.1.3",
|
||||||
"@git.zone/tstest": "^1.0.44",
|
"@types/node": "^24.10.1"
|
||||||
"@push.rocks/tapbundle": "^5.0.15",
|
|
||||||
"@types/node": "^20.8.7"
|
|
||||||
},
|
},
|
||||||
"dependencies": {}
|
"dependencies": {}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,8 @@
|
|||||||
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
import * as {{module.name}} from '../ts/index.js'
|
import * as {{module.name}} from '../ts/index.js'
|
||||||
|
|
||||||
tap.test('first test', async () => {
|
tap.test('first test', async () => {
|
||||||
console.log({{module.name}})
|
console.log({{module.name}})
|
||||||
})
|
})
|
||||||
|
|
||||||
tap.start()
|
export default tap.start()
|
||||||
|
|||||||
@@ -17,18 +17,18 @@ fileName: package.json
|
|||||||
"build": "(tsbuild --web --allowimplicitany)"
|
"build": "(tsbuild --web --allowimplicitany)"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.1.17",
|
"@git.zone/tsbuild": "^3.1.2",
|
||||||
"@git.zone/tsrun": "^1.2.8",
|
"@git.zone/tsrun": "^2.0.0",
|
||||||
"@git.zone/tstest": "^1.0.28",
|
"@git.zone/tstest": "^3.1.3",
|
||||||
"@git.zone/tswatch": "^2.0.1",
|
"@git.zone/tswatch": "^2.0.1",
|
||||||
"@push.rocks/tapbundle": "^5.5.4"
|
"@types/node": "^24.10.1"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@api.global/typedserver": "^3.0.53",
|
"@api.global/typedserver": "^3.0.53",
|
||||||
"@push.rocks/projectinfo": "^5.0.1",
|
"@push.rocks/projectinfo": "^5.0.2",
|
||||||
"@push.rocks/qenv": "^6.1.0",
|
"@push.rocks/qenv": "^6.1.0",
|
||||||
"@push.rocks/smartdata": "^5.0.7",
|
"@push.rocks/smartdata": "^5.0.7",
|
||||||
"@push.rocks/smartpath": "^5.0.5",
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
"@push.rocks/smartstate": "^2.0.0"
|
"@push.rocks/smartstate": "^2.0.0"
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,8 +1,5 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"experimentalDecorators": true,
|
|
||||||
"emitDecoratorMetadata": true,
|
|
||||||
"useDefineForClassFields": false,
|
|
||||||
"target": "ES2022",
|
"target": "ES2022",
|
||||||
"module": "NodeNext",
|
"module": "NodeNext",
|
||||||
"moduleResolution": "NodeNext",
|
"moduleResolution": "NodeNext",
|
||||||
|
|||||||
348
changelog.md
348
changelog.md
@@ -1,6 +1,285 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2025-12-02 - 2.2.0 - feat(services)
|
||||||
|
Improve services manager and configuration; switch test templates to @git.zone/tstest; bump dev dependencies and update docs
|
||||||
|
|
||||||
|
- services: Add robust ServiceConfiguration (creates .nogit/env.json with sane defaults, syncs ports from existing Docker containers, validates and can reconfigure ports)
|
||||||
|
- services CLI: improved start/stop/restart flows, better logging/help output and enhanced global commands (list/status/stop/cleanup)
|
||||||
|
- templates/tests: replace @push.rocks/tapbundle with @git.zone/tstest and update template test.ts to export default tap.start()
|
||||||
|
- format: stop auto-updating tslint template and mark @push.rocks/tapbundle as deprecated in package formatting logic
|
||||||
|
- dependencies: bump @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tstest, @git.zone/tsdoc, @push.rocks/projectinfo, @push.rocks/smartpath, @push.rocks/smartfs, prettier and other dev deps
|
||||||
|
- docs: README updates — add issue reporting/security section, AI-powered commit recommendation notes, and clarify trademark/legal wording
|
||||||
|
|
||||||
|
## 2025-11-29 - 2.1.0 - feat(mod_services)
|
||||||
|
Add global service registry and global commands for managing project containers
|
||||||
|
|
||||||
|
- Introduce GlobalRegistry class to track registered projects, their containers, ports and last activity (ts/mod_services/classes.globalregistry.ts)
|
||||||
|
- Add global CLI mode for services (use -g/--global) with commands: list, status, stop, cleanup (ts/mod_services/index.ts)
|
||||||
|
- ServiceManager now registers the current project with the global registry when starting services and unregisters when all containers are removed (ts/mod_services/classes.servicemanager.ts)
|
||||||
|
- Global handlers to list projects, show aggregated status, stop containers across projects and cleanup stale entries
|
||||||
|
- Bump dependency @push.rocks/smartfile to ^13.1.0 in package.json
|
||||||
|
|
||||||
|
## 2025-11-27 - 2.0.0 - BREAKING CHANGE(core)
|
||||||
|
Migrate filesystem to smartfs (async) and add Elasticsearch service support; refactor format/commit/meta modules
|
||||||
|
|
||||||
|
- Replace @push.rocks/smartfile usage with @push.rocks/smartfs across the codebase; all filesystem operations are now async (SmartFs.file(...).read()/write(), SmartFs.directory(...).list()/create()/delete(), etc.)
|
||||||
|
- Convert formerly synchronous helpers and APIs to async (notable: detectProjectType, getProjectName, readCurrentVersion and related version bumping logic). Callers updated accordingly.
|
||||||
|
- Add Elasticsearch support to services: new config fields (ELASTICSEARCH_*), Docker run/start/stop/logs/status handling, and ELASTICSEARCH_URL in service configuration.
|
||||||
|
- Refactor formatting subsystem: cache and rollback/backup systems removed/disabled for stability, format planner execution simplified (sequential), diff/stats reporting updated to use smartfs.
|
||||||
|
- Update package.json dependencies: bump @git.zone/tsbuild, tsrun, tstest; upgrade @push.rocks/smartfile to v13 and add @push.rocks/smartfs dependency; update @types/node.
|
||||||
|
- Update commit flow and changelog generation to use smartfs for reading/writing files and to await version/branch detection where necessary.
|
||||||
|
- Expose a SmartFs instance via plugins and adjust all mod.* plugin files to import/use smartfs where required.
|
||||||
|
- Breaking change: Public and internal APIs that previously used synchronous smartfile APIs are now asynchronous. Consumers and scripts must await these functions and use the new smartfs API.
|
||||||
|
|
||||||
|
## 2025-11-17 - 1.21.5 - fix(tsconfig)
|
||||||
|
Remove emitDecoratorMetadata from tsconfig template
|
||||||
|
|
||||||
|
- Removed the "emitDecoratorMetadata" compiler option from assets/templates/tsconfig_update/tsconfig.json
|
||||||
|
- This updates the tsconfig template to avoid emitting decorator metadata when targeting ES2022
|
||||||
|
|
||||||
|
## 2025-11-17 - 1.21.4 - fix(tsconfig template)
|
||||||
|
Remove experimentalDecorators and useDefineForClassFields from tsconfig template
|
||||||
|
|
||||||
|
- Removed experimentalDecorators option from assets/templates/tsconfig_update/tsconfig.json
|
||||||
|
- Removed useDefineForClassFields option from assets/templates/tsconfig_update/tsconfig.json
|
||||||
|
|
||||||
|
## 2025-11-17 - 1.21.3 - fix(assets/templates/multienv)
|
||||||
|
Remove unused Bun configuration template (assets/templates/multienv/bunfig.toml)
|
||||||
|
|
||||||
|
- Deleted assets/templates/multienv/bunfig.toml which previously provided Bun TypeScript decorator configuration
|
||||||
|
- Cleans up stale/unused template to avoid shipping obsolete Bun config
|
||||||
|
- No functional code changes; removes an unused asset file
|
||||||
|
|
||||||
|
## 2025-11-17 - 1.21.2 - fix(templates/multienv)
|
||||||
|
Disable useDefineForClassFields in multienv TypeScript configs to ensure decorator compatibility
|
||||||
|
|
||||||
|
- Set useDefineForClassFields = false in assets/templates/multienv/bunfig.toml to keep Bun's transpiler compatible with decorator usage
|
||||||
|
- Set "useDefineForClassFields": false in assets/templates/multienv/deno.json to ensure Deno/TypeScript compiler emits class fields compatible with decorators
|
||||||
|
|
||||||
|
## 2025-11-17 - 1.21.1 - fix(templates.multienv)
|
||||||
|
Enable checkJs in multienv Deno template to enable JS type checking
|
||||||
|
|
||||||
|
- Added "checkJs": true to compilerOptions in assets/templates/multienv/deno.json to enable JavaScript type checking for the Deno multienv template
|
||||||
|
|
||||||
|
## 2025-11-17 - 1.21.0 - feat(multienv)
|
||||||
|
Add multi-env templates enabling TypeScript decorators for Bun and Deno; rename npmextra config key to szci
|
||||||
|
|
||||||
|
- Added assets/templates/multienv/bunfig.toml to enable Bun TypeScript transpiler experimentalDecorators
|
||||||
|
- Added assets/templates/multienv/deno.json with experimentalDecorators, lib and target set for ES2022
|
||||||
|
- Updated npmextra.json: renamed top-level config key from "npmci" to "szci" (keeps npmGlobalTools, npmAccessLevel and npmRegistryUrl unchanged)
|
||||||
|
|
||||||
|
## 2025-11-06 - 1.20.0 - feat(commit)
|
||||||
|
Add non-interactive --yes (-y) flag to commit command to auto-accept AI recommendations and optionally push with -p
|
||||||
|
|
||||||
|
- Add -y / --yes flag to gitzone commit to auto-accept AI-generated commit recommendations without interactive prompts
|
||||||
|
- Support -yp or -y -p combinations to auto-accept and push to origin; -p / --push remains the separate control for pushing
|
||||||
|
- Implementation creates a smartinteract AnswerBucket programmatically when -y is used and populates commitType, commitScope, commitDescription and pushToOrigin
|
||||||
|
- Preserves existing UI output and interactive flow when -y is not used; fully backward compatible and CI/CD friendly
|
||||||
|
- Updated CLI usage and documentation (readme.hints.md) to document the new flags
|
||||||
|
|
||||||
|
## 2025-11-05 - 1.19.9 - fix(mod_commit)
|
||||||
|
Refactor version bumping to a unified implementation for npm and Deno; remove npm-exec based helpers and add file-based version readers/updaters to avoid npm warning pollution
|
||||||
|
|
||||||
|
- Removed legacy npm/deno-specific helpers (bumpNpmVersion, syncVersionToDenoJson, bumpDenoVersion) that relied on executing npm and caused warning pollution
|
||||||
|
- Added readCurrentVersion() to read version from package.json or deno.json
|
||||||
|
- Added updateVersionFile() helper to write version directly into JSON files
|
||||||
|
- Added unified bumpProjectVersion() that handles npm, deno and both with a single code path; reuses calculateNewVersion()
|
||||||
|
- Stages updated files, commits v<newVersion> and creates a tag v<newVersion>
|
||||||
|
- Benefits: no npm warning pollution in deno.json, simpler git history, consistent behavior across project types
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.19.8 - fix(package.json)
|
||||||
|
Bump @git.zone/tsdoc dependency to ^1.9.2
|
||||||
|
|
||||||
|
- Updated dependency @git.zone/tsdoc from ^1.9.1 to ^1.9.2 in package.json
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.19.7 - fix(dependencies)
|
||||||
|
Bump @git.zone/tsdoc to ^1.9.1
|
||||||
|
|
||||||
|
- Updated package.json dependency @git.zone/tsdoc from ^1.9.0 to ^1.9.1
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.19.6 - fix(cli)
|
||||||
|
Bump @git.zone/tsdoc dependency to ^1.9.0
|
||||||
|
|
||||||
|
- Updated dependency @git.zone/tsdoc from ^1.8.3 to ^1.9.0 in package.json
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.19.5 - fix(cli)
|
||||||
|
Bump @git.zone/tsdoc to ^1.8.3 and add local .claude settings for allowed permissions
|
||||||
|
|
||||||
|
- Updated dependency @git.zone/tsdoc from ^1.8.2 to ^1.8.3
|
||||||
|
- Added .claude/settings.local.json to declare allowed permissions for local tooling (Bash commands, Docker, npm, WebFetch and MCP actions)
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.19.3 - fix(tsdoc)
|
||||||
|
Bump @git.zone/tsdoc to ^1.8.0 and add .claude local settings
|
||||||
|
|
||||||
|
- Upgrade dependency @git.zone/tsdoc from ^1.6.1 to ^1.8.0 in package.json
|
||||||
|
- Add .claude/settings.local.json for local assistant permissions/configuration
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.19.2 - fix(tsdoc)
|
||||||
|
Bump @git.zone/tsdoc to ^1.6.1 and add .claude/settings.local.json
|
||||||
|
|
||||||
|
- Update dependency @git.zone/tsdoc from ^1.6.0 to ^1.6.1
|
||||||
|
- Add .claude/settings.local.json to include local Claude settings/permissions
|
||||||
|
|
||||||
|
## 2025-11-02 - 1.19.1 - fix(dependencies)
|
||||||
|
Bump dependencies and add local Claude settings
|
||||||
|
|
||||||
|
- Bump devDependencies: @git.zone/tsbuild -> ^2.7.1, @git.zone/tsrun -> ^1.6.2, @git.zone/tstest -> ^2.7.0
|
||||||
|
- Upgrade runtime dependencies: @git.zone/tsdoc -> ^1.6.0; update @push.rocks packages (smartcli ^4.0.19, smartjson ^5.2.0, smartlog ^3.1.10, smartnetwork ^4.4.0, etc.)
|
||||||
|
- Add .claude/settings.local.json (local project permissions/settings file)
|
||||||
|
|
||||||
|
## 2025-10-23 - 1.19.0 - feat(mod_commit)
|
||||||
|
Add CLI UI helpers and improve commit workflow with progress, recommendations and summary
|
||||||
|
|
||||||
|
- Introduce ts/mod_commit/mod.ui.ts: reusable CLI UI helpers (pretty headers, sections, AI recommendation box, step printer, commit summary and helpers for consistent messaging).
|
||||||
|
- Refactor ts/mod_commit/index.ts: use new UI functions to display AI recommendations, show step-by-step progress for baking commit info, generating changelog, staging, committing, bumping version and optional push; include commit SHA in final summary.
|
||||||
|
- Enhance ts/mod_commit/mod.helpers.ts: bumpProjectVersion now accepts currentStep/totalSteps to report progress and returns a consistent newVersion after handling npm/deno/both cases.
|
||||||
|
- Add .claude/settings.local.json: local permissions configuration for development tooling.
|
||||||
|
|
||||||
|
## 2025-10-23 - 1.18.9 - fix(mod_commit)
|
||||||
|
Stage and commit deno.json when bumping/syncing versions and create/update git tags
|
||||||
|
|
||||||
|
- bumpDenoVersion now creates a Smartshell instance and runs git add deno.json, git commit -m "v<newVersion>", and git tag v<newVersion> to persist the version bump
|
||||||
|
- syncVersionToDenoJson now stages deno.json, amends the npm version commit with --no-edit, and recreates the tag with -fa to keep package.json and deno.json in sync
|
||||||
|
- Added informative logger messages after creating commits and tags
|
||||||
|
|
||||||
|
## 2025-10-23 - 1.18.8 - fix(mod_commit)
|
||||||
|
Improve commit workflow: detect project type and current branch; add robust version bump helpers for npm/deno
|
||||||
|
|
||||||
|
- Add mod_commit/mod.helpers.ts with utilities: detectCurrentBranch(), detectProjectType(), bumpProjectVersion(), bumpDenoVersion(), bumpNpmVersion(), syncVersionToDenoJson(), and calculateNewVersion()
|
||||||
|
- Refactor ts/mod_commit/index.ts to use the new helpers: bumpProjectVersion(projectType, ... ) instead of a hard npm version call and push the actual current branch instead of hardcoding 'master'
|
||||||
|
- Support bumping versions for npm-only, deno-only, and hybrid (both) projects and synchronize versions from package.json to deno.json when applicable
|
||||||
|
- Improve branch detection with a fallback to 'master' and informative logging on detection failures
|
||||||
|
- Add local Claude settings file (.claude/settings.local.json) (editor/CI config) — no code behavior change but included in diff
|
||||||
|
|
||||||
|
## 2025-09-07 - 1.18.7 - fix(claude)
|
||||||
|
Add .claude local settings to whitelist dev tool permissions
|
||||||
|
|
||||||
|
- Add .claude/settings.local.json to configure allowed permissions for local AI/tooling helpers (Bash commands, WebFetch, and mcp_serena actions).
|
||||||
|
- Disable enableAllProjectMcpServers (set to false) to limit automatic project MCP server usage.
|
||||||
|
|
||||||
|
## 2025-09-07 - 1.18.6 - fix(deps)
|
||||||
|
Bump dependency versions and add local Claude settings
|
||||||
|
|
||||||
|
- Updated devDependencies: @git.zone/tsbuild ^2.6.4 → ^2.6.8, @git.zone/tstest ^2.3.4 → ^2.3.6, @push.rocks/smartfile ^11.2.5 → ^11.2.7
|
||||||
|
- Updated dependencies: @git.zone/tsdoc ^1.5.1 → ^1.5.2, @git.zone/tspublish ^1.10.1 → ^1.10.3, @push.rocks/smartlog ^3.1.8 → ^3.1.9, @push.rocks/smartnpm ^2.0.4 → ^2.0.6, @push.rocks/smartscaf ^4.0.17 → ^4.0.19
|
||||||
|
- Added .claude/settings.local.json to configure local Claude permissions/settings
|
||||||
|
|
||||||
|
## 2025-08-17 - 1.18.5 - fix(dependencies)
|
||||||
|
Bump smartshell and smartscaf versions; add .claude local settings
|
||||||
|
|
||||||
|
- Update @push.rocks/smartshell from ^3.2.4 to ^3.3.0 in package.json
|
||||||
|
- Update @push.rocks/smartscaf from ^4.0.16 to ^4.0.17 in package.json
|
||||||
|
- Add .claude/settings.local.json for local assistant permissions/configuration
|
||||||
|
|
||||||
|
## 2025-08-17 - 1.18.4 - fix(cli)
|
||||||
|
Update dependencies, add local Claude settings, and update gitignore template
|
||||||
|
|
||||||
|
- Bump several dependencies: @git.zone/tsbuild -> ^2.6.4, @git.zone/tspublish -> ^1.10.1, @git.zone/tstest -> ^2.3.4, @push.rocks/smartfile -> ^11.2.5, @push.rocks/npmextra -> ^5.3.3, @push.rocks/smartchok -> ^1.1.1, @push.rocks/smartlog -> ^3.1.8, @push.rocks/smartpath -> ^6.0.0, prettier -> ^3.6.2
|
||||||
|
- Add .claude/settings.local.json with local permissions configuration for AI tooling
|
||||||
|
- Update assets/templates/gitignore to ignore .claude/ and .serena/ directories
|
||||||
|
- Add pnpm onlyBuiltDependencies entries: esbuild and mongodb-memory-server
|
||||||
|
|
||||||
|
## 2025-08-16 - 1.18.3 - fix(services)
|
||||||
|
Simplify S3 endpoint handling in ServiceConfiguration to store host only
|
||||||
|
|
||||||
|
- S3_ENDPOINT now stores the raw host (e.g. 'localhost') instead of a full URL with protocol and port.
|
||||||
|
- Default .nogit/env.json creation uses the host-only S3_ENDPOINT.
|
||||||
|
- Sync/update logic (when syncing with Docker or reconfiguring ports) sets S3_ENDPOINT to the host only.
|
||||||
|
- Consumers that previously relied on S3_ENDPOINT containing protocol and port should now construct the full endpoint URL using S3_USESSL, S3_HOST and S3_PORT.
|
||||||
|
|
||||||
|
## 2025-08-16 - 1.18.1 - fix(services)
|
||||||
|
Improve services and commit flow: stop AiDoc, use silent docker inspect, sync ports with logging, fix config loading, and bump deps
|
||||||
|
|
||||||
|
- Ensure AiDoc is stopped after building commit recommendation to avoid resource leaks
|
||||||
|
- Use execSilent for `docker inspect` in DockerContainer to avoid shell noise and improve JSON parsing
|
||||||
|
- Sync Docker-exposed ports into service configuration with explicit notes (logs) when MongoDB / S3 ports are updated
|
||||||
|
- Fix synchronous config loading by removing an unnecessary await in ServiceConfiguration.loadConfig
|
||||||
|
- Bump dependencies: @push.rocks/smartshell -> ^3.2.4, @git.zone/tsdoc -> ^1.5.1
|
||||||
|
- Add pnpm.onlyBuiltDependencies for puppeteer and sharp to package.json
|
||||||
|
- Add local Claude settings file (.claude/settings.local.json) with development permissions
|
||||||
|
|
||||||
|
## 2025-08-16 - 1.18.0 - feat(services)
|
||||||
|
Add Docker port mapping sync and reconfigure workflow for local services
|
||||||
|
|
||||||
|
- Add getPortMappings to DockerContainer to extract port bindings from docker inspect output
|
||||||
|
- Sync existing container port mappings into .nogit/env.json when loading/creating service configuration
|
||||||
|
- Validate and automatically update ports only when containers are not present; preserve container ports when containers exist
|
||||||
|
- Recreate containers automatically if detected container port mappings differ from configuration (MongoDB and MinIO)
|
||||||
|
- Add reconfigure method and new CLI command to reassign ports and optionally restart services
|
||||||
|
- Improve status output to show configured ports and port availability information
|
||||||
|
- Minor helpers and imports updated (DockerContainer injected into ServiceConfiguration)
|
||||||
|
- Add .claude/settings.local.json (local permissions config) to repository
|
||||||
|
|
||||||
|
## 2025-08-15 - 1.17.5 - fix(services)
|
||||||
|
Update S3 credentials naming and add S3_ENDPOINT/S3_USESSL support for improved MinIO integration
|
||||||
|
|
||||||
|
- Replaced S3_USER/S3_PASS with S3_ACCESSKEY/S3_SECRETKEY in ServiceConfiguration
|
||||||
|
- Added S3_ENDPOINT field with automatic protocol selection based on S3_USESSL
|
||||||
|
- Introduced S3_USESSL boolean field for SSL/TLS configuration
|
||||||
|
- Updated ServiceManager logging to display new S3_USESSL configuration
|
||||||
|
- Added .claude/settings.local.json for local permission settings
|
||||||
|
|
||||||
|
## 2025-08-15 - 1.17.4 - fix(services)
|
||||||
|
Update S3 credentials naming and add S3_ENDPOINT/S3_USESSL support for improved MinIO integration
|
||||||
|
|
||||||
|
- Replaced S3_USER/S3_PASS with S3_ACCESSKEY/S3_SECRETKEY in ServiceConfiguration
|
||||||
|
- Added S3_ENDPOINT field with automatic protocol selection based on S3_USESSL
|
||||||
|
- Added S3_USESSL boolean field for SSL/TLS configuration support
|
||||||
|
- Updated ServiceManager to use new credential names in container setup and logging
|
||||||
|
|
||||||
|
## 2025-08-15 - 1.17.3 - fix(serviceconfig)
|
||||||
|
Update service configuration to include dynamic MongoDB connection string and add local permissions settings
|
||||||
|
|
||||||
|
- Added .claude/settings.local.json for local permissions configuration
|
||||||
|
- Updated ServiceConfiguration to compute and update MONGODB_URL based on current config values
|
||||||
|
|
||||||
|
## 2025-08-15 - 1.17.2 - fix(ci-test-services)
|
||||||
|
Update CI/CD configurations, test settings, and Docker service for MongoDB.
|
||||||
|
|
||||||
|
- Add .claude/settings.local.json with updated permission settings
|
||||||
|
- Introduce new GitLab CI, VSCode launch and settings, and updated test configuration files (.gitignore, .npmrc, npmextra.json, package.json, qenv.yml, readme.md)
|
||||||
|
- Update test scripts in test/test and test/ts to improve project validation
|
||||||
|
- Fix MongoDB Docker container command by adding '--bind_ip_all' for proper network binding
|
||||||
|
|
||||||
|
## 2025-08-15 - 1.17.1 - fix(services)
|
||||||
|
Improve services module logging and enhance MongoDB Compass integration
|
||||||
|
|
||||||
|
- Refactored services module to use centralized logger from gitzone.logging.ts
|
||||||
|
- Automatically display MongoDB Compass connection string when starting services or checking status
|
||||||
|
- Removed custom printMessage wrapper in favor of standard logger.log() calls
|
||||||
|
- Consistent logging across all service commands
|
||||||
|
|
||||||
|
## 2025-08-14 - 1.17.0 - feat(services)
|
||||||
|
Add comprehensive development services management for MongoDB and MinIO containers
|
||||||
|
|
||||||
|
- Implemented `gitzone services` command for managing local development services
|
||||||
|
- Added MongoDB and MinIO (S3-compatible) container orchestration
|
||||||
|
- Smart port assignment (20000-30000 range) to avoid conflicts between projects
|
||||||
|
- Project-specific container names for complete isolation
|
||||||
|
- Data persistence in `.nogit/` directories
|
||||||
|
- MongoDB Compass connection string generation with network IP detection
|
||||||
|
- Auto-configuration via `.nogit/env.json` with secure defaults
|
||||||
|
- Commands: start, stop, restart, status, config, compass, logs, remove, clean
|
||||||
|
- Interactive confirmations for destructive operations
|
||||||
|
|
||||||
|
## 2025-08-08 - 1.16.10 - fix(format)
|
||||||
|
Improve concurrency control in caching and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing.
|
||||||
|
|
||||||
|
- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
|
||||||
|
- Updated gitignore logic to detect and preserve custom sections
|
||||||
|
- Enhanced Prettier batching and file formatting for better performance
|
||||||
|
|
||||||
|
## 2025-08-08 - 1.16.9 - fix(format)
|
||||||
|
|
||||||
|
Improve concurrency control in cache and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing
|
||||||
|
|
||||||
|
- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
|
||||||
|
- Updated gitignore logic to detect and preserve existing custom sections from various markers
|
||||||
|
- Simplified Prettier formatter to process files sequentially, skip files without extensions, and log detailed status
|
||||||
|
- Minor refactoring in base formatter and tsconfig file updates for improved reliability
|
||||||
|
|
||||||
## 2025-08-08 - 1.16.8 - fix(format)
|
## 2025-08-08 - 1.16.8 - fix(format)
|
||||||
|
|
||||||
Improve concurrency control in cache and rollback management with mutex locking and refine formatting details
|
Improve concurrency control in cache and rollback management with mutex locking and refine formatting details
|
||||||
|
|
||||||
- Added 'withMutex' functions in ChangeCache and RollbackManager to synchronize file I/O operations
|
- Added 'withMutex' functions in ChangeCache and RollbackManager to synchronize file I/O operations
|
||||||
@@ -8,6 +287,7 @@ Improve concurrency control in cache and rollback management with mutex locking
|
|||||||
- Fixed minor formatting issues in commit info and package.json
|
- Fixed minor formatting issues in commit info and package.json
|
||||||
|
|
||||||
## 2025-08-08 - 1.16.7 - fix(core)
|
## 2025-08-08 - 1.16.7 - fix(core)
|
||||||
|
|
||||||
Improve formatting, logging, and rollback integrity in core modules
|
Improve formatting, logging, and rollback integrity in core modules
|
||||||
|
|
||||||
- Add .claude/settings.local.json with defined permissions for allowed commands
|
- Add .claude/settings.local.json with defined permissions for allowed commands
|
||||||
@@ -16,6 +296,7 @@ Improve formatting, logging, and rollback integrity in core modules
|
|||||||
- Enhance logging messages and overall code clarity in CLI and commit modules
|
- Enhance logging messages and overall code clarity in CLI and commit modules
|
||||||
|
|
||||||
## 2025-08-08 - 1.16.6 - fix(changecache)
|
## 2025-08-08 - 1.16.6 - fix(changecache)
|
||||||
|
|
||||||
Improve cache manifest validation and atomic file writes; add local settings and overrides
|
Improve cache manifest validation and atomic file writes; add local settings and overrides
|
||||||
|
|
||||||
- Add manifest structure validation and default fallback in getManifest
|
- Add manifest structure validation and default fallback in getManifest
|
||||||
@@ -25,6 +306,7 @@ Improve cache manifest validation and atomic file writes; add local settings and
|
|||||||
- Add an empty assets/overrides.json file for future overrides
|
- Add an empty assets/overrides.json file for future overrides
|
||||||
|
|
||||||
## 2025-08-08 - 1.16.5 - fix(prettier)
|
## 2025-08-08 - 1.16.5 - fix(prettier)
|
||||||
|
|
||||||
Improve file selection in Prettier formatter, remove legacy package overrides, and update CI template indentation
|
Improve file selection in Prettier formatter, remove legacy package overrides, and update CI template indentation
|
||||||
|
|
||||||
- Added .claude/settings.local.json with updated permission settings for local commands
|
- Added .claude/settings.local.json with updated permission settings for local commands
|
||||||
@@ -33,12 +315,14 @@ Improve file selection in Prettier formatter, remove legacy package overrides, a
|
|||||||
- Refined Prettier formatter logic by defining include directories, root config files, and filtering duplicates instead of manual exclusion
|
- Refined Prettier formatter logic by defining include directories, root config files, and filtering duplicates instead of manual exclusion
|
||||||
|
|
||||||
## 2025-08-08 - 1.16.4 - fix(prettier)
|
## 2025-08-08 - 1.16.4 - fix(prettier)
|
||||||
|
|
||||||
Improve file exclusion in the Prettier formatter to skip unnecessary files and directories.
|
Improve file exclusion in the Prettier formatter to skip unnecessary files and directories.
|
||||||
|
|
||||||
- Added exclusion patterns for node_modules, .git, dist, .nogit, coverage, .nyc_output, vendor, bower_components, jspm_packages, and minified files.
|
- Added exclusion patterns for node_modules, .git, dist, .nogit, coverage, .nyc_output, vendor, bower_components, jspm_packages, and minified files.
|
||||||
- Optimized filtering logic to ensure only valid files are processed.
|
- Optimized filtering logic to ensure only valid files are processed.
|
||||||
|
|
||||||
## 2025-08-08 - 1.16.3 - fix(changecache/prettier)
|
## 2025-08-08 - 1.16.3 - fix(changecache/prettier)
|
||||||
|
|
||||||
Skip directories during file processing to prevent errors in changecache and prettier formatting
|
Skip directories during file processing to prevent errors in changecache and prettier formatting
|
||||||
|
|
||||||
- Removed unnecessary await on synchronous file reads in changecache
|
- Removed unnecessary await on synchronous file reads in changecache
|
||||||
@@ -46,18 +330,21 @@ Skip directories during file processing to prevent errors in changecache and pre
|
|||||||
- Filtered out directories in prettier formatter to avoid processing non-files
|
- Filtered out directories in prettier formatter to avoid processing non-files
|
||||||
|
|
||||||
## 2025-08-07 - 1.16.2 - fix(format)
|
## 2025-08-07 - 1.16.2 - fix(format)
|
||||||
|
|
||||||
Fix format command confirmation prompt to correctly check user response
|
Fix format command confirmation prompt to correctly check user response
|
||||||
|
|
||||||
- Fixed bug where format command always showed "cancelled" even when user confirmed
|
- Fixed bug where format command always showed "cancelled" even when user confirmed
|
||||||
- Changed response check from `response.proceed` to `response.value` for SmartInteract compatibility
|
- Changed response check from `response.proceed` to `response.value` for SmartInteract compatibility
|
||||||
|
|
||||||
## 2025-08-04 - 1.16.1 - fix(package/config)
|
## 2025-08-04 - 1.16.1 - fix(package/config)
|
||||||
|
|
||||||
Move smartdiff dependency to runtime and add local bash permissions settings
|
Move smartdiff dependency to runtime and add local bash permissions settings
|
||||||
|
|
||||||
- Moved '@push.rocks/smartdiff' from devDependencies to dependencies in package.json
|
- Moved '@push.rocks/smartdiff' from devDependencies to dependencies in package.json
|
||||||
- Added .claude/settings.local.json with allowed bash commands (grep, mkdir, find, ls)
|
- Added .claude/settings.local.json with allowed bash commands (grep, mkdir, find, ls)
|
||||||
|
|
||||||
## 2025-05-19 - 1.16.0 - feat(format)
|
## 2025-05-19 - 1.16.0 - feat(format)
|
||||||
|
|
||||||
Enhance format module with rollback, diff reporting, and improved parallel execution
|
Enhance format module with rollback, diff reporting, and improved parallel execution
|
||||||
|
|
||||||
- Implemented rollback functionality with backup management and automatic rollback on error
|
- Implemented rollback functionality with backup management and automatic rollback on error
|
||||||
@@ -68,12 +355,14 @@ Enhance format module with rollback, diff reporting, and improved parallel execu
|
|||||||
- Updated package.json to include new dependency '@push.rocks/smartdiff'
|
- Updated package.json to include new dependency '@push.rocks/smartdiff'
|
||||||
|
|
||||||
## 2025-05-14 - 1.15.5 - fix(dependencies)
|
## 2025-05-14 - 1.15.5 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc to ^1.5.0 and @types/node to ^22.15.18
|
Update @git.zone/tsdoc to ^1.5.0 and @types/node to ^22.15.18
|
||||||
|
|
||||||
- Bumped @git.zone/tsdoc from ^1.4.5 to ^1.5.0
|
- Bumped @git.zone/tsdoc from ^1.4.5 to ^1.5.0
|
||||||
- Bumped @types/node from ^22.15.17 to ^22.15.18
|
- Bumped @types/node from ^22.15.17 to ^22.15.18
|
||||||
|
|
||||||
## 2025-05-13 - 1.15.4 - fix(package.json)
|
## 2025-05-13 - 1.15.4 - fix(package.json)
|
||||||
|
|
||||||
Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/smartlog, and @types/node to their latest releases
|
Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/smartlog, and @types/node to their latest releases
|
||||||
|
|
||||||
- Upgrade @git.zone/tsdoc from ^1.4.4 to ^1.4.5
|
- Upgrade @git.zone/tsdoc from ^1.4.4 to ^1.4.5
|
||||||
@@ -82,6 +371,7 @@ Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/s
|
|||||||
- Upgrade @types/node from ^22.14.1 to ^22.15.17
|
- Upgrade @types/node from ^22.14.1 to ^22.15.17
|
||||||
|
|
||||||
## 2025-04-15 - 1.15.3 - fix(deps)
|
## 2025-04-15 - 1.15.3 - fix(deps)
|
||||||
|
|
||||||
update dependency versions and improve website template variable handling
|
update dependency versions and improve website template variable handling
|
||||||
|
|
||||||
- Bumped @git.zone/tsbuild from ^2.2.1 to ^2.3.2 and @types/node to ^22.14.1
|
- Bumped @git.zone/tsbuild from ^2.2.1 to ^2.3.2 and @types/node to ^22.14.1
|
||||||
@@ -89,56 +379,65 @@ update dependency versions and improve website template variable handling
|
|||||||
- Refactored website template update to correctly supply variables with added logging
|
- Refactored website template update to correctly supply variables with added logging
|
||||||
|
|
||||||
## 2025-04-15 - 1.15.2 - fix(website_update)
|
## 2025-04-15 - 1.15.2 - fix(website_update)
|
||||||
|
|
||||||
Await supplyVariables call in website update template
|
Await supplyVariables call in website update template
|
||||||
|
|
||||||
- Changed website template update to properly await the supplyVariables method
|
- Changed website template update to properly await the supplyVariables method
|
||||||
- Ensured asynchronous consistency in updating website template variables
|
- Ensured asynchronous consistency in updating website template variables
|
||||||
|
|
||||||
## 2025-04-15 - 1.15.1 - fix(cli)
|
## 2025-04-15 - 1.15.1 - fix(cli)
|
||||||
|
|
||||||
Refresh internal CLI tooling and configuration for consistency.
|
Refresh internal CLI tooling and configuration for consistency.
|
||||||
|
|
||||||
|
|
||||||
## 2025-04-15 - 1.15.0 - feat(config/template)
|
## 2025-04-15 - 1.15.0 - feat(config/template)
|
||||||
|
|
||||||
Add assetbrokerUrl and legalUrl fields to module config and update website template to supply these values
|
Add assetbrokerUrl and legalUrl fields to module config and update website template to supply these values
|
||||||
|
|
||||||
- Added assetbrokerUrl and legalUrl properties in ts/classes.gitzoneconfig.ts
|
- Added assetbrokerUrl and legalUrl properties in ts/classes.gitzoneconfig.ts
|
||||||
- Updated ts/mod_format/format.templates.ts to pass assetbrokerUrl and legalUrl to website template
|
- Updated ts/mod_format/format.templates.ts to pass assetbrokerUrl and legalUrl to website template
|
||||||
|
|
||||||
## 2025-04-15 - 1.14.1 - fix(package.json)
|
## 2025-04-15 - 1.14.1 - fix(package.json)
|
||||||
|
|
||||||
Add packageManager field to specify pnpm version for consistent package management
|
Add packageManager field to specify pnpm version for consistent package management
|
||||||
|
|
||||||
- Inserted packageManager property in package.json with pnpm version info to ensure reproducible dependency installs
|
- Inserted packageManager property in package.json with pnpm version info to ensure reproducible dependency installs
|
||||||
|
|
||||||
## 2025-04-15 - 1.14.0 - feat(tsconfig_update)
|
## 2025-04-15 - 1.14.0 - feat(tsconfig_update)
|
||||||
|
|
||||||
Add runafter directive to trigger gitzone format after tsconfig update
|
Add runafter directive to trigger gitzone format after tsconfig update
|
||||||
|
|
||||||
- Added runafter configuration in assets/templates/tsconfig_update/.smartscaf.yml to automate formatting task
|
- Added runafter configuration in assets/templates/tsconfig_update/.smartscaf.yml to automate formatting task
|
||||||
|
|
||||||
## 2025-03-07 - 1.13.1 - fix(cli)
|
## 2025-03-07 - 1.13.1 - fix(cli)
|
||||||
|
|
||||||
Improve commit message logging
|
Improve commit message logging
|
||||||
|
|
||||||
- Updated logging to display recommended next commit details.
|
- Updated logging to display recommended next commit details.
|
||||||
- Enabled interactive prompt for choosing commit type and scope.
|
- Enabled interactive prompt for choosing commit type and scope.
|
||||||
|
|
||||||
## 2025-02-28 - 1.13.0 - feat(templates)
|
## 2025-02-28 - 1.13.0 - feat(templates)
|
||||||
|
|
||||||
Updated and added new TypeScript template files for npm projects
|
Updated and added new TypeScript template files for npm projects
|
||||||
|
|
||||||
- Added new paths.ts and plugins.ts template files for npm projects.
|
- Added new paths.ts and plugins.ts template files for npm projects.
|
||||||
- Removed outdated some.plugins.ts template file.
|
- Removed outdated some.plugins.ts template file.
|
||||||
|
|
||||||
## 2025-02-25 - 1.12.8 - fix(metadata)
|
## 2025-02-25 - 1.12.8 - fix(metadata)
|
||||||
|
|
||||||
Updated package and npmextra json description and keywords for enhanced development workflow clarity
|
Updated package and npmextra json description and keywords for enhanced development workflow clarity
|
||||||
|
|
||||||
- Updated the description in package.json to focus on project setup and management.
|
- Updated the description in package.json to focus on project setup and management.
|
||||||
- Aligned the keywords in both package.json and npmextra.json to include more relevant terms such as gitzone utilities, template management, and CI/CD.
|
- Aligned the keywords in both package.json and npmextra.json to include more relevant terms such as gitzone utilities, template management, and CI/CD.
|
||||||
|
|
||||||
## 2025-02-25 - 1.12.7 - fix(meta)
|
## 2025-02-25 - 1.12.7 - fix(meta)
|
||||||
|
|
||||||
Fix issues in project metadata and configuration.
|
Fix issues in project metadata and configuration.
|
||||||
|
|
||||||
- Updated package metadata to ensure accurate project description and licensing.
|
- Updated package metadata to ensure accurate project description and licensing.
|
||||||
- Ensured npm access level configuration consistency within npmextra.json.
|
- Ensured npm access level configuration consistency within npmextra.json.
|
||||||
|
|
||||||
## 2025-02-25 - 1.12.7 - fix(ci)
|
## 2025-02-25 - 1.12.7 - fix(ci)
|
||||||
|
|
||||||
Updated dependencies and added CI/CD workflows.
|
Updated dependencies and added CI/CD workflows.
|
||||||
|
|
||||||
- Updated several dependencies in package.json for compatibility and security.
|
- Updated several dependencies in package.json for compatibility and security.
|
||||||
@@ -147,6 +446,7 @@ Updated dependencies and added CI/CD workflows.
|
|||||||
- Ensured consistent formatting with Prettier and TypeScript configurations.
|
- Ensured consistent formatting with Prettier and TypeScript configurations.
|
||||||
|
|
||||||
## 2025-01-29 - 1.12.6 - fix(project)
|
## 2025-01-29 - 1.12.6 - fix(project)
|
||||||
|
|
||||||
Minor fixes and cleanup
|
Minor fixes and cleanup
|
||||||
|
|
||||||
- Removed outdated pages/ directory entry in .gitignore.
|
- Removed outdated pages/ directory entry in .gitignore.
|
||||||
@@ -155,6 +455,7 @@ Minor fixes and cleanup
|
|||||||
- Fixed formatting issues across various TypeScript files.
|
- Fixed formatting issues across various TypeScript files.
|
||||||
|
|
||||||
## 2025-01-29 - 1.12.5 - fix(cli)
|
## 2025-01-29 - 1.12.5 - fix(cli)
|
||||||
|
|
||||||
Initial implementation of CLI utility with project management features
|
Initial implementation of CLI utility with project management features
|
||||||
|
|
||||||
- Integration of various plugins for logging, command-line interactions, and project management.
|
- Integration of various plugins for logging, command-line interactions, and project management.
|
||||||
@@ -162,34 +463,40 @@ Initial implementation of CLI utility with project management features
|
|||||||
- Implement commands for packaging, versioning, and deprecating npm packages.
|
- Implement commands for packaging, versioning, and deprecating npm packages.
|
||||||
|
|
||||||
## 2025-01-29 - 1.12.2 - fix(format)
|
## 2025-01-29 - 1.12.2 - fix(format)
|
||||||
|
|
||||||
Add overrides for peek-readable in package.json formatting
|
Add overrides for peek-readable in package.json formatting
|
||||||
|
|
||||||
- Added a URL correction in the packageJson repository information.
|
- Added a URL correction in the packageJson repository information.
|
||||||
- Introduced support for pnpm overrides by including an `overrides.json` file.
|
- Introduced support for pnpm overrides by including an `overrides.json` file.
|
||||||
|
|
||||||
## 2025-01-18 - 1.12.1 - fix(dependencies)
|
## 2025-01-18 - 1.12.1 - fix(dependencies)
|
||||||
|
|
||||||
Update various package dependencies and Dockerfile base image
|
Update various package dependencies and Dockerfile base image
|
||||||
|
|
||||||
- Updated Dockerfile base image from 'alpinenpmci' to 'alpine_npmci'.
|
- Updated Dockerfile base image from 'alpinenpmci' to 'alpine_npmci'.
|
||||||
- Upgraded @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tsdoc, and other dependencies to their latest versions.
|
- Upgraded @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tsdoc, and other dependencies to their latest versions.
|
||||||
|
|
||||||
## 2025-01-17 - 1.12.0 - feat(build)
|
## 2025-01-17 - 1.12.0 - feat(build)
|
||||||
|
|
||||||
Update TypeScript configuration to support emit decorator metadata
|
Update TypeScript configuration to support emit decorator metadata
|
||||||
|
|
||||||
- Added emitDecoratorMetadata to the tsconfig.json template in assets/templates/tsconfig_update.
|
- Added emitDecoratorMetadata to the tsconfig.json template in assets/templates/tsconfig_update.
|
||||||
|
|
||||||
## 2025-01-08 - 1.11.0 - feat(cli)
|
## 2025-01-08 - 1.11.0 - feat(cli)
|
||||||
|
|
||||||
Add Docker command for cleaning up Docker system and extend deprecation command for multiple registries
|
Add Docker command for cleaning up Docker system and extend deprecation command for multiple registries
|
||||||
|
|
||||||
- Added a new command 'docker' to handle Docker system cleanup operations.
|
- Added a new command 'docker' to handle Docker system cleanup operations.
|
||||||
- Improved the 'deprecate' command to support deprecating packages across multiple npm registry URLs.
|
- Improved the 'deprecate' command to support deprecating packages across multiple npm registry URLs.
|
||||||
|
|
||||||
## 2025-01-01 - 1.10.10 - fix(templates)
|
## 2025-01-01 - 1.10.10 - fix(templates)
|
||||||
|
|
||||||
Corrected typo in template file comment
|
Corrected typo in template file comment
|
||||||
|
|
||||||
- Fixed repeated comment in the template file for services under 'assets/templates/service/ts/some.plugins.ts'.
|
- Fixed repeated comment in the template file for services under 'assets/templates/service/ts/some.plugins.ts'.
|
||||||
|
|
||||||
## 2025-01-01 - 1.10.9 - fix(templates)
|
## 2025-01-01 - 1.10.9 - fix(templates)
|
||||||
|
|
||||||
Correct template file paths and organization for service projects
|
Correct template file paths and organization for service projects
|
||||||
|
|
||||||
- Moved 'some.classes.some.ts' to 'classes.some.ts'
|
- Moved 'some.classes.some.ts' to 'classes.some.ts'
|
||||||
@@ -197,60 +504,70 @@ Correct template file paths and organization for service projects
|
|||||||
- Resolved incorrect import paths in service templates
|
- Resolved incorrect import paths in service templates
|
||||||
|
|
||||||
## 2025-01-01 - 1.10.8 - fix(assets/templates)
|
## 2025-01-01 - 1.10.8 - fix(assets/templates)
|
||||||
|
|
||||||
Update CI template configurations to use module.githost
|
Update CI template configurations to use module.githost
|
||||||
|
|
||||||
- Replaced occurrences of {{git.host}} with {{module.githost}} in CI workflow files
|
- Replaced occurrences of {{git.host}} with {{module.githost}} in CI workflow files
|
||||||
- Updated package dependencies for service template
|
- Updated package dependencies for service template
|
||||||
|
|
||||||
## 2024-12-26 - 1.10.7 - fix(assets)
|
## 2024-12-26 - 1.10.7 - fix(assets)
|
||||||
|
|
||||||
Correct URLs in templates and fix TypeScript declaration
|
Correct URLs in templates and fix TypeScript declaration
|
||||||
|
|
||||||
- Updated incorrect URLs in Dockerfile templates to 'host.today'.
|
- Updated incorrect URLs in Dockerfile templates to 'host.today'.
|
||||||
- Fixed type declaration for 'TemplateResult' in header.ts file.
|
- Fixed type declaration for 'TemplateResult' in header.ts file.
|
||||||
|
|
||||||
## 2024-12-08 - 1.10.6 - fix(ci)
|
## 2024-12-08 - 1.10.6 - fix(ci)
|
||||||
|
|
||||||
Corrected Docker image URL in CI templates
|
Corrected Docker image URL in CI templates
|
||||||
|
|
||||||
- Updated Docker image URL from 'code.foss.global/hosttoday' to 'code.foss.global/host.today' in default_nottags.yaml and default_tags.yaml.
|
- Updated Docker image URL from 'code.foss.global/hosttoday' to 'code.foss.global/host.today' in default_nottags.yaml and default_tags.yaml.
|
||||||
- Adjusted gitignore template to include a custom section delineation.
|
- Adjusted gitignore template to include a custom section delineation.
|
||||||
|
|
||||||
## 2024-12-02 - 1.10.5 - fix(assets)
|
## 2024-12-02 - 1.10.5 - fix(assets)
|
||||||
|
|
||||||
Update .gitignore template to remove pages directory
|
Update .gitignore template to remove pages directory
|
||||||
|
|
||||||
- Removed 'pages/' from the ignored directories in the .gitignore template.
|
- Removed 'pages/' from the ignored directories in the .gitignore template.
|
||||||
|
|
||||||
## 2024-11-05 - 1.10.4 - fix(mod_format)
|
## 2024-11-05 - 1.10.4 - fix(mod_format)
|
||||||
|
|
||||||
Correct file extension for TypeScript path configuration
|
Correct file extension for TypeScript path configuration
|
||||||
|
|
||||||
- Fixed the TypeScript configuration to use correct file extensions for module subdirectories.
|
- Fixed the TypeScript configuration to use correct file extensions for module subdirectories.
|
||||||
|
|
||||||
## 2024-10-27 - 1.10.3 - fix(mod_format)
|
## 2024-10-27 - 1.10.3 - fix(mod_format)
|
||||||
|
|
||||||
Reorder TypeScript formatting steps in mod_format module
|
Reorder TypeScript formatting steps in mod_format module
|
||||||
|
|
||||||
- Moved TypeScript configuration formatting earlier in the sequence for better logical consistency.
|
- Moved TypeScript configuration formatting earlier in the sequence for better logical consistency.
|
||||||
|
|
||||||
## 2024-10-27 - 1.10.2 - fix(format)
|
## 2024-10-27 - 1.10.2 - fix(format)
|
||||||
|
|
||||||
Add logging for tsconfig.json formatting
|
Add logging for tsconfig.json formatting
|
||||||
|
|
||||||
- Added an info log message for tsconfig.json formatting in format.tsconfig.ts.
|
- Added an info log message for tsconfig.json formatting in format.tsconfig.ts.
|
||||||
|
|
||||||
## 2024-10-27 - 1.10.1 - fix(format)
|
## 2024-10-27 - 1.10.1 - fix(format)
|
||||||
|
|
||||||
Fixed async issue in tsconfig module lookup and corrected property access
|
Fixed async issue in tsconfig module lookup and corrected property access
|
||||||
|
|
||||||
|
|
||||||
## 2024-10-27 - 1.10.0 - feat(mod_format)
|
## 2024-10-27 - 1.10.0 - feat(mod_format)
|
||||||
|
|
||||||
Add support for tsconfig.json formatting
|
Add support for tsconfig.json formatting
|
||||||
|
|
||||||
- Added a new script to format tsconfig.json.
|
- Added a new script to format tsconfig.json.
|
||||||
- Updated package.json to include `@git.zone/tspublish` as a dependency.
|
- Updated package.json to include `@git.zone/tspublish` as a dependency.
|
||||||
|
|
||||||
## 2024-10-23 - 1.9.126 - fix(format)
|
## 2024-10-23 - 1.9.126 - fix(format)
|
||||||
|
|
||||||
Remove redundant package.json property checks
|
Remove redundant package.json property checks
|
||||||
|
|
||||||
- Removed property checks for `main`, `typings`, and `browserslist` from format.packagejson.ts
|
- Removed property checks for `main`, `typings`, and `browserslist` from format.packagejson.ts
|
||||||
- This change streamlines the formatting process by removing unnecessary exits
|
- This change streamlines the formatting process by removing unnecessary exits
|
||||||
|
|
||||||
## 2024-09-29 - 1.9.125 - fix(cli)
|
## 2024-09-29 - 1.9.125 - fix(cli)
|
||||||
|
|
||||||
Fix package version configuration and formatting issues
|
Fix package version configuration and formatting issues
|
||||||
|
|
||||||
- Updated metadata fields in package.json (repository URL, bugs URL, and homepage).
|
- Updated metadata fields in package.json (repository URL, bugs URL, and homepage).
|
||||||
@@ -258,15 +575,17 @@ Fix package version configuration and formatting issues
|
|||||||
- Added missing Prettier default TypeScript and Markdown configurations.
|
- Added missing Prettier default TypeScript and Markdown configurations.
|
||||||
|
|
||||||
## 2024-09-27 - 1.9.124 - fix(cli)
|
## 2024-09-27 - 1.9.124 - fix(cli)
|
||||||
|
|
||||||
Ensured proper existence and initialization of readme files
|
Ensured proper existence and initialization of readme files
|
||||||
|
|
||||||
- Ensured readme.md and readme.hints.md files are created and initialized if they do not exist.
|
- Ensured readme.md and readme.hints.md files are created and initialized if they do not exist.
|
||||||
|
|
||||||
## 2024-09-27 - 1.9.123 - fix(core)
|
## 2024-09-27 - 1.9.123 - fix(core)
|
||||||
|
|
||||||
No changes detected
|
No changes detected
|
||||||
|
|
||||||
|
|
||||||
## 2024-09-27 - 1.9.123 - fix(core)
|
## 2024-09-27 - 1.9.123 - fix(core)
|
||||||
|
|
||||||
Update dependencies and improve build configurations
|
Update dependencies and improve build configurations
|
||||||
|
|
||||||
- Updated several dependencies in package.json for better compatibility
|
- Updated several dependencies in package.json for better compatibility
|
||||||
@@ -277,88 +596,111 @@ Update dependencies and improve build configurations
|
|||||||
- Provided initial structure for readme and readme hints
|
- Provided initial structure for readme and readme hints
|
||||||
|
|
||||||
## 2024-06-24 - 1.9.122 - fix(mod_commit)
|
## 2024-06-24 - 1.9.122 - fix(mod_commit)
|
||||||
|
|
||||||
Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.
|
Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.
|
||||||
|
|
||||||
- - Updated @git.zone/tsdoc to ^1.3.12
|
- - Updated @git.zone/tsdoc to ^1.3.12
|
||||||
- - Updated @push.rocks/smartfile to ^11.0.21
|
- - Updated @push.rocks/smartfile to ^11.0.21
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.121 - fix(mod_commit)
|
## 2024-06-23 - 1.9.121 - fix(mod_commit)
|
||||||
|
|
||||||
Fix changelog template rendering by removing extra new line when no version details are provided.
|
Fix changelog template rendering by removing extra new line when no version details are provided.
|
||||||
|
|
||||||
- Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.
|
- Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.120 - fix(mod_commit)
|
## 2024-06-23 - 1.9.120 - fix(mod_commit)
|
||||||
|
|
||||||
Handle edge case for empty version details in changelog formatting
|
Handle edge case for empty version details in changelog formatting
|
||||||
|
|
||||||
- Added check for the length of the recommendedNextVersionDetails array
|
- Added check for the length of the recommendedNextVersionDetails array
|
||||||
- Ensure no extra newline in changelog if there are no version details
|
- Ensure no extra newline in changelog if there are no version details
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.119 - fix(dependencies)
|
## 2024-06-23 - 1.9.119 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc to v1.3.8
|
Update @git.zone/tsdoc to v1.3.8
|
||||||
|
|
||||||
- Updated @git.zone/tsdoc from v1.3.7 to v1.3.8 in package.json
|
- Updated @git.zone/tsdoc from v1.3.7 to v1.3.8 in package.json
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.118 - fix(dependencies)
|
## 2024-06-23 - 1.9.118 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc to version 1.3.7
|
Update @git.zone/tsdoc to version 1.3.7
|
||||||
|
|
||||||
- Bump @git.zone/tsdoc from 1.3.6 to 1.3.7 in both package.json and pnpm-lock.yaml
|
- Bump @git.zone/tsdoc from 1.3.6 to 1.3.7 in both package.json and pnpm-lock.yaml
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.117 - fix(dependencies)
|
## 2024-06-23 - 1.9.117 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc dependency to v1.3.6
|
Update @git.zone/tsdoc dependency to v1.3.6
|
||||||
|
|
||||||
- Updated @git.zone/tsdoc version from 1.3.5 to 1.3.6 in package.json
|
- Updated @git.zone/tsdoc version from 1.3.5 to 1.3.6 in package.json
|
||||||
- Updated pnpm-lock.yaml to reflect the new version of @git.zone/tsdoc
|
- Updated pnpm-lock.yaml to reflect the new version of @git.zone/tsdoc
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.116 - fix(dependencies)
|
## 2024-06-23 - 1.9.116 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc to version 1.3.5
|
Update @git.zone/tsdoc to version 1.3.5
|
||||||
|
|
||||||
- Updated the @git.zone/tsdoc dependency in package.json and pnpm-lock.yaml from version 1.3.4 to 1.3.5
|
- Updated the @git.zone/tsdoc dependency in package.json and pnpm-lock.yaml from version 1.3.4 to 1.3.5
|
||||||
- Removed the outdated changelog.md file.
|
- Removed the outdated changelog.md file.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.114 - fix(format)
|
## 2024-06-23 - 1.9.114 - fix(format)
|
||||||
|
|
||||||
Fixed formatting issues across multiple TypeScript files.
|
Fixed formatting issues across multiple TypeScript files.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.113 - fix(mod_commit)
|
## 2024-06-23 - 1.9.113 - fix(mod_commit)
|
||||||
|
|
||||||
Remove extra new lines in changelog.
|
Remove extra new lines in changelog.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.112 - fix(core)
|
## 2024-06-23 - 1.9.112 - fix(core)
|
||||||
|
|
||||||
Update changelog formatting and remove outdated entries.
|
Update changelog formatting and remove outdated entries.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.111 - fix(changelog)
|
## 2024-06-23 - 1.9.111 - fix(changelog)
|
||||||
|
|
||||||
Remove outdated changelog entries and update formatting.
|
Remove outdated changelog entries and update formatting.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.110 - fix(dependencies)
|
## 2024-06-23 - 1.9.110 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc to version 1.3.4.
|
Update @git.zone/tsdoc to version 1.3.4.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.109 - fix(changelog)
|
## 2024-06-23 - 1.9.109 - fix(changelog)
|
||||||
|
|
||||||
Remove outdated entries and adjust formatting in changelog.
|
Remove outdated entries and adjust formatting in changelog.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.108 - fix(dependencies)
|
## 2024-06-23 - 1.9.108 - fix(dependencies)
|
||||||
|
|
||||||
Update @git.zone/tsdoc dependency to version 1.3.2.
|
Update @git.zone/tsdoc dependency to version 1.3.2.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.107 - fix(changelog)
|
## 2024-06-23 - 1.9.107 - fix(changelog)
|
||||||
|
|
||||||
Remove placeholder entries and adjust formatting in changelog.
|
Remove placeholder entries and adjust formatting in changelog.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.106 - fix(dependencies)
|
## 2024-06-23 - 1.9.106 - fix(dependencies)
|
||||||
|
|
||||||
Updated @git.zone/tsdoc from version 1.3.0 to 1.3.1.
|
Updated @git.zone/tsdoc from version 1.3.0 to 1.3.1.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.105 - fix(dependencies)
|
## 2024-06-23 - 1.9.105 - fix(dependencies)
|
||||||
|
|
||||||
Updated @git.zone/tsdoc dependency from 1.2.2 to 1.3.0 in package.json and pnpm-lock.yaml.
|
Updated @git.zone/tsdoc dependency from 1.2.2 to 1.3.0 in package.json and pnpm-lock.yaml.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.104 - fix(changelog)
|
## 2024-06-23 - 1.9.104 - fix(changelog)
|
||||||
|
|
||||||
Remove placeholder entries and adjust formatting in changelog.
|
Remove placeholder entries and adjust formatting in changelog.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.103 - fix(changelog)
|
## 2024-06-23 - 1.9.103 - fix(changelog)
|
||||||
|
|
||||||
Fix changelog to remove placeholder entries and adjust formatting.
|
Fix changelog to remove placeholder entries and adjust formatting.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.102 - fix(logging)
|
## 2024-06-23 - 1.9.102 - fix(logging)
|
||||||
|
|
||||||
Optimize logger instantiation and configuration.
|
Optimize logger instantiation and configuration.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.101 - fix(metadata)
|
## 2024-06-23 - 1.9.101 - fix(metadata)
|
||||||
|
|
||||||
Ensure accurate project metadata in package.json.
|
Ensure accurate project metadata in package.json.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.100 - fix(dependencies)
|
## 2024-06-23 - 1.9.100 - fix(dependencies)
|
||||||
|
|
||||||
Updated @git.zone/tsdoc dependency version to ^1.2.2 in package.json and pnpm-lock.yaml.
|
Updated @git.zone/tsdoc dependency version to ^1.2.2 in package.json and pnpm-lock.yaml.
|
||||||
|
|
||||||
## 2024-06-23 - 1.9.99 - fix(mod_commit)
|
## 2024-06-23 - 1.9.99 - fix(mod_commit)
|
||||||
|
|
||||||
Fix variable reassignment issue in changelog writing step.
|
Fix variable reassignment issue in changelog writing step.
|
||||||
|
|||||||
@@ -1,5 +1,5 @@
|
|||||||
{
|
{
|
||||||
"npmci": {
|
"szci": {
|
||||||
"npmGlobalTools": [],
|
"npmGlobalTools": [],
|
||||||
"npmAccessLevel": "private",
|
"npmAccessLevel": "private",
|
||||||
"npmRegistryUrl": "verdaccio.lossless.one"
|
"npmRegistryUrl": "verdaccio.lossless.one"
|
||||||
|
|||||||
49
package.json
49
package.json
@@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "@git.zone/cli",
|
"name": "@git.zone/cli",
|
||||||
"private": false,
|
"private": false,
|
||||||
"version": "1.16.8",
|
"version": "2.2.0",
|
||||||
"description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
|
"description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
|
||||||
"main": "dist_ts/index.ts",
|
"main": "dist_ts/index.ts",
|
||||||
"typings": "dist_ts/index.d.ts",
|
"typings": "dist_ts/index.d.ts",
|
||||||
@@ -57,44 +57,45 @@
|
|||||||
},
|
},
|
||||||
"homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
|
"homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.3.2",
|
"@git.zone/tsbuild": "^3.1.2",
|
||||||
"@git.zone/tsrun": "^1.3.3",
|
"@git.zone/tsrun": "^2.0.0",
|
||||||
"@git.zone/tstest": "^1.0.96",
|
"@git.zone/tstest": "^3.1.3",
|
||||||
"@types/node": "^22.15.18"
|
"@push.rocks/smartdelay": "^3.0.5",
|
||||||
|
"@push.rocks/smartinteract": "^2.0.16",
|
||||||
|
"@push.rocks/smartnetwork": "^4.4.0",
|
||||||
|
"@push.rocks/smartshell": "^3.3.0",
|
||||||
|
"@types/node": "^24.10.1"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@git.zone/tsdoc": "^1.5.0",
|
"@git.zone/tsdoc": "^1.10.0",
|
||||||
"@git.zone/tspublish": "^1.9.1",
|
"@git.zone/tspublish": "^1.10.3",
|
||||||
"@push.rocks/commitinfo": "^1.0.12",
|
"@push.rocks/commitinfo": "^1.0.12",
|
||||||
"@push.rocks/early": "^4.0.4",
|
"@push.rocks/early": "^4.0.4",
|
||||||
"@push.rocks/gulp-function": "^3.0.7",
|
"@push.rocks/gulp-function": "^3.0.7",
|
||||||
"@push.rocks/lik": "^6.2.2",
|
"@push.rocks/lik": "^6.2.2",
|
||||||
"@push.rocks/npmextra": "^5.1.2",
|
"@push.rocks/npmextra": "^5.3.3",
|
||||||
"@push.rocks/projectinfo": "^5.0.2",
|
"@push.rocks/projectinfo": "^5.0.2",
|
||||||
"@push.rocks/smartchok": "^1.0.34",
|
"@push.rocks/smartcli": "^4.0.19",
|
||||||
"@push.rocks/smartcli": "^4.0.11",
|
|
||||||
"@push.rocks/smartdelay": "^3.0.5",
|
|
||||||
"@push.rocks/smartdiff": "^1.0.3",
|
"@push.rocks/smartdiff": "^1.0.3",
|
||||||
"@push.rocks/smartfile": "^11.2.0",
|
"@push.rocks/smartfile": "^13.1.0",
|
||||||
|
"@push.rocks/smartfs": "^1.2.0",
|
||||||
"@push.rocks/smartgulp": "^3.0.4",
|
"@push.rocks/smartgulp": "^3.0.4",
|
||||||
"@push.rocks/smartinteract": "^2.0.15",
|
"@push.rocks/smartjson": "^5.2.0",
|
||||||
"@push.rocks/smartjson": "^5.0.20",
|
|
||||||
"@push.rocks/smartlegal": "^1.0.27",
|
"@push.rocks/smartlegal": "^1.0.27",
|
||||||
"@push.rocks/smartlog": "^3.0.9",
|
"@push.rocks/smartlog": "^3.1.10",
|
||||||
"@push.rocks/smartlog-destination-local": "^9.0.2",
|
"@push.rocks/smartlog-destination-local": "^9.0.2",
|
||||||
"@push.rocks/smartmustache": "^3.0.2",
|
"@push.rocks/smartmustache": "^3.0.2",
|
||||||
"@push.rocks/smartnpm": "^2.0.4",
|
"@push.rocks/smartnpm": "^2.0.6",
|
||||||
"@push.rocks/smartobject": "^1.0.12",
|
"@push.rocks/smartobject": "^1.0.12",
|
||||||
"@push.rocks/smartopen": "^2.0.0",
|
"@push.rocks/smartopen": "^2.0.0",
|
||||||
"@push.rocks/smartpath": "^5.0.18",
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
"@push.rocks/smartpromise": "^4.2.3",
|
"@push.rocks/smartpromise": "^4.2.3",
|
||||||
"@push.rocks/smartscaf": "^4.0.16",
|
"@push.rocks/smartscaf": "^4.0.19",
|
||||||
"@push.rocks/smartshell": "^3.2.3",
|
|
||||||
"@push.rocks/smartstream": "^3.2.5",
|
"@push.rocks/smartstream": "^3.2.5",
|
||||||
"@push.rocks/smartunique": "^3.0.9",
|
"@push.rocks/smartunique": "^3.0.9",
|
||||||
"@push.rocks/smartupdate": "^2.0.6",
|
"@push.rocks/smartupdate": "^2.0.6",
|
||||||
"@types/through2": "^2.0.41",
|
"@types/through2": "^2.0.41",
|
||||||
"prettier": "^3.5.3",
|
"prettier": "^3.7.3",
|
||||||
"through2": "^4.0.2"
|
"through2": "^4.0.2"
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
@@ -113,7 +114,13 @@
|
|||||||
"last 1 chrome versions"
|
"last 1 chrome versions"
|
||||||
],
|
],
|
||||||
"pnpm": {
|
"pnpm": {
|
||||||
"overrides": {}
|
"overrides": {},
|
||||||
|
"onlyBuiltDependencies": [
|
||||||
|
"esbuild",
|
||||||
|
"mongodb-memory-server",
|
||||||
|
"puppeteer",
|
||||||
|
"sharp"
|
||||||
|
]
|
||||||
},
|
},
|
||||||
"packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
|
"packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
|
||||||
}
|
}
|
||||||
|
|||||||
10438
pnpm-lock.yaml
generated
10438
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
120
readme.hints.md
120
readme.hints.md
@@ -1,10 +1,11 @@
|
|||||||
# Gitzone CLI - Development Hints
|
# Gitzone CLI - Development Hints
|
||||||
|
|
||||||
* the cli of the git.zone project.
|
- the cli of the git.zone project.
|
||||||
|
|
||||||
## Project Overview
|
## Project Overview
|
||||||
|
|
||||||
Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local development cycles. It provides utilities for:
|
Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local development cycles. It provides utilities for:
|
||||||
|
|
||||||
- Project initialization and templating (via smartscaf)
|
- Project initialization and templating (via smartscaf)
|
||||||
- Code formatting and standardization
|
- Code formatting and standardization
|
||||||
- Version control and commit management
|
- Version control and commit management
|
||||||
@@ -14,12 +15,14 @@ Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local
|
|||||||
## Architecture
|
## Architecture
|
||||||
|
|
||||||
### Core Structure
|
### Core Structure
|
||||||
|
|
||||||
- Main CLI entry: `cli.ts` / `cli.child.ts`
|
- Main CLI entry: `cli.ts` / `cli.child.ts`
|
||||||
- Modular architecture with separate modules in `ts/mod_*` directories
|
- Modular architecture with separate modules in `ts/mod_*` directories
|
||||||
- Each module handles specific functionality (format, commit, docker, etc.)
|
- Each module handles specific functionality (format, commit, docker, etc.)
|
||||||
- Extensive use of plugins pattern via `plugins.ts` files
|
- Extensive use of plugins pattern via `plugins.ts` files
|
||||||
|
|
||||||
### Configuration Management
|
### Configuration Management
|
||||||
|
|
||||||
- Uses `npmextra.json` for all tool configuration
|
- Uses `npmextra.json` for all tool configuration
|
||||||
- Configuration stored under `gitzone` key in npmextra
|
- Configuration stored under `gitzone` key in npmextra
|
||||||
- No separate `.gitzonerc` file - everything in npmextra.json
|
- No separate `.gitzonerc` file - everything in npmextra.json
|
||||||
@@ -30,6 +33,7 @@ Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local
|
|||||||
The format module is responsible for project standardization:
|
The format module is responsible for project standardization:
|
||||||
|
|
||||||
#### Current Modules:
|
#### Current Modules:
|
||||||
|
|
||||||
1. **cleanup** - Removes obsolete files (yarn.lock, tslint.json, etc.)
|
1. **cleanup** - Removes obsolete files (yarn.lock, tslint.json, etc.)
|
||||||
2. **copy** - File copying with glob patterns (fully implemented)
|
2. **copy** - File copying with glob patterns (fully implemented)
|
||||||
3. **gitignore** - Creates/updates .gitignore from templates
|
3. **gitignore** - Creates/updates .gitignore from templates
|
||||||
@@ -42,6 +46,7 @@ The format module is responsible for project standardization:
|
|||||||
10. **tsconfig** - Formats TypeScript configuration
|
10. **tsconfig** - Formats TypeScript configuration
|
||||||
|
|
||||||
#### Execution Order (Dependency-Based):
|
#### Execution Order (Dependency-Based):
|
||||||
|
|
||||||
- Modules are now executed in parallel groups based on dependencies
|
- Modules are now executed in parallel groups based on dependencies
|
||||||
- Independent modules run concurrently for better performance
|
- Independent modules run concurrently for better performance
|
||||||
- Dependency analyzer ensures correct execution order
|
- Dependency analyzer ensures correct execution order
|
||||||
@@ -84,6 +89,41 @@ The format module is responsible for project standardization:
|
|||||||
5. **Performance Optimizations**: Parallel execution and caching
|
5. **Performance Optimizations**: Parallel execution and caching
|
||||||
6. **Reporting**: Diff views, statistics, verbose logging
|
6. **Reporting**: Diff views, statistics, verbose logging
|
||||||
7. **Architecture**: Clean separation of concerns with new classes
|
7. **Architecture**: Clean separation of concerns with new classes
|
||||||
|
8. **Unified Version Bumping**: Self-managed version updates eliminating npm warning pollution in deno.json
|
||||||
|
|
||||||
|
### Version Bumping Refactor (Latest)
|
||||||
|
|
||||||
|
The commit module's version bumping has been refactored to eliminate npm command dependencies:
|
||||||
|
|
||||||
|
**Changes:**
|
||||||
|
- Removed `bumpNpmVersion()` - was causing npm warnings to pollute deno.json
|
||||||
|
- Removed `syncVersionToDenoJson()` - no longer needed with unified approach
|
||||||
|
- Removed separate `bumpDenoVersion()` - replaced by unified implementation
|
||||||
|
- Added `readCurrentVersion()` helper - reads from either package.json or deno.json
|
||||||
|
- Added `updateVersionFile()` helper - updates JSON files directly
|
||||||
|
- Unified `bumpProjectVersion()` - handles npm/deno/both with single clean code path
|
||||||
|
|
||||||
|
**Benefits:**
|
||||||
|
- No npm warning pollution in version fields
|
||||||
|
- Full control over version bumping process
|
||||||
|
- Simpler git history (no amending, no force-tagging)
|
||||||
|
- Same code path for all project types
|
||||||
|
- Reuses existing `calculateNewVersion()` function
|
||||||
|
|
||||||
|
### Auto-Accept Flag for Commits
|
||||||
|
|
||||||
|
The commit module now supports `-y/--yes` flag for non-interactive commits:
|
||||||
|
|
||||||
|
**Usage:**
|
||||||
|
- `gitzone commit -y` - Auto-accepts AI recommendations without prompts
|
||||||
|
- `gitzone commit -yp` - Auto-accepts and pushes to origin
|
||||||
|
- Separate `-p/--push` flag controls push behavior
|
||||||
|
|
||||||
|
**Implementation:**
|
||||||
|
- Creates AnswerBucket programmatically when `-y` flag detected
|
||||||
|
- Preserves all UI output for transparency
|
||||||
|
- Fully backward compatible with interactive mode
|
||||||
|
- CI/CD friendly for automated workflows
|
||||||
|
|
||||||
## Development Tips
|
## Development Tips
|
||||||
|
|
||||||
@@ -132,6 +172,27 @@ The format module is responsible for project standardization:
|
|||||||
|
|
||||||
## CLI Usage
|
## CLI Usage
|
||||||
|
|
||||||
|
### Commit Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Interactive commit (default)
|
||||||
|
gitzone commit
|
||||||
|
|
||||||
|
# Auto-accept AI recommendations (no prompts)
|
||||||
|
gitzone commit -y
|
||||||
|
gitzone commit --yes
|
||||||
|
|
||||||
|
# Auto-accept and push to origin
|
||||||
|
gitzone commit -yp
|
||||||
|
gitzone commit -y -p
|
||||||
|
gitzone commit --yes --push
|
||||||
|
|
||||||
|
# Run format before commit
|
||||||
|
gitzone commit --format
|
||||||
|
```
|
||||||
|
|
||||||
|
### Format Commands
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Basic format
|
# Basic format
|
||||||
gitzone format
|
gitzone format
|
||||||
@@ -182,7 +243,60 @@ gitzone format --clean-backups
|
|||||||
|
|
||||||
## API Changes
|
## API Changes
|
||||||
|
|
||||||
- smartfile API updated to use fs.* and memory.* namespaces
|
### Smartfile v13 Migration (Latest - Completed)
|
||||||
|
|
||||||
|
The project has been fully migrated from @push.rocks/smartfile v11 to v13, which introduced a major breaking change where filesystem operations were split into two separate packages:
|
||||||
|
|
||||||
|
**Packages:**
|
||||||
|
- `@push.rocks/smartfile` v13.0.1 - File representation classes (SmartFile, StreamFile, VirtualDirectory)
|
||||||
|
- `@push.rocks/smartfs` v1.1.0 - Filesystem operations (read, write, exists, stat, etc.)
|
||||||
|
|
||||||
|
**Key API Changes:**
|
||||||
|
1. **File Reading**:
|
||||||
|
- Old: `plugins.smartfile.fs.toStringSync(path)` or `plugins.smartfile.fs.toObjectSync(path)`
|
||||||
|
- New: `await plugins.smartfs.file(path).encoding('utf8').read()` + JSON.parse if needed
|
||||||
|
- Important: `read()` returns `string | Buffer` - use `as string` type assertion when encoding is set
|
||||||
|
|
||||||
|
2. **File Writing**:
|
||||||
|
- Old: `plugins.smartfile.memory.toFs(content, path)` or `plugins.smartfile.memory.toFsSync(content, path)`
|
||||||
|
- New: `await plugins.smartfs.file(path).encoding('utf8').write(content)`
|
||||||
|
|
||||||
|
3. **File Existence**:
|
||||||
|
- Old: `plugins.smartfile.fs.fileExists(path)` or `plugins.smartfile.fs.fileExistsSync(path)`
|
||||||
|
- New: `await plugins.smartfs.file(path).exists()`
|
||||||
|
|
||||||
|
4. **Directory Operations**:
|
||||||
|
- Old: `plugins.smartfile.fs.ensureDir(path)`
|
||||||
|
- New: `await plugins.smartfs.directory(path).recursive().create()`
|
||||||
|
- Old: `plugins.smartfile.fs.remove(path)`
|
||||||
|
- New: `await plugins.smartfs.directory(path).recursive().delete()` or `await plugins.smartfs.file(path).delete()`
|
||||||
|
|
||||||
|
5. **Directory Listing**:
|
||||||
|
- Old: `plugins.smartfile.fs.listFolders(path)` or `plugins.smartfile.fs.listFoldersSync(path)`
|
||||||
|
- New: `await plugins.smartfs.directory(path).list()` then filter by `stats.isDirectory`
|
||||||
|
- Note: `list()` returns `IDirectoryEntry[]` with `path` and `name` properties - use `stat()` to check if directory
|
||||||
|
|
||||||
|
6. **File Stats**:
|
||||||
|
- Old: `stats.isDirectory()` (method)
|
||||||
|
- New: `stats.isDirectory` (boolean property)
|
||||||
|
- Old: `stats.mtimeMs`
|
||||||
|
- New: `stats.mtime.getTime()`
|
||||||
|
|
||||||
|
7. **SmartFile Factory**:
|
||||||
|
- Old: Direct SmartFile instantiation
|
||||||
|
- New: `plugins.smartfile.SmartFileFactory.nodeFs()` then factory methods
|
||||||
|
|
||||||
|
**Migration Pattern:**
|
||||||
|
All sync methods must become async. Functions that were previously synchronous (like `getProjectName()`) now return `Promise<T>` and must be awaited.
|
||||||
|
|
||||||
|
**Affected Modules:**
|
||||||
|
- ts/mod_format/* (largest area - 15+ files)
|
||||||
|
- ts/mod_commit/* (version bumping)
|
||||||
|
- ts/mod_services/* (configuration management)
|
||||||
|
- ts/mod_meta/* (meta repository management)
|
||||||
|
- ts/mod_standard/* (template listing)
|
||||||
|
- ts/mod_template/* (template operations)
|
||||||
|
|
||||||
|
**Previous API Changes:**
|
||||||
- smartnpm requires instance creation: `new NpmRegistry()`
|
- smartnpm requires instance creation: `new NpmRegistry()`
|
||||||
- All file operations now use updated APIs
|
|
||||||
- Type imports use `import type` for proper verbatim module syntax
|
- Type imports use `import type` for proper verbatim module syntax
|
||||||
201
readme.md
201
readme.md
@@ -7,7 +7,11 @@
|
|||||||
|
|
||||||
## 🎯 What is gitzone?
|
## 🎯 What is gitzone?
|
||||||
|
|
||||||
gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, and seamless version control. Whether you're bootstrapping a new TypeScript project, maintaining code quality, or managing complex multi-repository setups, gitzone has got you covered.
|
gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, seamless version control, and development service orchestration. Whether you're bootstrapping a new TypeScript project, maintaining code quality, managing complex multi-repository setups, or spinning up local development databases, gitzone has got you covered.
|
||||||
|
|
||||||
|
## Issue Reporting and Security
|
||||||
|
|
||||||
|
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
|
||||||
|
|
||||||
## 🏃♂️ Quick Start
|
## 🏃♂️ Quick Start
|
||||||
|
|
||||||
@@ -23,7 +27,7 @@ pnpm add -g @git.zone/cli
|
|||||||
|
|
||||||
Once installed, you can use either `gitzone` or the shorter `gzone` command from anywhere in your terminal.
|
Once installed, you can use either `gitzone` or the shorter `gzone` command from anywhere in your terminal.
|
||||||
|
|
||||||
### Your First Command
|
### Your First Commands
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# Create a new TypeScript npm package
|
# Create a new TypeScript npm package
|
||||||
@@ -32,12 +36,66 @@ gitzone template npm
|
|||||||
# Format your entire codebase
|
# Format your entire codebase
|
||||||
gitzone format
|
gitzone format
|
||||||
|
|
||||||
# Create a semantic commit
|
# Start local MongoDB and MinIO services
|
||||||
|
gitzone services start
|
||||||
|
|
||||||
|
# Create a semantic commit with AI-powered suggestions
|
||||||
gitzone commit
|
gitzone commit
|
||||||
```
|
```
|
||||||
|
|
||||||
## 🛠️ Core Features
|
## 🛠️ Core Features
|
||||||
|
|
||||||
|
### 🐳 Development Services Management
|
||||||
|
|
||||||
|
Effortlessly manage local MongoDB and MinIO (S3-compatible) services for your development environment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
gitzone services [command]
|
||||||
|
```
|
||||||
|
|
||||||
|
**Available commands:**
|
||||||
|
|
||||||
|
- **`start [service]`** - Start services (mongo|s3|all)
|
||||||
|
- **`stop [service]`** - Stop services (mongo|s3|all)
|
||||||
|
- **`restart [service]`** - Restart services
|
||||||
|
- **`status`** - Show current service status
|
||||||
|
- **`config`** - Display configuration details
|
||||||
|
- **`compass`** - Get MongoDB Compass connection string with network IP
|
||||||
|
- **`logs [service] [lines]`** - View service logs
|
||||||
|
- **`remove`** - Remove containers (preserves data)
|
||||||
|
- **`clean`** - Remove containers AND data (⚠️ destructive)
|
||||||
|
|
||||||
|
**Key features:**
|
||||||
|
|
||||||
|
- 🎲 **Smart port assignment** - Automatically assigns random ports (20000-30000) to avoid conflicts
|
||||||
|
- 📦 **Project isolation** - Each project gets its own containers with unique names
|
||||||
|
- 💾 **Data persistence** - Data stored in `.nogit/` directories survives container restarts
|
||||||
|
- 🔗 **MongoDB Compass support** - Instantly get connection strings for GUI access
|
||||||
|
- 🌐 **Network IP detection** - Automatically detects your local network IP for remote connections
|
||||||
|
- ⚙️ **Auto-configuration** - Creates `.nogit/env.json` with smart defaults
|
||||||
|
|
||||||
|
**Example workflow:**
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Start all services for your project
|
||||||
|
gitzone services start
|
||||||
|
|
||||||
|
# Check what's running
|
||||||
|
gitzone services status
|
||||||
|
|
||||||
|
# Get MongoDB Compass connection string
|
||||||
|
gitzone services compass
|
||||||
|
# Output: mongodb://defaultadmin:defaultpass@192.168.1.100:27018/myproject?authSource=admin
|
||||||
|
|
||||||
|
# View MongoDB logs
|
||||||
|
gitzone services logs mongo 50
|
||||||
|
|
||||||
|
# Stop services when done
|
||||||
|
gitzone services stop
|
||||||
|
```
|
||||||
|
|
||||||
|
The services are configured via `.nogit/env.json` which is automatically created with secure defaults and random ports for each project.
|
||||||
|
|
||||||
### 📦 Project Templates
|
### 📦 Project Templates
|
||||||
|
|
||||||
Instantly scaffold production-ready projects with best practices built-in:
|
Instantly scaffold production-ready projects with best practices built-in:
|
||||||
@@ -47,14 +105,16 @@ gitzone template [template-name]
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Available templates:**
|
**Available templates:**
|
||||||
|
|
||||||
- **`npm`** - TypeScript npm package with testing, CI/CD, and full tooling
|
- **`npm`** - TypeScript npm package with testing, CI/CD, and full tooling
|
||||||
- **`service`** - Microservice architecture with Docker support
|
- **`service`** - Microservice architecture with Docker support
|
||||||
- **`website`** - Modern web application with LitElement and service workers
|
- **`website`** - Modern web application with LitElement and service workers
|
||||||
- **`wcc`** - Web Component Collection for reusable UI components
|
- **`wcc`** - Web Component Collection for reusable UI components
|
||||||
|
|
||||||
Each template comes pre-configured with:
|
Each template comes pre-configured with:
|
||||||
|
|
||||||
- ✅ TypeScript with modern configurations
|
- ✅ TypeScript with modern configurations
|
||||||
- ✅ Automated testing setup
|
- ✅ Automated testing setup with `@git.zone/tstest`
|
||||||
- ✅ CI/CD pipelines (GitLab/GitHub)
|
- ✅ CI/CD pipelines (GitLab/GitHub)
|
||||||
- ✅ Code formatting and linting
|
- ✅ Code formatting and linting
|
||||||
- ✅ Documentation structure
|
- ✅ Documentation structure
|
||||||
@@ -81,6 +141,7 @@ gitzone format --verbose
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Format features:**
|
**Format features:**
|
||||||
|
|
||||||
- 🔄 **Smart caching** - Only processes changed files
|
- 🔄 **Smart caching** - Only processes changed files
|
||||||
- 🛡️ **Rollback support** - Undo formatting changes if needed
|
- 🛡️ **Rollback support** - Undo formatting changes if needed
|
||||||
- 📊 **Detailed reporting** - See exactly what changed
|
- 📊 **Detailed reporting** - See exactly what changed
|
||||||
@@ -88,6 +149,7 @@ gitzone format --verbose
|
|||||||
- 🎯 **Module-specific formatting** - Target specific formatters
|
- 🎯 **Module-specific formatting** - Target specific formatters
|
||||||
|
|
||||||
**Rollback capabilities:**
|
**Rollback capabilities:**
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# List all available backups
|
# List all available backups
|
||||||
gitzone format --list-backups
|
gitzone format --list-backups
|
||||||
@@ -103,6 +165,7 @@ gitzone format --clean-backups
|
|||||||
```
|
```
|
||||||
|
|
||||||
**Formatters included:**
|
**Formatters included:**
|
||||||
|
|
||||||
- **Prettier** - JavaScript/TypeScript code formatting
|
- **Prettier** - JavaScript/TypeScript code formatting
|
||||||
- **License** - Ensure proper licensing
|
- **License** - Ensure proper licensing
|
||||||
- **Package.json** - Standardize package configurations
|
- **Package.json** - Standardize package configurations
|
||||||
@@ -111,28 +174,38 @@ gitzone format --clean-backups
|
|||||||
- **Gitignore** - Repository ignore rules
|
- **Gitignore** - Repository ignore rules
|
||||||
- **Templates** - Project template updates
|
- **Templates** - Project template updates
|
||||||
- **Npmextra** - Extended npm configurations
|
- **Npmextra** - Extended npm configurations
|
||||||
|
- **Cleanup** - Removes obsolete files (yarn.lock, package-lock.json, tslint.json, etc.)
|
||||||
|
|
||||||
### 🔀 Semantic Commits & Versioning
|
### 🔀 Semantic Commits & Versioning
|
||||||
|
|
||||||
Create standardized commits that automatically handle versioning:
|
Create standardized commits with AI-powered suggestions that automatically handle versioning:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# Interactive commit with AI recommendations
|
||||||
gitzone commit
|
gitzone commit
|
||||||
|
|
||||||
|
# Auto-accept AI recommendations
|
||||||
|
gitzone commit -y
|
||||||
|
|
||||||
|
# Auto-accept and push
|
||||||
|
gitzone commit -y -p
|
||||||
```
|
```
|
||||||
|
|
||||||
Features:
|
Features:
|
||||||
- 📝 Interactive commit message builder
|
|
||||||
|
- 🤖 **AI-powered analysis** - Analyzes your changes and suggests commit type, scope, and message
|
||||||
|
- 📝 Interactive commit message builder with smart defaults
|
||||||
- 🏷️ Automatic version bumping (major/minor/patch)
|
- 🏷️ Automatic version bumping (major/minor/patch)
|
||||||
- 📜 Changelog generation
|
- 📜 Changelog generation
|
||||||
- 🚀 Optional auto-push to origin
|
- 🚀 Optional auto-push to origin
|
||||||
- 🎯 Conventional commit compliance
|
- 🎯 Conventional commit compliance
|
||||||
|
|
||||||
The commit wizard guides you through:
|
The commit wizard guides you through:
|
||||||
1. **Type selection** (feat/fix/docs/style/refactor/perf/test/chore)
|
|
||||||
|
1. **Type selection** (fix/feat/BREAKING CHANGE) with AI recommendation
|
||||||
2. **Scope definition** (component/module affected)
|
2. **Scope definition** (component/module affected)
|
||||||
3. **Description crafting**
|
3. **Description crafting**
|
||||||
4. **Breaking change detection**
|
4. **Version bump determination**
|
||||||
5. **Version bump determination**
|
|
||||||
|
|
||||||
### 🏗️ Meta Repository Management
|
### 🏗️ Meta Repository Management
|
||||||
|
|
||||||
@@ -153,6 +226,7 @@ gitzone meta remove [name]
|
|||||||
```
|
```
|
||||||
|
|
||||||
Perfect for:
|
Perfect for:
|
||||||
|
|
||||||
- Monorepo management
|
- Monorepo management
|
||||||
- Multi-package projects
|
- Multi-package projects
|
||||||
- Coordinated deployments
|
- Coordinated deployments
|
||||||
@@ -168,6 +242,7 @@ gitzone docker prune
|
|||||||
```
|
```
|
||||||
|
|
||||||
This command removes:
|
This command removes:
|
||||||
|
|
||||||
- Stopped containers
|
- Stopped containers
|
||||||
- Unused images
|
- Unused images
|
||||||
- Dangling volumes
|
- Dangling volumes
|
||||||
@@ -196,6 +271,7 @@ gitzone deprecate
|
|||||||
```
|
```
|
||||||
|
|
||||||
Interactive wizard for:
|
Interactive wizard for:
|
||||||
|
|
||||||
- Setting deprecation notices
|
- Setting deprecation notices
|
||||||
- Guiding users to replacements
|
- Guiding users to replacements
|
||||||
- Updating registry metadata
|
- Updating registry metadata
|
||||||
@@ -210,6 +286,7 @@ gitzone start
|
|||||||
```
|
```
|
||||||
|
|
||||||
Automatically:
|
Automatically:
|
||||||
|
|
||||||
- Checks out master branch
|
- Checks out master branch
|
||||||
- Pulls latest changes
|
- Pulls latest changes
|
||||||
- Installs dependencies
|
- Installs dependencies
|
||||||
@@ -266,44 +343,58 @@ Customize gitzone behavior through `npmextra.json`:
|
|||||||
## 🏆 Best Practices
|
## 🏆 Best Practices
|
||||||
|
|
||||||
### For New Projects
|
### For New Projects
|
||||||
|
|
||||||
1. Start with a template: `gitzone template npm`
|
1. Start with a template: `gitzone template npm`
|
||||||
2. Customize the generated structure
|
2. Set up local services: `gitzone services start`
|
||||||
3. Run initial format: `gitzone format`
|
3. Customize the generated structure
|
||||||
4. Set up CI/CD: `gitzone open ci`
|
4. Run initial format: `gitzone format`
|
||||||
|
5. Set up CI/CD: `gitzone open ci`
|
||||||
|
|
||||||
### For Existing Projects
|
### For Existing Projects
|
||||||
|
|
||||||
1. Initialize: `gitzone start`
|
1. Initialize: `gitzone start`
|
||||||
2. Format codebase: `gitzone format --dry-run` (preview first!)
|
2. Format codebase: `gitzone format --dry-run` (preview first!)
|
||||||
3. Apply formatting: `gitzone format --yes`
|
3. Apply formatting: `gitzone format --yes`
|
||||||
4. Commit changes: `gitzone commit`
|
4. Set up services: `gitzone services start`
|
||||||
|
5. Commit changes: `gitzone commit`
|
||||||
|
|
||||||
### For Teams
|
### For Teams
|
||||||
|
|
||||||
1. Document format preferences in `npmextra.json`
|
1. Document format preferences in `npmextra.json`
|
||||||
2. Use `--save-plan` for reviewable format changes
|
2. Share `.nogit/env.json` template for consistent service setup
|
||||||
3. Enable rollback for safety
|
3. Use `--save-plan` for reviewable format changes
|
||||||
4. Standardize commit conventions
|
4. Enable rollback for safety
|
||||||
|
5. Standardize commit conventions
|
||||||
|
|
||||||
## 🎯 Common Workflows
|
## 🎯 Common Workflows
|
||||||
|
|
||||||
### Clean Development Cycle
|
### Full-Stack Development Cycle
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Start fresh
|
# 1. Start fresh
|
||||||
gitzone start
|
gitzone start
|
||||||
|
|
||||||
# 2. Make changes
|
# 2. Spin up databases and services
|
||||||
|
gitzone services start
|
||||||
|
|
||||||
|
# 3. Make changes
|
||||||
# ... your development work ...
|
# ... your development work ...
|
||||||
|
|
||||||
# 3. Format code
|
# 4. Check service logs if needed
|
||||||
|
gitzone services logs mongo
|
||||||
|
|
||||||
|
# 5. Format code
|
||||||
gitzone format
|
gitzone format
|
||||||
|
|
||||||
# 4. Commit with semantic versioning
|
# 6. Commit with semantic versioning
|
||||||
gitzone commit
|
gitzone commit
|
||||||
|
|
||||||
# 5. Deploy (if CI/CD configured)
|
# 7. Stop services when done
|
||||||
# Automatic via git push
|
gitzone services stop
|
||||||
```
|
```
|
||||||
|
|
||||||
### Multi-Repository Management
|
### Multi-Repository Management
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Set up meta repository
|
# 1. Set up meta repository
|
||||||
gitzone meta init
|
gitzone meta init
|
||||||
@@ -318,6 +409,7 @@ gitzone meta update
|
|||||||
```
|
```
|
||||||
|
|
||||||
### Safe Formatting with Rollback
|
### Safe Formatting with Rollback
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
# 1. Preview changes
|
# 1. Preview changes
|
||||||
gitzone format --dry-run
|
gitzone format --dry-run
|
||||||
@@ -332,20 +424,44 @@ gitzone format --from-plan format-changes.json
|
|||||||
gitzone format --rollback
|
gitzone format --rollback
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Database-Driven Development
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# 1. Start MongoDB and MinIO
|
||||||
|
gitzone services start
|
||||||
|
|
||||||
|
# 2. Get connection string for your app
|
||||||
|
gitzone services config
|
||||||
|
|
||||||
|
# 3. Connect with MongoDB Compass
|
||||||
|
gitzone services compass
|
||||||
|
|
||||||
|
# 4. Monitor services
|
||||||
|
gitzone services status
|
||||||
|
|
||||||
|
# 5. Clean everything when done
|
||||||
|
gitzone services clean # ⚠️ Warning: deletes data
|
||||||
|
```
|
||||||
|
|
||||||
## 🔌 Integrations
|
## 🔌 Integrations
|
||||||
|
|
||||||
### CI/CD Platforms
|
### CI/CD Platforms
|
||||||
|
|
||||||
- **GitLab CI** - Full pipeline support with templates
|
- **GitLab CI** - Full pipeline support with templates
|
||||||
- **GitHub Actions** - Automated workflows
|
- **GitHub Actions** - Automated workflows
|
||||||
- **Docker** - Container-based deployments
|
- **Docker** - Container-based deployments
|
||||||
|
|
||||||
### Development Tools
|
### Development Tools
|
||||||
|
|
||||||
- **TypeScript** - First-class support
|
- **TypeScript** - First-class support
|
||||||
- **Prettier** - Code formatting
|
- **Prettier** - Code formatting
|
||||||
- **ESLint** - Linting (via format modules)
|
|
||||||
- **npm/pnpm** - Package management
|
- **npm/pnpm** - Package management
|
||||||
|
- **MongoDB** - Local database service
|
||||||
|
- **MinIO** - S3-compatible object storage
|
||||||
|
- **MongoDB Compass** - Database GUI integration
|
||||||
|
|
||||||
### Version Control
|
### Version Control
|
||||||
|
|
||||||
- **Git** - Deep integration
|
- **Git** - Deep integration
|
||||||
- **Semantic Versioning** - Automatic version bumping
|
- **Semantic Versioning** - Automatic version bumping
|
||||||
- **Conventional Commits** - Standardized commit messages
|
- **Conventional Commits** - Standardized commit messages
|
||||||
@@ -357,50 +473,81 @@ gitzone format --rollback
|
|||||||
3. **Leverage templates**: Start projects right with proven structures
|
3. **Leverage templates**: Start projects right with proven structures
|
||||||
4. **Enable caching**: Dramatically speeds up formatting operations
|
4. **Enable caching**: Dramatically speeds up formatting operations
|
||||||
5. **Save format plans**: Review changes before applying in production
|
5. **Save format plans**: Review changes before applying in production
|
||||||
|
6. **Port management**: Let services auto-assign ports to avoid conflicts
|
||||||
|
7. **Use MongoDB Compass**: `gitzone services compass` for visual DB management
|
||||||
|
|
||||||
## 🐛 Troubleshooting
|
## 🐛 Troubleshooting
|
||||||
|
|
||||||
### Format Command Shows "Cancelled"
|
### Format Command Shows "Cancelled"
|
||||||
|
|
||||||
If the format command shows cancelled even after confirming:
|
If the format command shows cancelled even after confirming:
|
||||||
|
|
||||||
- Check your `npmextra.json` configuration
|
- Check your `npmextra.json` configuration
|
||||||
- Try with `--yes` flag to skip confirmation
|
- Try with `--yes` flag to skip confirmation
|
||||||
- Use `--verbose` for detailed output
|
- Use `--verbose` for detailed output
|
||||||
|
|
||||||
### Docker Commands Fail
|
### Docker Commands Fail
|
||||||
|
|
||||||
Ensure Docker daemon is running:
|
Ensure Docker daemon is running:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker info
|
docker info
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### Services Won't Start
|
||||||
|
|
||||||
|
Check for port conflicts:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Services auto-assign ports, but you can check the config
|
||||||
|
cat .nogit/env.json
|
||||||
|
|
||||||
|
# Verify Docker is running
|
||||||
|
docker ps
|
||||||
|
```
|
||||||
|
|
||||||
### Template Creation Issues
|
### Template Creation Issues
|
||||||
|
|
||||||
Verify npm/pnpm is properly configured:
|
Verify npm/pnpm is properly configured:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm config get registry
|
npm config get registry
|
||||||
```
|
```
|
||||||
|
|
||||||
|
### MongoDB Connection Issues
|
||||||
|
|
||||||
|
- Ensure services are running: `gitzone services status`
|
||||||
|
- Check firewall settings for the assigned ports
|
||||||
|
- Use `gitzone services compass` for the correct connection string
|
||||||
|
|
||||||
## 📈 Performance
|
## 📈 Performance
|
||||||
|
|
||||||
gitzone is optimized for speed:
|
gitzone is optimized for speed:
|
||||||
|
|
||||||
- **Parallel processing** for format operations
|
- **Parallel processing** for format operations
|
||||||
- **Smart caching** to avoid redundant work
|
- **Smart caching** to avoid redundant work
|
||||||
- **Incremental updates** for meta repositories
|
- **Incremental updates** for meta repositories
|
||||||
- **Minimal dependencies** for fast installation
|
- **Minimal dependencies** for fast installation
|
||||||
|
- **Isolated services** prevent resource conflicts
|
||||||
|
- **Auto port assignment** eliminates manual configuration
|
||||||
|
|
||||||
## License and Legal Information
|
## License and Legal Information
|
||||||
|
|
||||||
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
|
||||||
|
|
||||||
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
### Trademarks
|
### Trademarks
|
||||||
|
|
||||||
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
|
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
|
||||||
|
|
||||||
|
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
|
||||||
|
|
||||||
### Company Information
|
### Company Information
|
||||||
|
|
||||||
Task Venture Capital GmbH
|
Task Venture Capital GmbH
|
||||||
Registered at District court Bremen HRB 35230 HB, Germany
|
Registered at District Court Bremen HRB 35230 HB, Germany
|
||||||
|
|
||||||
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
For any legal inquiries or further information, please contact us via email at hello@task.vc.
|
||||||
|
|
||||||
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
||||||
257
readme.plan.md
257
readme.plan.md
@@ -1,170 +1,121 @@
|
|||||||
# Gitzone Format Module Improvement Plan
|
# GitZone Services Command Implementation Plan
|
||||||
|
|
||||||
Please reread /home/philkunz/.claude/CLAUDE.md before proceeding with any implementation.
|
|
||||||
|
|
||||||
## Overview
|
## Overview
|
||||||
This plan outlines improvements for the gitzone format module to enhance its functionality, reliability, and maintainability.
|
Implement the `gitzone services` command to manage MongoDB and MinIO containers for development projects.
|
||||||
|
|
||||||
## Phase 1: Core Improvements (High Priority) - COMPLETED ✅
|
## Tasks
|
||||||
|
|
||||||
### 1. Enhanced Error Handling & Recovery ✅
|
### Module Structure Setup
|
||||||
- [x] Implement rollback mechanism for failed format operations
|
- [x] Create `ts/mod_services/` directory
|
||||||
- [x] Add detailed error messages with recovery suggestions
|
- [x] Create `mod.plugins.ts` with required imports
|
||||||
- [x] Create a `--dry-run` flag to preview changes before applying
|
- [x] Create `helpers.ts` with utility functions
|
||||||
- [x] Add transaction-like behavior: all-or-nothing formatting
|
- [x] Create `classes.serviceconfiguration.ts` for config handling
|
||||||
- [x] Implement plan → action workflow as default behavior
|
- [x] Create `classes.dockercontainer.ts` for Docker operations
|
||||||
|
- [x] Create `classes.servicemanager.ts` for service management
|
||||||
|
- [x] Create `index.ts` with main command logic
|
||||||
|
|
||||||
### 2. Complete Missing Functionality ✅
|
### Core Functionality
|
||||||
- [x] Implement the `ensureDependency` function in format.packagejson.ts
|
- [x] Implement ServiceConfiguration class
|
||||||
- [x] Develop the copy module for file pattern-based copying
|
- [x] Load/create `.nogit/env.json` configuration
|
||||||
- [x] Add dependency version constraint management
|
- [x] Generate random available ports (20000-30000 range)
|
||||||
- [x] Support workspace/monorepo configurations (via configuration)
|
- [x] Preserve existing custom values
|
||||||
|
- [x] Provide default values for missing fields
|
||||||
|
|
||||||
### 3. Configuration & Flexibility ✅
|
- [x] Implement DockerContainer class
|
||||||
- [x] Extend npmextra.json gitzone configuration section
|
- [x] Check container status
|
||||||
- [x] Allow custom license exclusion/inclusion lists
|
- [x] Start/stop/restart containers
|
||||||
- [x] Make format steps configurable (skip/include specific modules)
|
- [x] Execute Docker commands
|
||||||
- [x] Support custom template directories (via configuration)
|
- [x] Handle container logs
|
||||||
- [x] Add format profiles for different project types
|
- [x] Manage volumes and port bindings
|
||||||
|
|
||||||
### 4. Architecture Changes ✅
|
- [x] Implement ServiceManager class
|
||||||
- [x] Introduce a `FormatContext` class to manage state across modules
|
- [x] Manage MongoDB containers
|
||||||
- [x] Create abstract `BaseFormatter` class for consistent module structure
|
- [x] Manage MinIO containers
|
||||||
- [x] Implement event system for inter-module communication (via context)
|
- [x] Handle container lifecycle
|
||||||
- [x] Add validation layer before format execution
|
- [x] Generate project-specific container names
|
||||||
- [x] Implement `FormatPlanner` class for plan → action workflow
|
- [x] Manage data directories in `.nogit/`
|
||||||
|
- [x] Generate MongoDB Compass connection strings
|
||||||
|
|
||||||
## Phase 2: Performance & Reporting (Medium Priority) - COMPLETED ✅
|
### Commands Implementation
|
||||||
|
- [x] `start` command - Start services (mongo|s3|all)
|
||||||
|
- [x] `stop` command - Stop services (mongo|s3|all)
|
||||||
|
- [x] `restart` command - Restart services (mongo|s3|all)
|
||||||
|
- [x] `status` command - Show service status
|
||||||
|
- [x] `config` command - Show current configuration
|
||||||
|
- [x] `compass` command - Show MongoDB Compass connection string
|
||||||
|
- [x] `logs` command - Show service logs with line count
|
||||||
|
- [x] `remove` command - Remove containers (preserve data)
|
||||||
|
- [x] `clean` command - Remove containers and data
|
||||||
|
|
||||||
### 5. Performance Optimizations ✅
|
### Integration
|
||||||
- [x] Implement parallel execution for independent format modules
|
- [x] Add `@push.rocks/smartshell` to main plugins.ts
|
||||||
- [x] Add file change detection to skip unchanged files
|
- [x] Add `@push.rocks/smartnetwork` to main plugins.ts
|
||||||
- [x] Create format cache to track last formatted state
|
- [x] Add `@push.rocks/smartinteraction` to main plugins.ts
|
||||||
- [x] Optimize Prettier runs by batching files
|
- [x] Register services command in `gitzone.cli.ts`
|
||||||
|
|
||||||
### 6. Enhanced Reporting & Visibility ✅
|
### Features
|
||||||
- [x] Generate comprehensive format report showing all changes
|
- [x] Auto-configuration with smart defaults
|
||||||
- [x] Add diff view for file modifications
|
- [x] Random port assignment to avoid conflicts
|
||||||
- [x] Create verbose logging option
|
- [x] Project isolation with unique container names
|
||||||
- [x] Add format statistics (files changed, time taken, etc.)
|
- [x] Data persistence in `.nogit/` directories
|
||||||
|
- [x] Status display (running/stopped/not installed)
|
||||||
|
- [x] Interactive confirmations for destructive operations
|
||||||
|
- [x] Colored console output
|
||||||
|
- [x] MinIO bucket auto-creation
|
||||||
|
- [x] MongoDB Compass connection string with network IP
|
||||||
|
|
||||||
## Phase 3: Advanced Features (Lower Priority) - PARTIALLY COMPLETED
|
### Testing
|
||||||
|
- [ ] Test service start/stop operations
|
||||||
|
- [ ] Test configuration creation and updates
|
||||||
|
- [ ] Test port collision handling
|
||||||
|
- [ ] Test data persistence
|
||||||
|
- [ ] Test MongoDB Compass connection string generation
|
||||||
|
- [ ] Test all command variations
|
||||||
|
|
||||||
### 7. Better Integration & Extensibility ⏳
|
## Configuration Format
|
||||||
- [ ] Create plugin system for custom format modules
|
```json
|
||||||
- [ ] Add hooks for pre/post format operations
|
{
|
||||||
- [ ] Support custom validation rules
|
"PROJECT_NAME": "derived-from-package-name",
|
||||||
- [ ] Integrate with git hooks for pre-commit formatting
|
"MONGODB_HOST": "localhost",
|
||||||
|
"MONGODB_NAME": "project-name",
|
||||||
|
"MONGODB_PORT": "random-port",
|
||||||
|
"MONGODB_USER": "defaultadmin",
|
||||||
|
"MONGODB_PASS": "defaultpass",
|
||||||
|
"S3_HOST": "localhost",
|
||||||
|
"S3_PORT": "random-port",
|
||||||
|
"S3_CONSOLE_PORT": "s3-port+1",
|
||||||
|
"S3_USER": "defaultadmin",
|
||||||
|
"S3_PASS": "defaultpass",
|
||||||
|
"S3_BUCKET": "project-name-documents"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
### 8. Improved Template Integration ⏳
|
## Command Examples
|
||||||
- [ ] Better error handling when smartscaf operations fail
|
```bash
|
||||||
- [ ] Add pre/post template hooks for custom processing
|
gitzone services start # Start all services
|
||||||
- [ ] Validate template results before proceeding with format
|
gitzone services start mongo # Start only MongoDB
|
||||||
- [ ] Support skipping template updates via configuration
|
gitzone services stop # Stop all services
|
||||||
|
gitzone services status # Check service status
|
||||||
|
gitzone services config # Show configuration
|
||||||
|
gitzone services compass # Show MongoDB Compass connection string
|
||||||
|
gitzone services logs mongo 50 # Show last 50 lines of MongoDB logs
|
||||||
|
gitzone services remove # Remove containers (preserve data)
|
||||||
|
gitzone services clean # Remove containers and data
|
||||||
|
```
|
||||||
|
|
||||||
### 9. Enhanced License Management ⏳
|
## Progress Notes
|
||||||
- [ ] Make license checking configurable (partial)
|
Implementation started: 2025-08-14
|
||||||
- [ ] Add license compatibility matrix
|
Implementation completed: 2025-08-14
|
||||||
- [x] Support license exceptions for specific packages
|
|
||||||
- [ ] Generate license report for compliance
|
|
||||||
|
|
||||||
### 10. Better Package.json Management ⏳
|
## Summary
|
||||||
- [ ] Smart dependency sorting and grouping
|
Successfully implemented the `gitzone services` command in TypeScript, providing a complete replacement for the `services.sh` shell script. The implementation includes:
|
||||||
- [ ] Automated script generation based on project type
|
|
||||||
- [ ] Support for pnpm workspace configurations
|
|
||||||
- [ ] Validation of package.json schema
|
|
||||||
|
|
||||||
### 11. Quality of Life Improvements ⏳
|
1. **Complete Docker service management** for MongoDB and MinIO containers
|
||||||
- [ ] Interactive mode for format configuration
|
2. **Smart configuration management** with automatic port assignment and conflict avoidance
|
||||||
- [ ] Undo/redo capability for format operations
|
3. **MongoDB Compass support** with network IP detection for remote connections
|
||||||
- [ ] Format presets for common scenarios
|
4. **Project isolation** using project-specific container names
|
||||||
- [x] Better progress indicators and user feedback
|
5. **Data persistence** in `.nogit/` directories
|
||||||
|
6. **Interactive confirmations** for destructive operations
|
||||||
|
7. **Comprehensive command set** including start, stop, restart, status, config, compass, logs, remove, and clean commands
|
||||||
|
|
||||||
## Implementation Status
|
The module is fully integrated into the gitzone CLI and ready for testing.
|
||||||
|
|
||||||
### ✅ Completed Features
|
|
||||||
|
|
||||||
1. **Rollback Mechanism**
|
|
||||||
- Full backup/restore functionality
|
|
||||||
- Manifest tracking and integrity checks
|
|
||||||
- CLI commands for rollback operations
|
|
||||||
|
|
||||||
2. **Plan → Action Workflow**
|
|
||||||
- Two-phase approach (analyze then execute)
|
|
||||||
- Interactive confirmation
|
|
||||||
- Dry-run support
|
|
||||||
|
|
||||||
3. **Configuration System**
|
|
||||||
- Comprehensive npmextra.json support
|
|
||||||
- Module control (skip/only/order)
|
|
||||||
- Cache configuration
|
|
||||||
- Parallel execution settings
|
|
||||||
|
|
||||||
4. **Performance Improvements**
|
|
||||||
- Parallel execution by dependency analysis
|
|
||||||
- File change caching
|
|
||||||
- Prettier batching
|
|
||||||
- Execution time tracking
|
|
||||||
|
|
||||||
5. **Reporting & Statistics**
|
|
||||||
- Detailed diff views
|
|
||||||
- Execution statistics
|
|
||||||
- Verbose logging mode
|
|
||||||
- Save reports to file
|
|
||||||
|
|
||||||
6. **Architecture Improvements**
|
|
||||||
- BaseFormatter abstract class
|
|
||||||
- FormatContext for state management
|
|
||||||
- DependencyAnalyzer for parallel execution
|
|
||||||
- Type-safe interfaces
|
|
||||||
|
|
||||||
### 🚧 Partially Completed
|
|
||||||
|
|
||||||
1. **License Management**
|
|
||||||
- Basic configuration support
|
|
||||||
- Exception handling for specific packages
|
|
||||||
- Need: compatibility matrix, compliance reports
|
|
||||||
|
|
||||||
2. **Package.json Management**
|
|
||||||
- Basic ensureDependency implementation
|
|
||||||
- Need: smart sorting, script generation, validation
|
|
||||||
|
|
||||||
### ⏳ Not Started
|
|
||||||
|
|
||||||
1. **Plugin System**
|
|
||||||
- Need to design plugin API
|
|
||||||
- Hook system for pre/post operations
|
|
||||||
- Custom validation rules
|
|
||||||
|
|
||||||
2. **Git Integration**
|
|
||||||
- Pre-commit hooks
|
|
||||||
- Automatic formatting on commit
|
|
||||||
|
|
||||||
3. **Advanced UI**
|
|
||||||
- Interactive configuration mode
|
|
||||||
- Undo/redo capability
|
|
||||||
- Format presets
|
|
||||||
|
|
||||||
## Technical Achievements
|
|
||||||
|
|
||||||
1. **Type Safety**: All new code uses TypeScript interfaces and types
|
|
||||||
2. **Error Handling**: Comprehensive try-catch blocks with rollback
|
|
||||||
3. **API Compatibility**: Updated to use latest smartfile/smartnpm APIs
|
|
||||||
4. **Testing**: Ready for comprehensive test suite
|
|
||||||
5. **Performance**: Significant improvements through caching and parallelization
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
1. Write comprehensive tests for all new functionality
|
|
||||||
2. Create user documentation for new features
|
|
||||||
3. Consider plugin API design for extensibility
|
|
||||||
4. Implement remaining Phase 3 features based on user feedback
|
|
||||||
5. Performance benchmarking and optimization
|
|
||||||
|
|
||||||
## Success Metrics Achieved
|
|
||||||
|
|
||||||
- ✅ Reduced error rates through rollback mechanism
|
|
||||||
- ✅ Faster execution through parallel processing and caching
|
|
||||||
- ✅ Enhanced user control through configuration
|
|
||||||
- ✅ Better visibility through reporting and statistics
|
|
||||||
- ✅ Improved maintainability through better architecture
|
|
||||||
1
test
Submodule
1
test
Submodule
Submodule test added at 0b89443584
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@git.zone/cli',
|
name: '@git.zone/cli',
|
||||||
version: '1.16.8',
|
version: '2.2.0',
|
||||||
description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
|
description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -131,6 +131,14 @@ export let run = async () => {
|
|||||||
modHelpers.run(argvArg);
|
modHelpers.run(argvArg);
|
||||||
});
|
});
|
||||||
|
|
||||||
|
/**
|
||||||
|
* manage development services (MongoDB, S3/MinIO)
|
||||||
|
*/
|
||||||
|
gitzoneSmartcli.addCommand('services').subscribe(async (argvArg) => {
|
||||||
|
const modServices = await import('./mod_services/index.js');
|
||||||
|
await modServices.run(argvArg);
|
||||||
|
});
|
||||||
|
|
||||||
// start parsing of the cli
|
// start parsing of the cli
|
||||||
gitzoneSmartcli.startParse();
|
gitzoneSmartcli.startParse();
|
||||||
return await done.promise;
|
return await done.promise;
|
||||||
|
|||||||
@@ -3,6 +3,8 @@
|
|||||||
import * as plugins from './mod.plugins.js';
|
import * as plugins from './mod.plugins.js';
|
||||||
import * as paths from '../paths.js';
|
import * as paths from '../paths.js';
|
||||||
import { logger } from '../gitzone.logging.js';
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
import * as helpers from './mod.helpers.js';
|
||||||
|
import * as ui from './mod.ui.js';
|
||||||
|
|
||||||
export const run = async (argvArg: any) => {
|
export const run = async (argvArg: any) => {
|
||||||
if (argvArg.format) {
|
if (argvArg.format) {
|
||||||
@@ -10,22 +12,48 @@ export const run = async (argvArg: any) => {
|
|||||||
await formatMod.run();
|
await formatMod.run();
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.log('info', `gathering facts...`);
|
ui.printHeader('🔍 Analyzing repository changes...');
|
||||||
|
|
||||||
const aidoc = new plugins.tsdoc.AiDoc();
|
const aidoc = new plugins.tsdoc.AiDoc();
|
||||||
await aidoc.start();
|
await aidoc.start();
|
||||||
|
|
||||||
const nextCommitObject = await aidoc.buildNextCommitObject(paths.cwd);
|
const nextCommitObject = await aidoc.buildNextCommitObject(paths.cwd);
|
||||||
|
|
||||||
logger.log(
|
await aidoc.stop();
|
||||||
'info',
|
|
||||||
`---------
|
ui.printRecommendation({
|
||||||
Next recommended commit would be:
|
recommendedNextVersion: nextCommitObject.recommendedNextVersion,
|
||||||
===========
|
recommendedNextVersionLevel: nextCommitObject.recommendedNextVersionLevel,
|
||||||
-> ${nextCommitObject.recommendedNextVersion}:
|
recommendedNextVersionScope: nextCommitObject.recommendedNextVersionScope,
|
||||||
-> ${nextCommitObject.recommendedNextVersionLevel}(${nextCommitObject.recommendedNextVersionScope}): ${nextCommitObject.recommendedNextVersionMessage}
|
recommendedNextVersionMessage: nextCommitObject.recommendedNextVersionMessage,
|
||||||
===========
|
});
|
||||||
`,
|
|
||||||
);
|
let answerBucket: plugins.smartinteract.AnswerBucket;
|
||||||
|
|
||||||
|
// Check if -y or --yes flag is set to auto-accept recommendations
|
||||||
|
if (argvArg.y || argvArg.yes) {
|
||||||
|
// Auto-mode: create AnswerBucket programmatically
|
||||||
|
logger.log('info', '✓ Auto-accepting AI recommendations (--yes flag)');
|
||||||
|
|
||||||
|
answerBucket = new plugins.smartinteract.AnswerBucket();
|
||||||
|
answerBucket.addAnswer({
|
||||||
|
name: 'commitType',
|
||||||
|
value: nextCommitObject.recommendedNextVersionLevel,
|
||||||
|
});
|
||||||
|
answerBucket.addAnswer({
|
||||||
|
name: 'commitScope',
|
||||||
|
value: nextCommitObject.recommendedNextVersionScope,
|
||||||
|
});
|
||||||
|
answerBucket.addAnswer({
|
||||||
|
name: 'commitDescription',
|
||||||
|
value: nextCommitObject.recommendedNextVersionMessage,
|
||||||
|
});
|
||||||
|
answerBucket.addAnswer({
|
||||||
|
name: 'pushToOrigin',
|
||||||
|
value: !!(argvArg.p || argvArg.push), // Only push if -p flag also provided
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// Interactive mode: prompt user for input
|
||||||
const commitInteract = new plugins.smartinteract.SmartInteract();
|
const commitInteract = new plugins.smartinteract.SmartInteract();
|
||||||
commitInteract.addQuestions([
|
commitInteract.addQuestions([
|
||||||
{
|
{
|
||||||
@@ -54,7 +82,8 @@ export const run = async (argvArg: any) => {
|
|||||||
default: true,
|
default: true,
|
||||||
},
|
},
|
||||||
]);
|
]);
|
||||||
const answerBucket = await commitInteract.runQueue();
|
answerBucket = await commitInteract.runQueue();
|
||||||
|
}
|
||||||
const commitString = createCommitStringFromAnswerBucket(answerBucket);
|
const commitString = createCommitStringFromAnswerBucket(answerBucket);
|
||||||
const commitVersionType = (() => {
|
const commitVersionType = (() => {
|
||||||
switch (answerBucket.getAnswerFor('commitType')) {
|
switch (answerBucket.getAnswerFor('commitType')) {
|
||||||
@@ -67,20 +96,30 @@ export const run = async (argvArg: any) => {
|
|||||||
}
|
}
|
||||||
})();
|
})();
|
||||||
|
|
||||||
logger.log('info', `OK! Creating commit with message '${commitString}'`);
|
ui.printHeader('✨ Creating Semantic Commit');
|
||||||
|
ui.printCommitMessage(commitString);
|
||||||
const smartshellInstance = new plugins.smartshell.Smartshell({
|
const smartshellInstance = new plugins.smartshell.Smartshell({
|
||||||
executor: 'bash',
|
executor: 'bash',
|
||||||
sourceFilePaths: [],
|
sourceFilePaths: [],
|
||||||
});
|
});
|
||||||
|
|
||||||
logger.log('info', `Baking commitinfo into code ...`);
|
// Determine total steps (6 if pushing, 5 if not)
|
||||||
|
const totalSteps = answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true') ? 6 : 5;
|
||||||
|
let currentStep = 0;
|
||||||
|
|
||||||
|
// Step 1: Baking commitinfo
|
||||||
|
currentStep++;
|
||||||
|
ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'in-progress');
|
||||||
const commitInfo = new plugins.commitinfo.CommitInfo(
|
const commitInfo = new plugins.commitinfo.CommitInfo(
|
||||||
paths.cwd,
|
paths.cwd,
|
||||||
commitVersionType,
|
commitVersionType,
|
||||||
);
|
);
|
||||||
await commitInfo.writeIntoPotentialDirs();
|
await commitInfo.writeIntoPotentialDirs();
|
||||||
|
ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'done');
|
||||||
|
|
||||||
logger.log('info', `Writing changelog.md ...`);
|
// Step 2: Writing changelog
|
||||||
|
currentStep++;
|
||||||
|
ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'in-progress');
|
||||||
let changelog = nextCommitObject.changelog;
|
let changelog = nextCommitObject.changelog;
|
||||||
changelog = changelog.replaceAll(
|
changelog = changelog.replaceAll(
|
||||||
'{{nextVersion}}',
|
'{{nextVersion}}',
|
||||||
@@ -103,21 +142,58 @@ export const run = async (argvArg: any) => {
|
|||||||
changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
|
changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
|
||||||
}
|
}
|
||||||
|
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
changelog,
|
.file(plugins.path.join(paths.cwd, `changelog.md`))
|
||||||
plugins.path.join(paths.cwd, `changelog.md`),
|
.encoding('utf8')
|
||||||
);
|
.write(changelog);
|
||||||
|
ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'done');
|
||||||
|
|
||||||
logger.log('info', `Staging files for commit:`);
|
// Step 3: Staging files
|
||||||
|
currentStep++;
|
||||||
|
ui.printStep(currentStep, totalSteps, '📦 Staging files', 'in-progress');
|
||||||
await smartshellInstance.exec(`git add -A`);
|
await smartshellInstance.exec(`git add -A`);
|
||||||
|
ui.printStep(currentStep, totalSteps, '📦 Staging files', 'done');
|
||||||
|
|
||||||
|
// Step 4: Creating commit
|
||||||
|
currentStep++;
|
||||||
|
ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'in-progress');
|
||||||
await smartshellInstance.exec(`git commit -m "${commitString}"`);
|
await smartshellInstance.exec(`git commit -m "${commitString}"`);
|
||||||
await smartshellInstance.exec(`npm version ${commitVersionType}`);
|
ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'done');
|
||||||
|
|
||||||
|
// Step 5: Bumping version
|
||||||
|
currentStep++;
|
||||||
|
const projectType = await helpers.detectProjectType();
|
||||||
|
const newVersion = await helpers.bumpProjectVersion(projectType, commitVersionType, currentStep, totalSteps);
|
||||||
|
|
||||||
|
// Step 6: Push to remote (optional)
|
||||||
|
const currentBranch = await helpers.detectCurrentBranch();
|
||||||
if (
|
if (
|
||||||
answerBucket.getAnswerFor('pushToOrigin') &&
|
answerBucket.getAnswerFor('pushToOrigin') &&
|
||||||
!(process.env.CI === 'true')
|
!(process.env.CI === 'true')
|
||||||
) {
|
) {
|
||||||
await smartshellInstance.exec(`git push origin master --follow-tags`);
|
currentStep++;
|
||||||
|
ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'in-progress');
|
||||||
|
await smartshellInstance.exec(`git push origin ${currentBranch} --follow-tags`);
|
||||||
|
ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'done');
|
||||||
}
|
}
|
||||||
|
|
||||||
|
console.log(''); // Add spacing before summary
|
||||||
|
|
||||||
|
// Get commit SHA for summary
|
||||||
|
const commitShaResult = await smartshellInstance.exec('git rev-parse --short HEAD');
|
||||||
|
const commitSha = commitShaResult.stdout.trim();
|
||||||
|
|
||||||
|
// Print final summary
|
||||||
|
ui.printSummary({
|
||||||
|
projectType,
|
||||||
|
branch: currentBranch,
|
||||||
|
commitType: answerBucket.getAnswerFor('commitType'),
|
||||||
|
commitScope: answerBucket.getAnswerFor('commitScope'),
|
||||||
|
commitMessage: answerBucket.getAnswerFor('commitDescription'),
|
||||||
|
newVersion: newVersion,
|
||||||
|
commitSha: commitSha,
|
||||||
|
pushed: answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true'),
|
||||||
|
});
|
||||||
};
|
};
|
||||||
|
|
||||||
const createCommitStringFromAnswerBucket = (
|
const createCommitStringFromAnswerBucket = (
|
||||||
|
|||||||
218
ts/mod_commit/mod.helpers.ts
Normal file
218
ts/mod_commit/mod.helpers.ts
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
import * as plugins from './mod.plugins.js';
|
||||||
|
import * as paths from '../paths.js';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
import * as ui from './mod.ui.js';
|
||||||
|
|
||||||
|
export type ProjectType = 'npm' | 'deno' | 'both' | 'none';
|
||||||
|
export type VersionType = 'patch' | 'minor' | 'major';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detects the current git branch
|
||||||
|
* @returns The current branch name, defaults to 'master' if detection fails
|
||||||
|
*/
|
||||||
|
export async function detectCurrentBranch(): Promise<string> {
|
||||||
|
try {
|
||||||
|
const smartshellInstance = new plugins.smartshell.Smartshell({
|
||||||
|
executor: 'bash',
|
||||||
|
sourceFilePaths: [],
|
||||||
|
});
|
||||||
|
const result = await smartshellInstance.exec('git branch --show-current');
|
||||||
|
const branchName = result.stdout.trim();
|
||||||
|
|
||||||
|
if (!branchName) {
|
||||||
|
logger.log('warn', 'Could not detect current branch, falling back to "master"');
|
||||||
|
return 'master';
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('info', `Detected current branch: ${branchName}`);
|
||||||
|
return branchName;
|
||||||
|
} catch (error) {
|
||||||
|
logger.log('warn', `Failed to detect branch: ${error.message}, falling back to "master"`);
|
||||||
|
return 'master';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Detects the project type based on presence of package.json and/or deno.json
|
||||||
|
* @returns The project type
|
||||||
|
*/
|
||||||
|
export async function detectProjectType(): Promise<ProjectType> {
|
||||||
|
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
|
||||||
|
const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
|
||||||
|
|
||||||
|
const hasPackageJson = await plugins.smartfs.file(packageJsonPath).exists();
|
||||||
|
const hasDenoJson = await plugins.smartfs.file(denoJsonPath).exists();
|
||||||
|
|
||||||
|
if (hasPackageJson && hasDenoJson) {
|
||||||
|
logger.log('info', 'Detected dual project (npm + deno)');
|
||||||
|
return 'both';
|
||||||
|
} else if (hasPackageJson) {
|
||||||
|
logger.log('info', 'Detected npm project');
|
||||||
|
return 'npm';
|
||||||
|
} else if (hasDenoJson) {
|
||||||
|
logger.log('info', 'Detected deno project');
|
||||||
|
return 'deno';
|
||||||
|
} else {
|
||||||
|
throw new Error('No package.json or deno.json found in current directory');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parses a semantic version string and bumps it according to the version type
|
||||||
|
* @param currentVersion Current version string (e.g., "1.2.3")
|
||||||
|
* @param versionType Type of version bump
|
||||||
|
* @returns New version string
|
||||||
|
*/
|
||||||
|
function calculateNewVersion(currentVersion: string, versionType: VersionType): string {
|
||||||
|
const versionMatch = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)/);
|
||||||
|
|
||||||
|
if (!versionMatch) {
|
||||||
|
throw new Error(`Invalid version format: ${currentVersion}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
let [, major, minor, patch] = versionMatch.map(Number);
|
||||||
|
|
||||||
|
switch (versionType) {
|
||||||
|
case 'major':
|
||||||
|
major += 1;
|
||||||
|
minor = 0;
|
||||||
|
patch = 0;
|
||||||
|
break;
|
||||||
|
case 'minor':
|
||||||
|
minor += 1;
|
||||||
|
patch = 0;
|
||||||
|
break;
|
||||||
|
case 'patch':
|
||||||
|
patch += 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${major}.${minor}.${patch}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reads the current version from package.json or deno.json
|
||||||
|
* @param projectType The project type to determine which file to read
|
||||||
|
* @returns The current version string
|
||||||
|
*/
|
||||||
|
async function readCurrentVersion(projectType: ProjectType): Promise<string> {
|
||||||
|
if (projectType === 'npm' || projectType === 'both') {
|
||||||
|
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
|
||||||
|
const content = (await plugins.smartfs
|
||||||
|
.file(packageJsonPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
const packageJson = JSON.parse(content) as { version?: string };
|
||||||
|
|
||||||
|
if (!packageJson.version) {
|
||||||
|
throw new Error('package.json does not contain a version field');
|
||||||
|
}
|
||||||
|
return packageJson.version;
|
||||||
|
} else {
|
||||||
|
const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
|
||||||
|
const content = (await plugins.smartfs
|
||||||
|
.file(denoJsonPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
const denoConfig = JSON.parse(content) as { version?: string };
|
||||||
|
|
||||||
|
if (!denoConfig.version) {
|
||||||
|
throw new Error('deno.json does not contain a version field');
|
||||||
|
}
|
||||||
|
return denoConfig.version;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Updates the version field in a JSON file (package.json or deno.json)
|
||||||
|
* @param filePath Path to the JSON file
|
||||||
|
* @param newVersion The new version to write
|
||||||
|
*/
|
||||||
|
async function updateVersionFile(filePath: string, newVersion: string): Promise<void> {
|
||||||
|
const content = (await plugins.smartfs
|
||||||
|
.file(filePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
const config = JSON.parse(content) as { version?: string };
|
||||||
|
config.version = newVersion;
|
||||||
|
await plugins.smartfs
|
||||||
|
.file(filePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(JSON.stringify(config, null, 2) + '\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bumps the project version based on project type
|
||||||
|
* Handles npm-only, deno-only, and dual projects with unified logic
|
||||||
|
* @param projectType The detected project type
|
||||||
|
* @param versionType The type of version bump
|
||||||
|
* @param currentStep The current step number for progress display
|
||||||
|
* @param totalSteps The total number of steps for progress display
|
||||||
|
* @returns The new version string
|
||||||
|
*/
|
||||||
|
export async function bumpProjectVersion(
|
||||||
|
projectType: ProjectType,
|
||||||
|
versionType: VersionType,
|
||||||
|
currentStep?: number,
|
||||||
|
totalSteps?: number
|
||||||
|
): Promise<string> {
|
||||||
|
if (projectType === 'none') {
|
||||||
|
throw new Error('Cannot bump version: no package.json or deno.json found');
|
||||||
|
}
|
||||||
|
|
||||||
|
const projectEmoji = projectType === 'npm' ? '📦' : projectType === 'deno' ? '🦕' : '🔀';
|
||||||
|
const description = `🏷️ Bumping version (${projectEmoji} ${projectType})`;
|
||||||
|
|
||||||
|
if (currentStep && totalSteps) {
|
||||||
|
ui.printStep(currentStep, totalSteps, description, 'in-progress');
|
||||||
|
}
|
||||||
|
|
||||||
|
const smartshellInstance = new plugins.smartshell.Smartshell({
|
||||||
|
executor: 'bash',
|
||||||
|
sourceFilePaths: [],
|
||||||
|
});
|
||||||
|
|
||||||
|
try {
|
||||||
|
// 1. Read current version
|
||||||
|
const currentVersion = await readCurrentVersion(projectType);
|
||||||
|
|
||||||
|
// 2. Calculate new version (reuse existing function!)
|
||||||
|
const newVersion = calculateNewVersion(currentVersion, versionType);
|
||||||
|
|
||||||
|
logger.log('info', `Bumping version: ${currentVersion} → ${newVersion}`);
|
||||||
|
|
||||||
|
// 3. Determine which files to update
|
||||||
|
const filesToUpdate: string[] = [];
|
||||||
|
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
|
||||||
|
const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
|
||||||
|
|
||||||
|
if (projectType === 'npm' || projectType === 'both') {
|
||||||
|
await updateVersionFile(packageJsonPath, newVersion);
|
||||||
|
filesToUpdate.push('package.json');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (projectType === 'deno' || projectType === 'both') {
|
||||||
|
await updateVersionFile(denoJsonPath, newVersion);
|
||||||
|
filesToUpdate.push('deno.json');
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Stage all updated files
|
||||||
|
await smartshellInstance.exec(`git add ${filesToUpdate.join(' ')}`);
|
||||||
|
|
||||||
|
// 5. Create version commit
|
||||||
|
await smartshellInstance.exec(`git commit -m "v${newVersion}"`);
|
||||||
|
|
||||||
|
// 6. Create version tag
|
||||||
|
await smartshellInstance.exec(`git tag v${newVersion} -m "v${newVersion}"`);
|
||||||
|
|
||||||
|
logger.log('info', `Created commit and tag v${newVersion}`);
|
||||||
|
|
||||||
|
if (currentStep && totalSteps) {
|
||||||
|
ui.printStep(currentStep, totalSteps, description, 'done');
|
||||||
|
}
|
||||||
|
|
||||||
|
return newVersion;
|
||||||
|
} catch (error) {
|
||||||
|
throw new Error(`Failed to bump project version: ${error.message}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
196
ts/mod_commit/mod.ui.ts
Normal file
196
ts/mod_commit/mod.ui.ts
Normal file
@@ -0,0 +1,196 @@
|
|||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* UI helper module for beautiful CLI output
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
 * Data consumed by printSummary() to render the final commit summary box.
 */
interface ICommitSummary {
  projectType: string; // rendered via getProjectTypeEmoji ('npm' | 'deno' | 'both' | other)
  branch: string; // git branch name, shown with a 🌿 prefix
  commitType: string; // conventional-commit type, rendered via getCommitTypeEmoji
  commitScope: string; // conventional-commit scope, shown with a 📍 prefix
  commitMessage: string; // commit message body (not printed by printSummary itself)
  newVersion: string; // bumped version; printed as `v${newVersion}`
  commitSha?: string; // set once the commit exists; enables SHA line and commit link
  pushed: boolean; // whether the commit was pushed to the remote
  repoUrl?: string; // base repository URL; combined with commitSha to build a link
}

/**
 * AI-generated version-bump recommendation consumed by printRecommendation().
 */
interface IRecommendation {
  recommendedNextVersion: string; // printed with a 'v' prefix, e.g. 'v1.2.3'
  recommendedNextVersionLevel: string; // bump level — presumably 'patch'/'minor'/'major'; confirm with producer
  recommendedNextVersionScope: string; // suggested conventional-commit scope
  recommendedNextVersionMessage: string; // suggested commit message
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print a header with a box around it
|
||||||
|
*/
|
||||||
|
export function printHeader(title: string): void {
|
||||||
|
const width = 57;
|
||||||
|
const padding = Math.max(0, width - title.length - 2);
|
||||||
|
const leftPad = Math.floor(padding / 2);
|
||||||
|
const rightPad = padding - leftPad;
|
||||||
|
|
||||||
|
console.log('');
|
||||||
|
console.log('╭─' + '─'.repeat(width) + '─╮');
|
||||||
|
console.log('│ ' + title + ' '.repeat(rightPad + leftPad) + ' │');
|
||||||
|
console.log('╰─' + '─'.repeat(width) + '─╯');
|
||||||
|
console.log('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print a section with a border
|
||||||
|
*/
|
||||||
|
export function printSection(title: string, lines: string[]): void {
|
||||||
|
const width = 59;
|
||||||
|
|
||||||
|
console.log('┌─ ' + title + ' ' + '─'.repeat(Math.max(0, width - title.length - 3)) + '┐');
|
||||||
|
console.log('│' + ' '.repeat(width) + '│');
|
||||||
|
|
||||||
|
for (const line of lines) {
|
||||||
|
const padding = width - line.length;
|
||||||
|
console.log('│ ' + line + ' '.repeat(Math.max(0, padding - 2)) + '│');
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('│' + ' '.repeat(width) + '│');
|
||||||
|
console.log('└─' + '─'.repeat(width) + '─┘');
|
||||||
|
console.log('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print AI recommendations in a nice box
|
||||||
|
*/
|
||||||
|
export function printRecommendation(recommendation: IRecommendation): void {
|
||||||
|
const lines = [
|
||||||
|
`Suggested Version: v${recommendation.recommendedNextVersion}`,
|
||||||
|
`Suggested Type: ${recommendation.recommendedNextVersionLevel}`,
|
||||||
|
`Suggested Scope: ${recommendation.recommendedNextVersionScope}`,
|
||||||
|
`Suggested Message: ${recommendation.recommendedNextVersionMessage}`,
|
||||||
|
];
|
||||||
|
|
||||||
|
printSection('📊 AI Recommendations', lines);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print a progress step
|
||||||
|
*/
|
||||||
|
export function printStep(
|
||||||
|
current: number,
|
||||||
|
total: number,
|
||||||
|
description: string,
|
||||||
|
status: 'in-progress' | 'done' | 'error'
|
||||||
|
): void {
|
||||||
|
const statusIcon = status === 'done' ? '✓' : status === 'error' ? '✗' : '⏳';
|
||||||
|
const dots = '.'.repeat(Math.max(0, 40 - description.length));
|
||||||
|
|
||||||
|
console.log(` [${current}/${total}] ${description}${dots} ${statusIcon}`);
|
||||||
|
|
||||||
|
// Clear the line on next update if in progress
|
||||||
|
if (status === 'in-progress') {
|
||||||
|
process.stdout.write('\x1b[1A'); // Move cursor up one line
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get emoji for project type
|
||||||
|
*/
|
||||||
|
function getProjectTypeEmoji(projectType: string): string {
|
||||||
|
switch (projectType) {
|
||||||
|
case 'npm':
|
||||||
|
return '📦 npm';
|
||||||
|
case 'deno':
|
||||||
|
return '🦕 Deno';
|
||||||
|
case 'both':
|
||||||
|
return '🔀 npm + Deno';
|
||||||
|
default:
|
||||||
|
return '❓ Unknown';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get emoji for commit type
|
||||||
|
*/
|
||||||
|
function getCommitTypeEmoji(commitType: string): string {
|
||||||
|
switch (commitType) {
|
||||||
|
case 'fix':
|
||||||
|
return '🔧 fix';
|
||||||
|
case 'feat':
|
||||||
|
return '✨ feat';
|
||||||
|
case 'BREAKING CHANGE':
|
||||||
|
return '💥 BREAKING CHANGE';
|
||||||
|
default:
|
||||||
|
return commitType;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print final commit summary
|
||||||
|
*/
|
||||||
|
export function printSummary(summary: ICommitSummary): void {
|
||||||
|
const lines = [
|
||||||
|
`Project Type: ${getProjectTypeEmoji(summary.projectType)}`,
|
||||||
|
`Branch: 🌿 ${summary.branch}`,
|
||||||
|
`Commit Type: ${getCommitTypeEmoji(summary.commitType)}`,
|
||||||
|
`Scope: 📍 ${summary.commitScope}`,
|
||||||
|
`New Version: 🏷️ v${summary.newVersion}`,
|
||||||
|
];
|
||||||
|
|
||||||
|
if (summary.commitSha) {
|
||||||
|
lines.push(`Commit SHA: 📌 ${summary.commitSha}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (summary.pushed) {
|
||||||
|
lines.push(`Remote: ✓ Pushed successfully`);
|
||||||
|
} else {
|
||||||
|
lines.push(`Remote: ⊘ Not pushed (local only)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (summary.repoUrl && summary.commitSha) {
|
||||||
|
lines.push('');
|
||||||
|
lines.push(`View at: ${summary.repoUrl}/commit/${summary.commitSha}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
printSection('✅ Commit Summary', lines);
|
||||||
|
|
||||||
|
if (summary.pushed) {
|
||||||
|
console.log('🎉 All done! Your changes are committed and pushed.\n');
|
||||||
|
} else {
|
||||||
|
console.log('✓ Commit created successfully.\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print an info message with consistent formatting
|
||||||
|
*/
|
||||||
|
export function printInfo(message: string): void {
|
||||||
|
console.log(` ℹ️ ${message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print a success message
|
||||||
|
*/
|
||||||
|
export function printSuccess(message: string): void {
|
||||||
|
console.log(` ✓ ${message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print a warning message
|
||||||
|
*/
|
||||||
|
export function printWarning(message: string): void {
|
||||||
|
logger.log('warn', `⚠️ ${message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print an error message
|
||||||
|
*/
|
||||||
|
export function printError(message: string): void {
|
||||||
|
logger.log('error', `✗ ${message}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print commit message being created
|
||||||
|
*/
|
||||||
|
export function printCommitMessage(commitString: string): void {
|
||||||
|
console.log(`\n 📝 Commit: ${commitString}\n`);
|
||||||
|
}
|
||||||
@@ -53,15 +53,27 @@ export abstract class BaseFormatter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
protected async modifyFile(filepath: string, content: string): Promise<void> {
|
protected async modifyFile(filepath: string, content: string): Promise<void> {
|
||||||
await plugins.smartfile.memory.toFs(content, filepath);
|
// Validate filepath before writing
|
||||||
|
if (!filepath || filepath.trim() === '') {
|
||||||
|
throw new Error(`Invalid empty filepath in modifyFile`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure we have a proper path with directory component
|
||||||
|
// If the path has no directory component (e.g., "package.json"), prepend "./"
|
||||||
|
let normalizedPath = filepath;
|
||||||
|
if (!plugins.path.parse(filepath).dir) {
|
||||||
|
normalizedPath = './' + filepath;
|
||||||
|
}
|
||||||
|
|
||||||
|
await plugins.smartfs.file(normalizedPath).encoding('utf8').write(content);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected async createFile(filepath: string, content: string): Promise<void> {
|
protected async createFile(filepath: string, content: string): Promise<void> {
|
||||||
await plugins.smartfile.memory.toFs(content, filepath);
|
await plugins.smartfs.file(filepath).encoding('utf8').write(content);
|
||||||
}
|
}
|
||||||
|
|
||||||
protected async deleteFile(filepath: string): Promise<void> {
|
protected async deleteFile(filepath: string): Promise<void> {
|
||||||
await plugins.smartfile.fs.remove(filepath);
|
await plugins.smartfs.file(filepath).delete();
|
||||||
}
|
}
|
||||||
|
|
||||||
protected async shouldProcessFile(filepath: string): Promise<boolean> {
|
protected async shouldProcessFile(filepath: string): Promise<boolean> {
|
||||||
|
|||||||
@@ -25,7 +25,7 @@ export class ChangeCache {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async initialize(): Promise<void> {
|
async initialize(): Promise<void> {
|
||||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
await plugins.smartfs.directory(this.cacheDir).recursive().create();
|
||||||
}
|
}
|
||||||
|
|
||||||
async getManifest(): Promise<ICacheManifest> {
|
async getManifest(): Promise<ICacheManifest> {
|
||||||
@@ -35,13 +35,16 @@ export class ChangeCache {
|
|||||||
files: [],
|
files: [],
|
||||||
};
|
};
|
||||||
|
|
||||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
const exists = await plugins.smartfs.file(this.manifestPath).exists();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
return defaultManifest;
|
return defaultManifest;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
const content = (await plugins.smartfs
|
||||||
|
.file(this.manifestPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
const manifest = JSON.parse(content);
|
const manifest = JSON.parse(content);
|
||||||
|
|
||||||
// Validate the manifest structure
|
// Validate the manifest structure
|
||||||
@@ -57,7 +60,7 @@ export class ChangeCache {
|
|||||||
);
|
);
|
||||||
// Try to delete the corrupted file
|
// Try to delete the corrupted file
|
||||||
try {
|
try {
|
||||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
await plugins.smartfs.file(this.manifestPath).delete();
|
||||||
} catch (removeError) {
|
} catch (removeError) {
|
||||||
// Ignore removal errors
|
// Ignore removal errors
|
||||||
}
|
}
|
||||||
@@ -72,11 +75,14 @@ export class ChangeCache {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Ensure directory exists
|
// Ensure directory exists
|
||||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
await plugins.smartfs.directory(this.cacheDir).recursive().create();
|
||||||
|
|
||||||
// Write directly with proper JSON stringification
|
// Write directly with proper JSON stringification
|
||||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
await plugins.smartfs
|
||||||
|
.file(this.manifestPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(jsonContent);
|
||||||
}
|
}
|
||||||
|
|
||||||
async hasFileChanged(filePath: string): Promise<boolean> {
|
async hasFileChanged(filePath: string): Promise<boolean> {
|
||||||
@@ -85,20 +91,23 @@ export class ChangeCache {
|
|||||||
: plugins.path.join(paths.cwd, filePath);
|
: plugins.path.join(paths.cwd, filePath);
|
||||||
|
|
||||||
// Check if file exists
|
// Check if file exists
|
||||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
const exists = await plugins.smartfs.file(absolutePath).exists();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
return true; // File doesn't exist, so it's "changed" (will be created)
|
return true; // File doesn't exist, so it's "changed" (will be created)
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get current file stats
|
// Get current file stats
|
||||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||||
|
|
||||||
// Skip directories
|
// Skip directories
|
||||||
if (stats.isDirectory()) {
|
if (stats.isDirectory) {
|
||||||
return false; // Directories are not processed
|
return false; // Directories are not processed
|
||||||
}
|
}
|
||||||
|
|
||||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
const content = (await plugins.smartfs
|
||||||
|
.file(absolutePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
const currentChecksum = this.calculateChecksum(content);
|
const currentChecksum = this.calculateChecksum(content);
|
||||||
|
|
||||||
// Get cached info
|
// Get cached info
|
||||||
@@ -113,7 +122,7 @@ export class ChangeCache {
|
|||||||
return (
|
return (
|
||||||
cachedFile.checksum !== currentChecksum ||
|
cachedFile.checksum !== currentChecksum ||
|
||||||
cachedFile.size !== stats.size ||
|
cachedFile.size !== stats.size ||
|
||||||
cachedFile.modified !== stats.mtimeMs
|
cachedFile.modified !== stats.mtime.getTime()
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -123,14 +132,17 @@ export class ChangeCache {
|
|||||||
: plugins.path.join(paths.cwd, filePath);
|
: plugins.path.join(paths.cwd, filePath);
|
||||||
|
|
||||||
// Get current file stats
|
// Get current file stats
|
||||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||||
|
|
||||||
// Skip directories
|
// Skip directories
|
||||||
if (stats.isDirectory()) {
|
if (stats.isDirectory) {
|
||||||
return; // Don't cache directories
|
return; // Don't cache directories
|
||||||
}
|
}
|
||||||
|
|
||||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
const content = (await plugins.smartfs
|
||||||
|
.file(absolutePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
const checksum = this.calculateChecksum(content);
|
const checksum = this.calculateChecksum(content);
|
||||||
|
|
||||||
// Update manifest
|
// Update manifest
|
||||||
@@ -140,7 +152,7 @@ export class ChangeCache {
|
|||||||
const cacheEntry: IFileCache = {
|
const cacheEntry: IFileCache = {
|
||||||
path: filePath,
|
path: filePath,
|
||||||
checksum,
|
checksum,
|
||||||
modified: stats.mtimeMs,
|
modified: stats.mtime.getTime(),
|
||||||
size: stats.size,
|
size: stats.size,
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -176,7 +188,7 @@ export class ChangeCache {
|
|||||||
? file.path
|
? file.path
|
||||||
: plugins.path.join(paths.cwd, file.path);
|
: plugins.path.join(paths.cwd, file.path);
|
||||||
|
|
||||||
if (await plugins.smartfile.fs.fileExists(absolutePath)) {
|
if (await plugins.smartfs.file(absolutePath).exists()) {
|
||||||
validFiles.push(file);
|
validFiles.push(file);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -21,14 +21,15 @@ export class DiffReporter {
|
|||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const exists = await plugins.smartfile.fs.fileExists(change.path);
|
const exists = await plugins.smartfs.file(change.path).exists();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
const currentContent = await plugins.smartfile.fs.toStringSync(
|
const currentContent = (await plugins.smartfs
|
||||||
change.path,
|
.file(change.path)
|
||||||
);
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
|
||||||
// For planned changes, we need the new content
|
// For planned changes, we need the new content
|
||||||
if (!change.content) {
|
if (!change.content) {
|
||||||
@@ -107,10 +108,10 @@ export class DiffReporter {
|
|||||||
})),
|
})),
|
||||||
};
|
};
|
||||||
|
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
JSON.stringify(report, null, 2),
|
.file(outputPath)
|
||||||
outputPath,
|
.encoding('utf8')
|
||||||
);
|
.write(JSON.stringify(report, null, 2));
|
||||||
logger.log('info', `Diff report saved to ${outputPath}`);
|
logger.log('info', `Diff report saved to ${outputPath}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -192,10 +192,10 @@ export class FormatStats {
|
|||||||
moduleStats: Array.from(this.stats.moduleStats.values()),
|
moduleStats: Array.from(this.stats.moduleStats.values()),
|
||||||
};
|
};
|
||||||
|
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
JSON.stringify(report, null, 2),
|
.file(outputPath)
|
||||||
outputPath,
|
.encoding('utf8')
|
||||||
);
|
.write(JSON.stringify(report, null, 2));
|
||||||
logger.log('info', `Statistics report saved to ${outputPath}`);
|
logger.log('info', `Statistics report saved to ${outputPath}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -36,21 +36,27 @@ export class RollbackManager {
|
|||||||
: plugins.path.join(paths.cwd, filepath);
|
: plugins.path.join(paths.cwd, filepath);
|
||||||
|
|
||||||
// Check if file exists
|
// Check if file exists
|
||||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
const exists = await plugins.smartfs.file(absolutePath).exists();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
// File doesn't exist yet (will be created), so we skip backup
|
// File doesn't exist yet (will be created), so we skip backup
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Read file content and metadata
|
// Read file content and metadata
|
||||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
const content = (await plugins.smartfs
|
||||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
.file(absolutePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||||
const checksum = this.calculateChecksum(content);
|
const checksum = this.calculateChecksum(content);
|
||||||
|
|
||||||
// Create backup
|
// Create backup
|
||||||
const backupPath = this.getBackupPath(operationId, filepath);
|
const backupPath = this.getBackupPath(operationId, filepath);
|
||||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(backupPath));
|
await plugins.smartfs
|
||||||
await plugins.smartfile.memory.toFs(content, backupPath);
|
.directory(plugins.path.dirname(backupPath))
|
||||||
|
.recursive()
|
||||||
|
.create();
|
||||||
|
await plugins.smartfs.file(backupPath).encoding('utf8').write(content);
|
||||||
|
|
||||||
// Update operation
|
// Update operation
|
||||||
operation.files.push({
|
operation.files.push({
|
||||||
@@ -84,7 +90,10 @@ export class RollbackManager {
|
|||||||
|
|
||||||
// Verify backup integrity
|
// Verify backup integrity
|
||||||
const backupPath = this.getBackupPath(operationId, file.path);
|
const backupPath = this.getBackupPath(operationId, file.path);
|
||||||
const backupContent = plugins.smartfile.fs.toStringSync(backupPath);
|
const backupContent = await plugins.smartfs
|
||||||
|
.file(backupPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read();
|
||||||
const backupChecksum = this.calculateChecksum(backupContent);
|
const backupChecksum = this.calculateChecksum(backupContent);
|
||||||
|
|
||||||
if (backupChecksum !== file.checksum) {
|
if (backupChecksum !== file.checksum) {
|
||||||
@@ -92,7 +101,10 @@ export class RollbackManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Restore file
|
// Restore file
|
||||||
await plugins.smartfile.memory.toFs(file.originalContent, absolutePath);
|
await plugins.smartfs
|
||||||
|
.file(absolutePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(file.originalContent);
|
||||||
|
|
||||||
// Restore permissions
|
// Restore permissions
|
||||||
const mode = parseInt(file.permissions, 8);
|
const mode = parseInt(file.permissions, 8);
|
||||||
@@ -129,7 +141,7 @@ export class RollbackManager {
|
|||||||
'operations',
|
'operations',
|
||||||
operation.id,
|
operation.id,
|
||||||
);
|
);
|
||||||
await plugins.smartfile.fs.remove(operationDir);
|
await plugins.smartfs.directory(operationDir).recursive().delete();
|
||||||
|
|
||||||
// Remove from manifest
|
// Remove from manifest
|
||||||
manifest.operations = manifest.operations.filter(
|
manifest.operations = manifest.operations.filter(
|
||||||
@@ -148,13 +160,16 @@ export class RollbackManager {
|
|||||||
|
|
||||||
for (const file of operation.files) {
|
for (const file of operation.files) {
|
||||||
const backupPath = this.getBackupPath(operationId, file.path);
|
const backupPath = this.getBackupPath(operationId, file.path);
|
||||||
const exists = await plugins.smartfile.fs.fileExists(backupPath);
|
const exists = await plugins.smartfs.file(backupPath).exists();
|
||||||
|
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
const content = plugins.smartfile.fs.toStringSync(backupPath);
|
const content = await plugins.smartfs
|
||||||
|
.file(backupPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read();
|
||||||
const checksum = this.calculateChecksum(content);
|
const checksum = this.calculateChecksum(content);
|
||||||
|
|
||||||
if (checksum !== file.checksum) {
|
if (checksum !== file.checksum) {
|
||||||
@@ -171,10 +186,11 @@ export class RollbackManager {
|
|||||||
}
|
}
|
||||||
|
|
||||||
private async ensureBackupDir(): Promise<void> {
|
private async ensureBackupDir(): Promise<void> {
|
||||||
await plugins.smartfile.fs.ensureDir(this.backupDir);
|
await plugins.smartfs.directory(this.backupDir).recursive().create();
|
||||||
await plugins.smartfile.fs.ensureDir(
|
await plugins.smartfs
|
||||||
plugins.path.join(this.backupDir, 'operations'),
|
.directory(plugins.path.join(this.backupDir, 'operations'))
|
||||||
);
|
.recursive()
|
||||||
|
.create();
|
||||||
}
|
}
|
||||||
|
|
||||||
private generateOperationId(): string {
|
private generateOperationId(): string {
|
||||||
@@ -204,13 +220,16 @@ export class RollbackManager {
|
|||||||
private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
|
private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
|
||||||
const defaultManifest = { operations: [] };
|
const defaultManifest = { operations: [] };
|
||||||
|
|
||||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
const exists = await plugins.smartfs.file(this.manifestPath).exists();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
return defaultManifest;
|
return defaultManifest;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
const content = (await plugins.smartfs
|
||||||
|
.file(this.manifestPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
const manifest = JSON.parse(content);
|
const manifest = JSON.parse(content);
|
||||||
|
|
||||||
// Validate the manifest structure
|
// Validate the manifest structure
|
||||||
@@ -228,7 +247,7 @@ export class RollbackManager {
|
|||||||
);
|
);
|
||||||
// Try to delete the corrupted file
|
// Try to delete the corrupted file
|
||||||
try {
|
try {
|
||||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
await plugins.smartfs.file(this.manifestPath).delete();
|
||||||
} catch (removeError) {
|
} catch (removeError) {
|
||||||
// Ignore removal errors
|
// Ignore removal errors
|
||||||
}
|
}
|
||||||
@@ -249,7 +268,10 @@ export class RollbackManager {
|
|||||||
|
|
||||||
// Write directly with proper JSON stringification
|
// Write directly with proper JSON stringification
|
||||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
await plugins.smartfs
|
||||||
|
.file(this.manifestPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(jsonContent);
|
||||||
}
|
}
|
||||||
|
|
||||||
private async getOperation(
|
private async getOperation(
|
||||||
|
|||||||
@@ -13,12 +13,12 @@ const filesToDelete = [
|
|||||||
|
|
||||||
export const run = async (projectArg: Project) => {
|
export const run = async (projectArg: Project) => {
|
||||||
for (const relativeFilePath of filesToDelete) {
|
for (const relativeFilePath of filesToDelete) {
|
||||||
const fileExists = plugins.smartfile.fs.fileExistsSync(relativeFilePath);
|
const fileExists = await plugins.smartfs.file(relativeFilePath).exists();
|
||||||
if (fileExists) {
|
if (fileExists) {
|
||||||
logger.log('info', `Found ${relativeFilePath}! Removing it!`);
|
logger.log('info', `Found ${relativeFilePath}! Removing it!`);
|
||||||
plugins.smartfile.fs.removeSync(
|
await plugins.smartfs
|
||||||
plugins.path.join(paths.cwd, relativeFilePath),
|
.file(plugins.path.join(paths.cwd, relativeFilePath))
|
||||||
);
|
.delete();
|
||||||
} else {
|
} else {
|
||||||
logger.log('info', `Project is free of ${relativeFilePath}`);
|
logger.log('info', `Project is free of ${relativeFilePath}`);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -24,7 +24,12 @@ export const run = async (projectArg: Project) => {
|
|||||||
|
|
||||||
try {
|
try {
|
||||||
// Handle glob patterns
|
// Handle glob patterns
|
||||||
const files = await plugins.smartfile.fs.listFileTree('.', pattern.from);
|
const entries = await plugins.smartfs
|
||||||
|
.directory('.')
|
||||||
|
.recursive()
|
||||||
|
.filter(pattern.from)
|
||||||
|
.list();
|
||||||
|
const files = entries.map((entry) => entry.path);
|
||||||
|
|
||||||
for (const file of files) {
|
for (const file of files) {
|
||||||
const sourcePath = file;
|
const sourcePath = file;
|
||||||
@@ -46,10 +51,13 @@ export const run = async (projectArg: Project) => {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Ensure destination directory exists
|
// Ensure destination directory exists
|
||||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(destPath));
|
await plugins.smartfs
|
||||||
|
.directory(plugins.path.dirname(destPath))
|
||||||
|
.recursive()
|
||||||
|
.create();
|
||||||
|
|
||||||
// Copy file
|
// Copy file
|
||||||
await plugins.smartfile.fs.copy(sourcePath, destPath);
|
await plugins.smartfs.file(sourcePath).copy(destPath);
|
||||||
logger.log('info', `Copied ${sourcePath} to ${destPath}`);
|
logger.log('info', `Copied ${sourcePath} to ${destPath}`);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
|
|||||||
@@ -7,16 +7,48 @@ import { logger } from '../gitzone.logging.js';
|
|||||||
const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
|
const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
|
||||||
|
|
||||||
export const run = async (projectArg: Project) => {
|
export const run = async (projectArg: Project) => {
|
||||||
const gitignoreExists = await plugins.smartfile.fs.fileExists(gitignorePath);
|
const gitignoreExists = await plugins.smartfs.file(gitignorePath).exists();
|
||||||
const templateModule = await import('../mod_template/index.js');
|
let customContent = '';
|
||||||
const ciTemplate = await templateModule.getTemplate('gitignore');
|
|
||||||
if (gitignoreExists) {
|
if (gitignoreExists) {
|
||||||
// lets get the existing gitignore file
|
// lets get the existing gitignore file
|
||||||
const existingGitIgnoreString =
|
const existingGitIgnoreString = (await plugins.smartfs
|
||||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
.file(gitignorePath)
|
||||||
let customPart = existingGitIgnoreString.split('# custom\n')[1];
|
.encoding('utf8')
|
||||||
customPart ? null : (customPart = '');
|
.read()) as string;
|
||||||
|
|
||||||
|
// Check for different custom section markers
|
||||||
|
const customMarkers = ['#------# custom', '# custom'];
|
||||||
|
for (const marker of customMarkers) {
|
||||||
|
const splitResult = existingGitIgnoreString.split(marker);
|
||||||
|
if (splitResult.length > 1) {
|
||||||
|
// Get everything after the marker (excluding the marker itself)
|
||||||
|
customContent = splitResult[1].trim();
|
||||||
|
break;
|
||||||
}
|
}
|
||||||
ciTemplate.writeToDisk(paths.cwd);
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Write the template
|
||||||
|
const templateModule = await import('../mod_template/index.js');
|
||||||
|
const ciTemplate = await templateModule.getTemplate('gitignore');
|
||||||
|
await ciTemplate.writeToDisk(paths.cwd);
|
||||||
|
|
||||||
|
// Append the custom content if it exists
|
||||||
|
if (customContent) {
|
||||||
|
const newGitignoreContent = (await plugins.smartfs
|
||||||
|
.file(gitignorePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
// The template already ends with "#------# custom", so just append the content
|
||||||
|
const finalContent =
|
||||||
|
newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
|
||||||
|
await plugins.smartfs
|
||||||
|
.file(gitignorePath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(finalContent);
|
||||||
|
logger.log('info', 'Updated .gitignore while preserving custom section!');
|
||||||
|
} else {
|
||||||
logger.log('info', 'Added a .gitignore!');
|
logger.log('info', 'Added a .gitignore!');
|
||||||
|
}
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -7,9 +7,9 @@ import { logger } from '../gitzone.logging.js';
|
|||||||
const incompatibleLicenses: string[] = ['AGPL', 'GPL', 'SSPL'];
|
const incompatibleLicenses: string[] = ['AGPL', 'GPL', 'SSPL'];
|
||||||
|
|
||||||
export const run = async (projectArg: Project) => {
|
export const run = async (projectArg: Project) => {
|
||||||
const nodeModulesInstalled = await plugins.smartfile.fs.isDirectory(
|
const nodeModulesInstalled = await plugins.smartfs
|
||||||
plugins.path.join(paths.cwd, 'node_modules'),
|
.directory(plugins.path.join(paths.cwd, 'node_modules'))
|
||||||
);
|
.exists();
|
||||||
if (!nodeModulesInstalled) {
|
if (!nodeModulesInstalled) {
|
||||||
logger.log('warn', 'No node_modules found. Skipping license check');
|
logger.log('warn', 'No node_modules found. Skipping license check');
|
||||||
return;
|
return;
|
||||||
|
|||||||
@@ -83,10 +83,10 @@ export const run = async (projectArg: Project) => {
|
|||||||
type: 'git',
|
type: 'git',
|
||||||
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}.git`,
|
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}.git`,
|
||||||
};
|
};
|
||||||
(packageJson.bugs = {
|
((packageJson.bugs = {
|
||||||
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}/issues`,
|
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}/issues`,
|
||||||
}),
|
}),
|
||||||
(packageJson.homepage = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}#readme`);
|
(packageJson.homepage = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}#readme`));
|
||||||
|
|
||||||
// Check for module type
|
// Check for module type
|
||||||
if (!packageJson.type) {
|
if (!packageJson.type) {
|
||||||
@@ -154,10 +154,11 @@ export const run = async (projectArg: Project) => {
|
|||||||
];
|
];
|
||||||
|
|
||||||
// check for dependencies
|
// check for dependencies
|
||||||
|
// Note: @push.rocks/tapbundle is deprecated - use @git.zone/tstest/tapbundle instead
|
||||||
await ensureDependency(
|
await ensureDependency(
|
||||||
packageJson,
|
packageJson,
|
||||||
'devDep',
|
'devDep',
|
||||||
'latest',
|
'exclude',
|
||||||
'@push.rocks/tapbundle',
|
'@push.rocks/tapbundle',
|
||||||
);
|
);
|
||||||
await ensureDependency(
|
await ensureDependency(
|
||||||
@@ -174,9 +175,11 @@ export const run = async (projectArg: Project) => {
|
|||||||
);
|
);
|
||||||
|
|
||||||
// set overrides
|
// set overrides
|
||||||
const overrides = plugins.smartfile.fs.toObjectSync(
|
const overridesContent = (await plugins.smartfs
|
||||||
plugins.path.join(paths.assetsDir, 'overrides.json'),
|
.file(plugins.path.join(paths.assetsDir, 'overrides.json'))
|
||||||
);
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
const overrides = JSON.parse(overridesContent);
|
||||||
packageJson.pnpm = packageJson.pnpm || {};
|
packageJson.pnpm = packageJson.pnpm || {};
|
||||||
packageJson.pnpm.overrides = overrides;
|
packageJson.pnpm.overrides = overrides;
|
||||||
|
|
||||||
|
|||||||
@@ -6,25 +6,22 @@ export const run = async () => {
|
|||||||
const readmeHintsPath = plugins.path.join(paths.cwd, 'readme.hints.md');
|
const readmeHintsPath = plugins.path.join(paths.cwd, 'readme.hints.md');
|
||||||
|
|
||||||
// Check and initialize readme.md if it doesn't exist
|
// Check and initialize readme.md if it doesn't exist
|
||||||
const readmeExists = await plugins.smartfile.fs.fileExists(readmePath);
|
const readmeExists = await plugins.smartfs.file(readmePath).exists();
|
||||||
if (!readmeExists) {
|
if (!readmeExists) {
|
||||||
await plugins.smartfile.fs.toFs(
|
await plugins.smartfs.file(readmePath)
|
||||||
'# Project Readme\n\nThis is the initial readme file.',
|
.encoding('utf8')
|
||||||
readmePath,
|
.write('# Project Readme\n\nThis is the initial readme file.');
|
||||||
);
|
|
||||||
console.log('Initialized readme.md');
|
console.log('Initialized readme.md');
|
||||||
} else {
|
} else {
|
||||||
console.log('readme.md already exists');
|
console.log('readme.md already exists');
|
||||||
}
|
}
|
||||||
|
|
||||||
// Check and initialize readme.hints.md if it doesn't exist
|
// Check and initialize readme.hints.md if it doesn't exist
|
||||||
const readmeHintsExists =
|
const readmeHintsExists = await plugins.smartfs.file(readmeHintsPath).exists();
|
||||||
await plugins.smartfile.fs.fileExists(readmeHintsPath);
|
|
||||||
if (!readmeHintsExists) {
|
if (!readmeHintsExists) {
|
||||||
await plugins.smartfile.fs.toFs(
|
await plugins.smartfs.file(readmeHintsPath)
|
||||||
'# Project Readme Hints\n\nThis is the initial readme hints file.',
|
.encoding('utf8')
|
||||||
readmeHintsPath,
|
.write('# Project Readme Hints\n\nThis is the initial readme hints file.');
|
||||||
);
|
|
||||||
console.log('Initialized readme.hints.md');
|
console.log('Initialized readme.hints.md');
|
||||||
} else {
|
} else {
|
||||||
console.log('readme.hints.md already exists');
|
console.log('readme.hints.md already exists');
|
||||||
|
|||||||
@@ -10,12 +10,6 @@ import { Project } from '../classes.project.js';
|
|||||||
export const run = async (project: Project) => {
|
export const run = async (project: Project) => {
|
||||||
const templateModule = await import('../mod_template/index.js');
|
const templateModule = await import('../mod_template/index.js');
|
||||||
|
|
||||||
// update tslint
|
|
||||||
// getting template
|
|
||||||
const tslintTemplate = await templateModule.getTemplate('tslint');
|
|
||||||
await tslintTemplate.writeToDisk(paths.cwd);
|
|
||||||
logger.log('info', 'Updated tslint.json!');
|
|
||||||
|
|
||||||
// update vscode
|
// update vscode
|
||||||
const vscodeTemplate = await templateModule.getTemplate('vscode');
|
const vscodeTemplate = await templateModule.getTemplate('vscode');
|
||||||
await vscodeTemplate.writeToDisk(paths.cwd);
|
await vscodeTemplate.writeToDisk(paths.cwd);
|
||||||
|
|||||||
@@ -7,10 +7,11 @@ import { Project } from '../classes.project.js';
|
|||||||
export const run = async (projectArg: Project) => {
|
export const run = async (projectArg: Project) => {
|
||||||
// lets care about tsconfig.json
|
// lets care about tsconfig.json
|
||||||
logger.log('info', 'Formatting tsconfig.json...');
|
logger.log('info', 'Formatting tsconfig.json...');
|
||||||
const tsconfigSmartfile = await plugins.smartfile.SmartFile.fromFilePath(
|
const factory = plugins.smartfile.SmartFileFactory.nodeFs();
|
||||||
|
const tsconfigSmartfile = await factory.fromFilePath(
|
||||||
plugins.path.join(paths.cwd, 'tsconfig.json'),
|
plugins.path.join(paths.cwd, 'tsconfig.json'),
|
||||||
);
|
);
|
||||||
const tsconfigObject = JSON.parse(tsconfigSmartfile.contentBuffer.toString());
|
const tsconfigObject = JSON.parse(tsconfigSmartfile.parseContentAsString());
|
||||||
tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {};
|
tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {};
|
||||||
tsconfigObject.compilerOptions.baseUrl = '.';
|
tsconfigObject.compilerOptions.baseUrl = '.';
|
||||||
tsconfigObject.compilerOptions.paths = {};
|
tsconfigObject.compilerOptions.paths = {};
|
||||||
@@ -23,8 +24,8 @@ export const run = async (projectArg: Project) => {
|
|||||||
`./${publishModule}/index.js`,
|
`./${publishModule}/index.js`,
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
tsconfigSmartfile.setContentsFromString(
|
await tsconfigSmartfile.editContentAsString(async () => {
|
||||||
JSON.stringify(tsconfigObject, null, 2),
|
return JSON.stringify(tsconfigObject, null, 2);
|
||||||
);
|
});
|
||||||
await tsconfigSmartfile.write();
|
await tsconfigSmartfile.write();
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -20,7 +20,7 @@ export class CleanupFormatter extends BaseFormatter {
|
|||||||
];
|
];
|
||||||
|
|
||||||
for (const file of filesToRemove) {
|
for (const file of filesToRemove) {
|
||||||
const exists = await plugins.smartfile.fs.fileExists(file);
|
const exists = await plugins.smartfs.file(file).exists();
|
||||||
if (exists) {
|
if (exists) {
|
||||||
changes.push({
|
changes.push({
|
||||||
type: 'delete',
|
type: 'delete',
|
||||||
|
|||||||
@@ -29,8 +29,9 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
'README.md',
|
'README.md',
|
||||||
'changelog.md',
|
'changelog.md',
|
||||||
'CHANGELOG.md',
|
'CHANGELOG.md',
|
||||||
'license',
|
// Skip files without extensions as prettier can't infer parser
|
||||||
'LICENSE',
|
// 'license',
|
||||||
|
// 'LICENSE',
|
||||||
'*.md',
|
'*.md',
|
||||||
];
|
];
|
||||||
|
|
||||||
@@ -40,16 +41,23 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
// Add files from TypeScript directories
|
// Add files from TypeScript directories
|
||||||
for (const dir of includeDirs) {
|
for (const dir of includeDirs) {
|
||||||
const globPattern = `${dir}/**/*.${extensions}`;
|
const globPattern = `${dir}/**/*.${extensions}`;
|
||||||
const dirFiles = await plugins.smartfile.fs.listFileTree(
|
const dirEntries = await plugins.smartfs
|
||||||
'.',
|
.directory('.')
|
||||||
globPattern,
|
.recursive()
|
||||||
);
|
.filter(globPattern)
|
||||||
|
.list();
|
||||||
|
const dirFiles = dirEntries.map((entry) => entry.path);
|
||||||
allFiles.push(...dirFiles);
|
allFiles.push(...dirFiles);
|
||||||
}
|
}
|
||||||
|
|
||||||
// Add root config files
|
// Add root config files
|
||||||
for (const pattern of rootConfigFiles) {
|
for (const pattern of rootConfigFiles) {
|
||||||
const rootFiles = await plugins.smartfile.fs.listFileTree('.', pattern);
|
const rootEntries = await plugins.smartfs
|
||||||
|
.directory('.')
|
||||||
|
.recursive()
|
||||||
|
.filter(pattern)
|
||||||
|
.list();
|
||||||
|
const rootFiles = rootEntries.map((entry) => entry.path);
|
||||||
// Only include files at root level (no slashes in path)
|
// Only include files at root level (no slashes in path)
|
||||||
const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
|
const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
|
||||||
allFiles.push(...rootLevelFiles);
|
allFiles.push(...rootLevelFiles);
|
||||||
@@ -65,8 +73,8 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
const validFiles: string[] = [];
|
const validFiles: string[] = [];
|
||||||
for (const file of files) {
|
for (const file of files) {
|
||||||
try {
|
try {
|
||||||
const stats = await plugins.smartfile.fs.stat(file);
|
const stats = await plugins.smartfs.file(file).stat();
|
||||||
if (!stats.isDirectory()) {
|
if (!stats.isDirectory) {
|
||||||
validFiles.push(file);
|
validFiles.push(file);
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -102,26 +110,15 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
try {
|
try {
|
||||||
await this.preExecute();
|
await this.preExecute();
|
||||||
|
|
||||||
// Batch process files
|
logVerbose(`Processing ${changes.length} files sequentially`);
|
||||||
const batchSize = 10; // Process 10 files at a time
|
|
||||||
const batches: IPlannedChange[][] = [];
|
|
||||||
|
|
||||||
for (let i = 0; i < changes.length; i += batchSize) {
|
|
||||||
batches.push(changes.slice(i, i + batchSize));
|
|
||||||
}
|
|
||||||
|
|
||||||
|
// Process files sequentially to avoid prettier cache/state issues
|
||||||
|
for (let i = 0; i < changes.length; i++) {
|
||||||
|
const change = changes[i];
|
||||||
logVerbose(
|
logVerbose(
|
||||||
`Processing ${changes.length} files in ${batches.length} batches`,
|
`Processing file ${i + 1}/${changes.length}: ${change.path}`,
|
||||||
);
|
);
|
||||||
|
|
||||||
for (let i = 0; i < batches.length; i++) {
|
|
||||||
const batch = batches[i];
|
|
||||||
logVerbose(
|
|
||||||
`Processing batch ${i + 1}/${batches.length} (${batch.length} files)`,
|
|
||||||
);
|
|
||||||
|
|
||||||
// Process batch in parallel
|
|
||||||
const promises = batch.map(async (change) => {
|
|
||||||
try {
|
try {
|
||||||
await this.applyChange(change);
|
await this.applyChange(change);
|
||||||
this.stats.recordFileOperation(this.name, change.type, true);
|
this.stats.recordFileOperation(this.name, change.type, true);
|
||||||
@@ -133,9 +130,6 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
);
|
);
|
||||||
// Don't throw - continue with other files
|
// Don't throw - continue with other files
|
||||||
}
|
}
|
||||||
});
|
|
||||||
|
|
||||||
await Promise.all(promises);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
await this.postExecute();
|
await this.postExecute();
|
||||||
@@ -151,11 +145,35 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
if (change.type !== 'modify') return;
|
if (change.type !== 'modify') return;
|
||||||
|
|
||||||
try {
|
try {
|
||||||
|
// Validate the path before processing
|
||||||
|
if (!change.path || change.path.trim() === '') {
|
||||||
|
logger.log(
|
||||||
|
'error',
|
||||||
|
`Invalid empty path in change: ${JSON.stringify(change)}`,
|
||||||
|
);
|
||||||
|
throw new Error('Invalid empty path');
|
||||||
|
}
|
||||||
|
|
||||||
// Read current content
|
// Read current content
|
||||||
const content = plugins.smartfile.fs.toStringSync(change.path);
|
const content = (await plugins.smartfs
|
||||||
|
.file(change.path)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
|
||||||
// Format with prettier
|
// Format with prettier
|
||||||
const prettier = await import('prettier');
|
const prettier = await import('prettier');
|
||||||
|
|
||||||
|
// Skip files that prettier can't parse without explicit parser
|
||||||
|
const fileExt = plugins.path.extname(change.path).toLowerCase();
|
||||||
|
if (!fileExt || fileExt === '') {
|
||||||
|
// Files without extensions need explicit parser
|
||||||
|
logVerbose(
|
||||||
|
`Skipping ${change.path} - no file extension for parser inference`,
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
const formatted = await prettier.format(content, {
|
const formatted = await prettier.format(content, {
|
||||||
filepath: change.path,
|
filepath: change.path,
|
||||||
...(await this.getPrettierConfig()),
|
...(await this.getPrettierConfig()),
|
||||||
@@ -163,13 +181,38 @@ export class PrettierFormatter extends BaseFormatter {
|
|||||||
|
|
||||||
// Only write if content actually changed
|
// Only write if content actually changed
|
||||||
if (formatted !== content) {
|
if (formatted !== content) {
|
||||||
|
// Debug: log the path being written
|
||||||
|
logVerbose(`Writing formatted content to: ${change.path}`);
|
||||||
await this.modifyFile(change.path, formatted);
|
await this.modifyFile(change.path, formatted);
|
||||||
logVerbose(`Formatted ${change.path}`);
|
logVerbose(`Formatted ${change.path}`);
|
||||||
} else {
|
} else {
|
||||||
logVerbose(`No formatting changes for ${change.path}`);
|
logVerbose(`No formatting changes for ${change.path}`);
|
||||||
}
|
}
|
||||||
|
} catch (prettierError) {
|
||||||
|
// Check if it's a parser error
|
||||||
|
if (
|
||||||
|
prettierError.message &&
|
||||||
|
prettierError.message.includes('No parser could be inferred')
|
||||||
|
) {
|
||||||
|
logVerbose(`Skipping ${change.path} - ${prettierError.message}`);
|
||||||
|
return; // Skip this file silently
|
||||||
|
}
|
||||||
|
throw prettierError;
|
||||||
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
logger.log('error', `Failed to format ${change.path}: ${error.message}`);
|
// Log the full error stack for debugging mkdir issues
|
||||||
|
if (error.message && error.message.includes('mkdir')) {
|
||||||
|
logger.log(
|
||||||
|
'error',
|
||||||
|
`Failed to format ${change.path}: ${error.message}`,
|
||||||
|
);
|
||||||
|
logger.log('error', `Error stack: ${error.stack}`);
|
||||||
|
} else {
|
||||||
|
logger.log(
|
||||||
|
'error',
|
||||||
|
`Failed to format ${change.path}: ${error.message}`,
|
||||||
|
);
|
||||||
|
}
|
||||||
throw error;
|
throw error;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -101,7 +101,12 @@ export let run = async (
|
|||||||
// Plan phase
|
// Plan phase
|
||||||
logger.log('info', 'Analyzing project for format operations...');
|
logger.log('info', 'Analyzing project for format operations...');
|
||||||
let plan = options.fromPlan
|
let plan = options.fromPlan
|
||||||
? JSON.parse(await plugins.smartfile.fs.toStringSync(options.fromPlan))
|
? JSON.parse(
|
||||||
|
(await plugins.smartfs
|
||||||
|
.file(options.fromPlan)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string,
|
||||||
|
)
|
||||||
: await planner.planFormat(activeFormatters);
|
: await planner.planFormat(activeFormatters);
|
||||||
|
|
||||||
// Display plan
|
// Display plan
|
||||||
@@ -109,10 +114,10 @@ export let run = async (
|
|||||||
|
|
||||||
// Save plan if requested
|
// Save plan if requested
|
||||||
if (options.savePlan) {
|
if (options.savePlan) {
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
JSON.stringify(plan, null, 2),
|
.file(options.savePlan)
|
||||||
options.savePlan,
|
.encoding('utf8')
|
||||||
);
|
.write(JSON.stringify(plan, null, 2));
|
||||||
logger.log('info', `Plan saved to ${options.savePlan}`);
|
logger.log('info', `Plan saved to ${options.savePlan}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -185,5 +190,8 @@ export const handleListBackups = async (): Promise<void> => {
|
|||||||
};
|
};
|
||||||
|
|
||||||
export const handleCleanBackups = async (): Promise<void> => {
|
export const handleCleanBackups = async (): Promise<void> => {
|
||||||
logger.log('info', 'Backup cleaning has been disabled - backup system removed');
|
logger.log(
|
||||||
|
'info',
|
||||||
|
'Backup cleaning has been disabled - backup system removed',
|
||||||
|
);
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -48,15 +48,17 @@ export class Meta {
|
|||||||
public async readDirectory() {
|
public async readDirectory() {
|
||||||
await this.syncToRemote(true);
|
await this.syncToRemote(true);
|
||||||
logger.log('info', `reading directory`);
|
logger.log('info', `reading directory`);
|
||||||
const metaFileExists = plugins.smartfile.fs.fileExistsSync(
|
const metaFileExists = await plugins.smartfs
|
||||||
this.filePaths.metaJson,
|
.file(this.filePaths.metaJson)
|
||||||
);
|
.exists();
|
||||||
if (!metaFileExists) {
|
if (!metaFileExists) {
|
||||||
throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
|
throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
|
||||||
}
|
}
|
||||||
this.metaRepoData = plugins.smartfile.fs.toObjectSync(
|
const content = (await plugins.smartfs
|
||||||
this.filePaths.metaJson,
|
.file(this.filePaths.metaJson)
|
||||||
);
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
this.metaRepoData = JSON.parse(content);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -78,15 +80,15 @@ export class Meta {
|
|||||||
*/
|
*/
|
||||||
public async writeToDisk() {
|
public async writeToDisk() {
|
||||||
// write .meta.json to disk
|
// write .meta.json to disk
|
||||||
plugins.smartfile.memory.toFsSync(
|
await plugins.smartfs
|
||||||
JSON.stringify(this.metaRepoData, null, 2),
|
.file(this.filePaths.metaJson)
|
||||||
this.filePaths.metaJson,
|
.encoding('utf8')
|
||||||
);
|
.write(JSON.stringify(this.metaRepoData, null, 2));
|
||||||
// write .gitignore to disk
|
// write .gitignore to disk
|
||||||
plugins.smartfile.memory.toFsSync(
|
await plugins.smartfs
|
||||||
await this.generateGitignore(),
|
.file(this.filePaths.gitIgnore)
|
||||||
this.filePaths.gitIgnore,
|
.encoding('utf8')
|
||||||
);
|
.write(await this.generateGitignore());
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -112,10 +114,25 @@ export class Meta {
|
|||||||
*/
|
*/
|
||||||
public async updateLocalRepos() {
|
public async updateLocalRepos() {
|
||||||
await this.syncToRemote();
|
await this.syncToRemote();
|
||||||
const projects = plugins.smartfile.fs.toObjectSync(
|
const metaContent = (await plugins.smartfs
|
||||||
this.filePaths.metaJson,
|
.file(this.filePaths.metaJson)
|
||||||
).projects;
|
.encoding('utf8')
|
||||||
const preExistingFolders = plugins.smartfile.fs.listFoldersSync(this.cwd);
|
.read()) as string;
|
||||||
|
const projects = JSON.parse(metaContent).projects;
|
||||||
|
const entries = await plugins.smartfs.directory(this.cwd).list();
|
||||||
|
const preExistingFolders: string[] = [];
|
||||||
|
for (const entry of entries) {
|
||||||
|
try {
|
||||||
|
const stats = await plugins.smartfs
|
||||||
|
.file(plugins.path.join(this.cwd, entry.path))
|
||||||
|
.stat();
|
||||||
|
if (stats.isDirectory) {
|
||||||
|
preExistingFolders.push(entry.name);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Skip entries that can't be accessed
|
||||||
|
}
|
||||||
|
}
|
||||||
for (const preExistingFolderArg of preExistingFolders) {
|
for (const preExistingFolderArg of preExistingFolders) {
|
||||||
if (
|
if (
|
||||||
preExistingFolderArg !== '.git' &&
|
preExistingFolderArg !== '.git' &&
|
||||||
@@ -143,9 +160,17 @@ export class Meta {
|
|||||||
await this.sortMetaRepoData();
|
await this.sortMetaRepoData();
|
||||||
const missingRepos: string[] = [];
|
const missingRepos: string[] = [];
|
||||||
for (const key of Object.keys(this.metaRepoData.projects)) {
|
for (const key of Object.keys(this.metaRepoData.projects)) {
|
||||||
plugins.smartfile.fs.isDirectory(key)
|
const fullPath = plugins.path.join(this.cwd, key);
|
||||||
? logger.log('ok', `${key} -> is already cloned`)
|
try {
|
||||||
: missingRepos.push(key);
|
const stats = await plugins.smartfs.file(fullPath).stat();
|
||||||
|
if (stats.isDirectory) {
|
||||||
|
logger.log('ok', `${key} -> is already cloned`);
|
||||||
|
} else {
|
||||||
|
missingRepos.push(key);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
missingRepos.push(key);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.log('info', `found ${missingRepos.length} missing repos`);
|
logger.log('info', `found ${missingRepos.length} missing repos`);
|
||||||
@@ -165,7 +190,20 @@ export class Meta {
|
|||||||
await this.syncToRemote();
|
await this.syncToRemote();
|
||||||
|
|
||||||
// go recursive
|
// go recursive
|
||||||
const folders = await plugins.smartfile.fs.listFolders(this.cwd);
|
const listEntries = await plugins.smartfs.directory(this.cwd).list();
|
||||||
|
const folders: string[] = [];
|
||||||
|
for (const entry of listEntries) {
|
||||||
|
try {
|
||||||
|
const stats = await plugins.smartfs
|
||||||
|
.file(plugins.path.join(this.cwd, entry.path))
|
||||||
|
.stat();
|
||||||
|
if (stats.isDirectory) {
|
||||||
|
folders.push(entry.name);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Skip entries that can't be accessed
|
||||||
|
}
|
||||||
|
}
|
||||||
const childMetaRepositories: string[] = [];
|
const childMetaRepositories: string[] = [];
|
||||||
for (const folder of folders) {
|
for (const folder of folders) {
|
||||||
logger.log('info', folder);
|
logger.log('info', folder);
|
||||||
@@ -180,26 +218,30 @@ export class Meta {
|
|||||||
*/
|
*/
|
||||||
public async initProject() {
|
public async initProject() {
|
||||||
await this.syncToRemote(true);
|
await this.syncToRemote(true);
|
||||||
const fileExists = await plugins.smartfile.fs.fileExists(
|
const fileExists = await plugins.smartfs
|
||||||
this.filePaths.metaJson,
|
.file(this.filePaths.metaJson)
|
||||||
);
|
.exists();
|
||||||
if (!fileExists) {
|
if (!fileExists) {
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
|
.file(this.filePaths.metaJson)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
projects: {},
|
projects: {},
|
||||||
}),
|
}),
|
||||||
this.filePaths.metaJson,
|
|
||||||
);
|
);
|
||||||
logger.log(
|
logger.log(
|
||||||
`success`,
|
`success`,
|
||||||
`created a new .meta.json in directory ${this.cwd}`,
|
`created a new .meta.json in directory ${this.cwd}`,
|
||||||
);
|
);
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
|
.file(this.filePaths.packageJson)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(
|
||||||
JSON.stringify({
|
JSON.stringify({
|
||||||
name: this.dirName,
|
name: this.dirName,
|
||||||
version: '1.0.0',
|
version: '1.0.0',
|
||||||
}),
|
}),
|
||||||
this.filePaths.packageJson,
|
|
||||||
);
|
);
|
||||||
logger.log(
|
logger.log(
|
||||||
`success`,
|
`success`,
|
||||||
@@ -264,9 +306,10 @@ export class Meta {
|
|||||||
await this.writeToDisk();
|
await this.writeToDisk();
|
||||||
|
|
||||||
logger.log('info', 'removing directory from cwd');
|
logger.log('info', 'removing directory from cwd');
|
||||||
await plugins.smartfile.fs.remove(
|
await plugins.smartfs
|
||||||
plugins.path.join(paths.cwd, projectNameArg),
|
.directory(plugins.path.join(paths.cwd, projectNameArg))
|
||||||
);
|
.recursive()
|
||||||
|
.delete();
|
||||||
await this.updateLocalRepos();
|
await this.updateLocalRepos();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
261
ts/mod_services/classes.dockercontainer.ts
Normal file
261
ts/mod_services/classes.dockercontainer.ts
Normal file
@@ -0,0 +1,261 @@
|
|||||||
|
import * as plugins from './mod.plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
|
export type ContainerStatus = 'running' | 'stopped' | 'not_exists';
|
||||||
|
|
||||||
|
export interface IDockerRunOptions {
|
||||||
|
name: string;
|
||||||
|
image: string;
|
||||||
|
ports?: { [key: string]: string };
|
||||||
|
volumes?: { [key: string]: string };
|
||||||
|
environment?: { [key: string]: string };
|
||||||
|
restart?: string;
|
||||||
|
command?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DockerContainer {
|
||||||
|
private smartshell: plugins.smartshell.Smartshell;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.smartshell = new plugins.smartshell.Smartshell({
|
||||||
|
executor: 'bash',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if Docker is installed and available
|
||||||
|
*/
|
||||||
|
public async checkDocker(): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const result = await this.smartshell.exec('docker --version');
|
||||||
|
return result.exitCode === 0;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get container status
|
||||||
|
*/
|
||||||
|
public async getStatus(containerName: string): Promise<ContainerStatus> {
|
||||||
|
try {
|
||||||
|
// Check if running
|
||||||
|
const runningResult = await this.smartshell.exec(
|
||||||
|
`docker ps --format '{{.Names}}' | grep -q "^${containerName}$"`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (runningResult.exitCode === 0) {
|
||||||
|
return 'running';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if exists but stopped
|
||||||
|
const existsResult = await this.smartshell.exec(
|
||||||
|
`docker ps -a --format '{{.Names}}' | grep -q "^${containerName}$"`
|
||||||
|
);
|
||||||
|
|
||||||
|
if (existsResult.exitCode === 0) {
|
||||||
|
return 'stopped';
|
||||||
|
}
|
||||||
|
|
||||||
|
return 'not_exists';
|
||||||
|
} catch (error) {
|
||||||
|
return 'not_exists';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start a container
|
||||||
|
*/
|
||||||
|
public async start(containerName: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const result = await this.smartshell.exec(`docker start ${containerName}`);
|
||||||
|
return result.exitCode === 0;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop a container
|
||||||
|
*/
|
||||||
|
public async stop(containerName: string): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const result = await this.smartshell.exec(`docker stop ${containerName}`);
|
||||||
|
return result.exitCode === 0;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a container
|
||||||
|
*/
|
||||||
|
public async remove(containerName: string, force: boolean = false): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const forceFlag = force ? '-f' : '';
|
||||||
|
const result = await this.smartshell.exec(`docker rm ${forceFlag} ${containerName}`);
|
||||||
|
return result.exitCode === 0;
|
||||||
|
} catch (error) {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run a new container
|
||||||
|
*/
|
||||||
|
public async run(options: IDockerRunOptions): Promise<boolean> {
|
||||||
|
let command = 'docker run -d';
|
||||||
|
|
||||||
|
// Add name
|
||||||
|
command += ` --name ${options.name}`;
|
||||||
|
|
||||||
|
// Add ports
|
||||||
|
if (options.ports) {
|
||||||
|
for (const [hostPort, containerPort] of Object.entries(options.ports)) {
|
||||||
|
command += ` -p ${hostPort}:${containerPort}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add volumes
|
||||||
|
if (options.volumes) {
|
||||||
|
for (const [hostPath, containerPath] of Object.entries(options.volumes)) {
|
||||||
|
command += ` -v "${hostPath}:${containerPath}"`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add environment variables
|
||||||
|
if (options.environment) {
|
||||||
|
for (const [key, value] of Object.entries(options.environment)) {
|
||||||
|
command += ` -e ${key}="${value}"`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add restart policy
|
||||||
|
if (options.restart) {
|
||||||
|
command += ` --restart ${options.restart}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add image
|
||||||
|
command += ` ${options.image}`;
|
||||||
|
|
||||||
|
// Add command if provided
|
||||||
|
if (options.command) {
|
||||||
|
command += ` ${options.command}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await this.smartshell.exec(command);
|
||||||
|
return result.exitCode === 0;
|
||||||
|
} catch (error) {
|
||||||
|
logger.log('error', `Failed to run container: ${error.message}`);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute a command in a running container
|
||||||
|
*/
|
||||||
|
public async exec(containerName: string, command: string): Promise<string> {
|
||||||
|
try {
|
||||||
|
const result = await this.smartshell.exec(`docker exec ${containerName} ${command}`);
|
||||||
|
if (result.exitCode === 0) {
|
||||||
|
return result.stdout;
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
} catch (error) {
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get container logs
|
||||||
|
*/
|
||||||
|
public async logs(containerName: string, lines?: number): Promise<string> {
|
||||||
|
try {
|
||||||
|
const tailFlag = lines ? `--tail ${lines}` : '';
|
||||||
|
const result = await this.smartshell.exec(`docker logs ${tailFlag} ${containerName}`);
|
||||||
|
return result.stdout;
|
||||||
|
} catch (error) {
|
||||||
|
return `Error getting logs: ${error.message}`;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a container exists
|
||||||
|
*/
|
||||||
|
public async exists(containerName: string): Promise<boolean> {
|
||||||
|
const status = await this.getStatus(containerName);
|
||||||
|
return status !== 'not_exists';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a container is running
|
||||||
|
*/
|
||||||
|
public async isRunning(containerName: string): Promise<boolean> {
|
||||||
|
const status = await this.getStatus(containerName);
|
||||||
|
return status === 'running';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Wait for a container to be ready
|
||||||
|
*/
|
||||||
|
public async waitForReady(containerName: string, maxAttempts: number = 30): Promise<boolean> {
|
||||||
|
for (let i = 0; i < maxAttempts; i++) {
|
||||||
|
if (await this.isRunning(containerName)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
await plugins.smartdelay.delayFor(1000);
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get container information
|
||||||
|
*/
|
||||||
|
public async inspect(containerName: string): Promise<any> {
|
||||||
|
try {
|
||||||
|
const result = await this.smartshell.execSilent(`docker inspect ${containerName}`);
|
||||||
|
if (result.exitCode === 0) {
|
||||||
|
return JSON.parse(result.stdout);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
} catch (error) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get port mappings for a container
|
||||||
|
*/
|
||||||
|
public async getPortMappings(containerName: string): Promise<{ [key: string]: string } | null> {
|
||||||
|
try {
|
||||||
|
// Use docker inspect without format to get full JSON, then extract PortBindings
|
||||||
|
const result = await this.smartshell.execSilent(`docker inspect ${containerName}`);
|
||||||
|
|
||||||
|
if (result.exitCode === 0 && result.stdout) {
|
||||||
|
const inspectData = JSON.parse(result.stdout);
|
||||||
|
if (inspectData && inspectData[0] && inspectData[0].HostConfig && inspectData[0].HostConfig.PortBindings) {
|
||||||
|
const portBindings = inspectData[0].HostConfig.PortBindings;
|
||||||
|
const mappings: { [key: string]: string } = {};
|
||||||
|
|
||||||
|
// Convert Docker's port binding format to simple host:container mapping
|
||||||
|
for (const [containerPort, hostBindings] of Object.entries(portBindings)) {
|
||||||
|
if (Array.isArray(hostBindings) && hostBindings.length > 0) {
|
||||||
|
const hostPort = (hostBindings[0] as any).HostPort;
|
||||||
|
if (hostPort) {
|
||||||
|
mappings[containerPort.replace('/tcp', '').replace('/udp', '')] = hostPort;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return mappings;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
} catch (error) {
|
||||||
|
// Silently fail - container might not exist
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
190
ts/mod_services/classes.globalregistry.ts
Normal file
190
ts/mod_services/classes.globalregistry.ts
Normal file
@@ -0,0 +1,190 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { DockerContainer } from './classes.dockercontainer.js';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
|
/**
 * Snapshot of a single gitzone project as tracked in the machine-wide
 * registry. One entry exists per project working directory.
 */
export interface IRegisteredProject {
  // Absolute path of the project directory; also used as the registry key.
  projectPath: string;
  projectName: string;
  // Docker container names per service; a field is absent when that
  // service is not used by the project.
  containers: {
    mongo?: string;
    minio?: string;
    elasticsearch?: string;
  };
  // Host ports assigned to each service.
  ports: {
    mongo?: number;
    s3?: number;
    s3Console?: number;
    elasticsearch?: number;
  };
  // Service identifiers enabled for this project (e.g. 'mongodb', 'minio').
  enabledServices: string[];
  // Unix epoch milliseconds of the last registry update for this project.
  lastActive: number;
}
|
||||||
|
|
||||||
|
/**
 * Root shape of the data persisted in the user-home KeyValueStore
 * under the 'gitzone-services' identity.
 */
export interface IGlobalRegistryData {
  // Registered projects keyed by their absolute project path.
  projects: { [projectPath: string]: IRegisteredProject };
}
|
||||||
|
|
||||||
|
export class GlobalRegistry {
|
||||||
|
private static instance: GlobalRegistry | null = null;
|
||||||
|
private kvStore: plugins.npmextra.KeyValueStore<IGlobalRegistryData>;
|
||||||
|
private docker: DockerContainer;
|
||||||
|
|
||||||
|
private constructor() {
|
||||||
|
this.kvStore = new plugins.npmextra.KeyValueStore({
|
||||||
|
typeArg: 'userHomeDir',
|
||||||
|
identityArg: 'gitzone-services',
|
||||||
|
});
|
||||||
|
this.docker = new DockerContainer();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the singleton instance
|
||||||
|
*/
|
||||||
|
public static getInstance(): GlobalRegistry {
|
||||||
|
if (!GlobalRegistry.instance) {
|
||||||
|
GlobalRegistry.instance = new GlobalRegistry();
|
||||||
|
}
|
||||||
|
return GlobalRegistry.instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register or update a project in the global registry
|
||||||
|
*/
|
||||||
|
public async registerProject(data: Omit<IRegisteredProject, 'lastActive'>): Promise<void> {
|
||||||
|
const allData = await this.kvStore.readAll();
|
||||||
|
const projects = allData.projects || {};
|
||||||
|
|
||||||
|
projects[data.projectPath] = {
|
||||||
|
...data,
|
||||||
|
lastActive: Date.now(),
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.kvStore.writeKey('projects', projects);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove a project from the registry
|
||||||
|
*/
|
||||||
|
public async unregisterProject(projectPath: string): Promise<void> {
|
||||||
|
const allData = await this.kvStore.readAll();
|
||||||
|
const projects = allData.projects || {};
|
||||||
|
|
||||||
|
if (projects[projectPath]) {
|
||||||
|
delete projects[projectPath];
|
||||||
|
await this.kvStore.writeKey('projects', projects);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update the lastActive timestamp for a project
|
||||||
|
*/
|
||||||
|
public async touchProject(projectPath: string): Promise<void> {
|
||||||
|
const allData = await this.kvStore.readAll();
|
||||||
|
const projects = allData.projects || {};
|
||||||
|
|
||||||
|
if (projects[projectPath]) {
|
||||||
|
projects[projectPath].lastActive = Date.now();
|
||||||
|
await this.kvStore.writeKey('projects', projects);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get all registered projects
|
||||||
|
*/
|
||||||
|
public async getAllProjects(): Promise<{ [path: string]: IRegisteredProject }> {
|
||||||
|
const allData = await this.kvStore.readAll();
|
||||||
|
return allData.projects || {};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a project is registered
|
||||||
|
*/
|
||||||
|
public async isRegistered(projectPath: string): Promise<boolean> {
|
||||||
|
const projects = await this.getAllProjects();
|
||||||
|
return !!projects[projectPath];
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get status of all containers across all registered projects
|
||||||
|
*/
|
||||||
|
public async getGlobalStatus(): Promise<
|
||||||
|
Array<{
|
||||||
|
projectPath: string;
|
||||||
|
projectName: string;
|
||||||
|
containers: Array<{ name: string; status: string }>;
|
||||||
|
lastActive: number;
|
||||||
|
}>
|
||||||
|
> {
|
||||||
|
const projects = await this.getAllProjects();
|
||||||
|
const result: Array<{
|
||||||
|
projectPath: string;
|
||||||
|
projectName: string;
|
||||||
|
containers: Array<{ name: string; status: string }>;
|
||||||
|
lastActive: number;
|
||||||
|
}> = [];
|
||||||
|
|
||||||
|
for (const [path, project] of Object.entries(projects)) {
|
||||||
|
const containerStatuses: Array<{ name: string; status: string }> = [];
|
||||||
|
|
||||||
|
for (const containerName of Object.values(project.containers)) {
|
||||||
|
if (containerName) {
|
||||||
|
const status = await this.docker.getStatus(containerName);
|
||||||
|
containerStatuses.push({ name: containerName, status });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result.push({
|
||||||
|
projectPath: path,
|
||||||
|
projectName: project.projectName,
|
||||||
|
containers: containerStatuses,
|
||||||
|
lastActive: project.lastActive,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop all containers across all registered projects
|
||||||
|
*/
|
||||||
|
public async stopAll(): Promise<{ stopped: string[]; failed: string[] }> {
|
||||||
|
const projects = await this.getAllProjects();
|
||||||
|
const stopped: string[] = [];
|
||||||
|
const failed: string[] = [];
|
||||||
|
|
||||||
|
for (const project of Object.values(projects)) {
|
||||||
|
for (const containerName of Object.values(project.containers)) {
|
||||||
|
if (containerName) {
|
||||||
|
const status = await this.docker.getStatus(containerName);
|
||||||
|
if (status === 'running') {
|
||||||
|
if (await this.docker.stop(containerName)) {
|
||||||
|
stopped.push(containerName);
|
||||||
|
} else {
|
||||||
|
failed.push(containerName);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { stopped, failed };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove stale registry entries (projects that no longer exist on disk)
|
||||||
|
*/
|
||||||
|
public async cleanup(): Promise<string[]> {
|
||||||
|
const projects = await this.getAllProjects();
|
||||||
|
const removed: string[] = [];
|
||||||
|
|
||||||
|
for (const projectPath of Object.keys(projects)) {
|
||||||
|
const exists = await plugins.smartfs.directory(projectPath).exists();
|
||||||
|
if (!exists) {
|
||||||
|
await this.unregisterProject(projectPath);
|
||||||
|
removed.push(projectPath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return removed;
|
||||||
|
}
|
||||||
|
}
|
||||||
518
ts/mod_services/classes.serviceconfiguration.ts
Normal file
518
ts/mod_services/classes.serviceconfiguration.ts
Normal file
@@ -0,0 +1,518 @@
|
|||||||
|
import * as plugins from './mod.plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
import { DockerContainer } from './classes.dockercontainer.js';
|
||||||
|
|
||||||
|
/**
 * Flat environment configuration persisted as .nogit/env.json.
 * All ports are kept as strings because the file round-trips through JSON
 * and the values are consumed as env-style strings.
 */
export interface IServiceConfig {
  PROJECT_NAME: string;
  // MongoDB connection parts ...
  MONGODB_HOST: string;
  MONGODB_NAME: string;
  MONGODB_PORT: string;
  MONGODB_USER: string;
  MONGODB_PASS: string;
  // ... and the full connection URL derived from the parts above.
  MONGODB_URL: string;
  // MinIO (S3-compatible) connection parts.
  S3_HOST: string;
  S3_PORT: string;
  S3_CONSOLE_PORT: string;
  S3_ACCESSKEY: string;
  S3_SECRETKEY: string;
  S3_BUCKET: string;
  // Derived from S3_HOST.
  S3_ENDPOINT: string;
  S3_USESSL: boolean;
  // Elasticsearch connection parts ...
  ELASTICSEARCH_HOST: string;
  ELASTICSEARCH_PORT: string;
  ELASTICSEARCH_USER: string;
  ELASTICSEARCH_PASS: string;
  // ... and the full URL derived from them.
  ELASTICSEARCH_URL: string;
}
|
||||||
|
|
||||||
|
export class ServiceConfiguration {
|
||||||
|
private configPath: string;
|
||||||
|
private config: IServiceConfig;
|
||||||
|
private docker: DockerContainer;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
this.configPath = plugins.path.join(process.cwd(), '.nogit', 'env.json');
|
||||||
|
this.docker = new DockerContainer();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load or create the configuration
|
||||||
|
*/
|
||||||
|
public async loadOrCreate(): Promise<IServiceConfig> {
|
||||||
|
await this.ensureNogitDirectory();
|
||||||
|
|
||||||
|
if (await this.configExists()) {
|
||||||
|
await this.loadConfig();
|
||||||
|
await this.updateMissingFields();
|
||||||
|
} else {
|
||||||
|
await this.createDefaultConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sync ports from existing Docker containers if they exist
|
||||||
|
await this.syncPortsFromDocker();
|
||||||
|
|
||||||
|
return this.config;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the current configuration
|
||||||
|
*/
|
||||||
|
public getConfig(): IServiceConfig {
|
||||||
|
return this.config;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save the configuration to file
|
||||||
|
*/
|
||||||
|
public async saveConfig(): Promise<void> {
|
||||||
|
await plugins.smartfs
|
||||||
|
.file(this.configPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(JSON.stringify(this.config, null, 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ensure .nogit directory exists
|
||||||
|
*/
|
||||||
|
private async ensureNogitDirectory(): Promise<void> {
|
||||||
|
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
|
||||||
|
await plugins.smartfs.directory(nogitPath).recursive().create();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if configuration file exists
|
||||||
|
*/
|
||||||
|
private async configExists(): Promise<boolean> {
|
||||||
|
return plugins.smartfs.file(this.configPath).exists();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Load configuration from file
|
||||||
|
*/
|
||||||
|
private async loadConfig(): Promise<void> {
|
||||||
|
const configContent = (await plugins.smartfs
|
||||||
|
.file(this.configPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
this.config = JSON.parse(configContent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create default configuration
|
||||||
|
*/
|
||||||
|
private async createDefaultConfig(): Promise<void> {
|
||||||
|
const projectName = await helpers.getProjectName();
|
||||||
|
const mongoPort = await helpers.getRandomAvailablePort();
|
||||||
|
const s3Port = await helpers.getRandomAvailablePort();
|
||||||
|
let s3ConsolePort = s3Port + 1;
|
||||||
|
|
||||||
|
// Ensure console port is also available
|
||||||
|
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||||
|
s3ConsolePort++;
|
||||||
|
}
|
||||||
|
|
||||||
|
const mongoUser = 'defaultadmin';
|
||||||
|
const mongoPass = 'defaultpass';
|
||||||
|
const mongoHost = 'localhost';
|
||||||
|
const mongoName = projectName;
|
||||||
|
const mongoPortStr = mongoPort.toString();
|
||||||
|
const s3Host = 'localhost';
|
||||||
|
const s3PortStr = s3Port.toString();
|
||||||
|
const esHost = 'localhost';
|
||||||
|
const esPort = '9200';
|
||||||
|
const esUser = 'elastic';
|
||||||
|
const esPass = 'elastic';
|
||||||
|
|
||||||
|
this.config = {
|
||||||
|
PROJECT_NAME: projectName,
|
||||||
|
MONGODB_HOST: mongoHost,
|
||||||
|
MONGODB_NAME: mongoName,
|
||||||
|
MONGODB_PORT: mongoPortStr,
|
||||||
|
MONGODB_USER: mongoUser,
|
||||||
|
MONGODB_PASS: mongoPass,
|
||||||
|
MONGODB_URL: `mongodb://${mongoUser}:${mongoPass}@${mongoHost}:${mongoPortStr}/${mongoName}?authSource=admin`,
|
||||||
|
S3_HOST: s3Host,
|
||||||
|
S3_PORT: s3PortStr,
|
||||||
|
S3_CONSOLE_PORT: s3ConsolePort.toString(),
|
||||||
|
S3_ACCESSKEY: 'defaultadmin',
|
||||||
|
S3_SECRETKEY: 'defaultpass',
|
||||||
|
S3_BUCKET: `${projectName}-documents`,
|
||||||
|
S3_ENDPOINT: s3Host,
|
||||||
|
S3_USESSL: false,
|
||||||
|
ELASTICSEARCH_HOST: esHost,
|
||||||
|
ELASTICSEARCH_PORT: esPort,
|
||||||
|
ELASTICSEARCH_USER: esUser,
|
||||||
|
ELASTICSEARCH_PASS: esPass,
|
||||||
|
ELASTICSEARCH_URL: `http://${esUser}:${esPass}@${esHost}:${esPort}`
|
||||||
|
};
|
||||||
|
|
||||||
|
await this.saveConfig();
|
||||||
|
|
||||||
|
logger.log('ok', '✅ Created .nogit/env.json with project defaults');
|
||||||
|
logger.log('info', `📍 MongoDB port: ${mongoPort}`);
|
||||||
|
logger.log('info', `📍 S3 API port: ${s3Port}`);
|
||||||
|
logger.log('info', `📍 S3 Console port: ${s3ConsolePort}`);
|
||||||
|
logger.log('info', `📍 Elasticsearch port: ${esPort}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Update missing fields in existing configuration
|
||||||
|
*/
|
||||||
|
private async updateMissingFields(): Promise<void> {
|
||||||
|
const projectName = await helpers.getProjectName();
|
||||||
|
let updated = false;
|
||||||
|
const fieldsAdded: string[] = [];
|
||||||
|
|
||||||
|
// Check and add missing fields
|
||||||
|
if (!this.config.PROJECT_NAME) {
|
||||||
|
this.config.PROJECT_NAME = projectName;
|
||||||
|
fieldsAdded.push('PROJECT_NAME');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.MONGODB_HOST) {
|
||||||
|
this.config.MONGODB_HOST = 'localhost';
|
||||||
|
fieldsAdded.push('MONGODB_HOST');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.MONGODB_NAME) {
|
||||||
|
this.config.MONGODB_NAME = projectName;
|
||||||
|
fieldsAdded.push('MONGODB_NAME');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.MONGODB_PORT) {
|
||||||
|
const port = await helpers.getRandomAvailablePort();
|
||||||
|
this.config.MONGODB_PORT = port.toString();
|
||||||
|
fieldsAdded.push(`MONGODB_PORT(${port})`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.MONGODB_USER) {
|
||||||
|
this.config.MONGODB_USER = 'defaultadmin';
|
||||||
|
fieldsAdded.push('MONGODB_USER');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.MONGODB_PASS) {
|
||||||
|
this.config.MONGODB_PASS = 'defaultpass';
|
||||||
|
fieldsAdded.push('MONGODB_PASS');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always update MONGODB_URL based on current settings
|
||||||
|
const oldUrl = this.config.MONGODB_URL;
|
||||||
|
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
if (oldUrl !== this.config.MONGODB_URL) {
|
||||||
|
fieldsAdded.push('MONGODB_URL');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_HOST) {
|
||||||
|
this.config.S3_HOST = 'localhost';
|
||||||
|
fieldsAdded.push('S3_HOST');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_PORT) {
|
||||||
|
const port = await helpers.getRandomAvailablePort();
|
||||||
|
this.config.S3_PORT = port.toString();
|
||||||
|
fieldsAdded.push(`S3_PORT(${port})`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_CONSOLE_PORT) {
|
||||||
|
const s3Port = parseInt(this.config.S3_PORT);
|
||||||
|
let consolePort = s3Port + 1;
|
||||||
|
|
||||||
|
while (!(await helpers.isPortAvailable(consolePort))) {
|
||||||
|
consolePort++;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.config.S3_CONSOLE_PORT = consolePort.toString();
|
||||||
|
fieldsAdded.push(`S3_CONSOLE_PORT(${consolePort})`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_ACCESSKEY) {
|
||||||
|
this.config.S3_ACCESSKEY = 'defaultadmin';
|
||||||
|
fieldsAdded.push('S3_ACCESSKEY');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_SECRETKEY) {
|
||||||
|
this.config.S3_SECRETKEY = 'defaultpass';
|
||||||
|
fieldsAdded.push('S3_SECRETKEY');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_BUCKET) {
|
||||||
|
this.config.S3_BUCKET = `${projectName}-documents`;
|
||||||
|
fieldsAdded.push('S3_BUCKET');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.S3_USESSL) {
|
||||||
|
this.config.S3_USESSL = false;
|
||||||
|
fieldsAdded.push('S3_USESSL');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always update S3_ENDPOINT based on current settings
|
||||||
|
const oldEndpoint = this.config.S3_ENDPOINT;
|
||||||
|
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||||
|
if (oldEndpoint !== this.config.S3_ENDPOINT) {
|
||||||
|
fieldsAdded.push('S3_ENDPOINT');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_HOST) {
|
||||||
|
this.config.ELASTICSEARCH_HOST = 'localhost';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_HOST');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_PORT) {
|
||||||
|
this.config.ELASTICSEARCH_PORT = '9200';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_PORT');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_USER) {
|
||||||
|
this.config.ELASTICSEARCH_USER = 'elastic';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_USER');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_PASS) {
|
||||||
|
this.config.ELASTICSEARCH_PASS = 'elastic';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_PASS');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always update ELASTICSEARCH_URL based on current settings
|
||||||
|
const oldEsUrl = this.config.ELASTICSEARCH_URL;
|
||||||
|
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||||
|
if (oldEsUrl !== this.config.ELASTICSEARCH_URL) {
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_URL');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (updated) {
|
||||||
|
await this.saveConfig();
|
||||||
|
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
||||||
|
} else {
|
||||||
|
logger.log('ok', '✅ Configuration complete');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get MongoDB connection string
|
||||||
|
*/
|
||||||
|
public getMongoConnectionString(useNetworkIp: boolean = false): string {
|
||||||
|
const host = useNetworkIp ? '${networkIp}' : this.config.MONGODB_HOST;
|
||||||
|
return `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${host}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get container names
|
||||||
|
*/
|
||||||
|
public getContainerNames() {
|
||||||
|
return {
|
||||||
|
mongo: `${this.config.PROJECT_NAME}-mongodb`,
|
||||||
|
minio: `${this.config.PROJECT_NAME}-minio`,
|
||||||
|
elasticsearch: `${this.config.PROJECT_NAME}-elasticsearch`
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get data directories
|
||||||
|
*/
|
||||||
|
public getDataDirectories() {
|
||||||
|
return {
|
||||||
|
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
|
||||||
|
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata'),
|
||||||
|
elasticsearch: plugins.path.join(process.cwd(), '.nogit', 'esdata')
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Sync port configuration from existing Docker containers
|
||||||
|
*/
|
||||||
|
private async syncPortsFromDocker(): Promise<void> {
|
||||||
|
const containers = this.getContainerNames();
|
||||||
|
let updated = false;
|
||||||
|
|
||||||
|
// Check MongoDB container
|
||||||
|
const mongoStatus = await this.docker.getStatus(containers.mongo);
|
||||||
|
if (mongoStatus !== 'not_exists') {
|
||||||
|
const portMappings = await this.docker.getPortMappings(containers.mongo);
|
||||||
|
if (portMappings && portMappings['27017']) {
|
||||||
|
const dockerPort = portMappings['27017'];
|
||||||
|
if (this.config.MONGODB_PORT !== dockerPort) {
|
||||||
|
logger.log('note', `📍 Syncing MongoDB port from Docker: ${dockerPort}`);
|
||||||
|
this.config.MONGODB_PORT = dockerPort;
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check MinIO container
|
||||||
|
const minioStatus = await this.docker.getStatus(containers.minio);
|
||||||
|
if (minioStatus !== 'not_exists') {
|
||||||
|
const portMappings = await this.docker.getPortMappings(containers.minio);
|
||||||
|
if (portMappings) {
|
||||||
|
if (portMappings['9000']) {
|
||||||
|
const dockerPort = portMappings['9000'];
|
||||||
|
if (this.config.S3_PORT !== dockerPort) {
|
||||||
|
logger.log('note', `📍 Syncing S3 API port from Docker: ${dockerPort}`);
|
||||||
|
this.config.S3_PORT = dockerPort;
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (portMappings['9001']) {
|
||||||
|
const dockerPort = portMappings['9001'];
|
||||||
|
if (this.config.S3_CONSOLE_PORT !== dockerPort) {
|
||||||
|
logger.log('note', `📍 Syncing S3 Console port from Docker: ${dockerPort}`);
|
||||||
|
this.config.S3_CONSOLE_PORT = dockerPort;
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check Elasticsearch container
|
||||||
|
const esStatus = await this.docker.getStatus(containers.elasticsearch);
|
||||||
|
if (esStatus !== 'not_exists') {
|
||||||
|
const portMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||||
|
if (portMappings && portMappings['9200']) {
|
||||||
|
const dockerPort = portMappings['9200'];
|
||||||
|
if (this.config.ELASTICSEARCH_PORT !== dockerPort) {
|
||||||
|
logger.log('note', `📍 Syncing Elasticsearch port from Docker: ${dockerPort}`);
|
||||||
|
this.config.ELASTICSEARCH_PORT = dockerPort;
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (updated) {
|
||||||
|
// Update derived fields
|
||||||
|
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||||
|
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||||
|
|
||||||
|
await this.saveConfig();
|
||||||
|
logger.log('ok', '✅ Configuration synced with Docker containers');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate and update ports if they're not available
|
||||||
|
*/
|
||||||
|
public async validateAndUpdatePorts(): Promise<boolean> {
|
||||||
|
let updated = false;
|
||||||
|
const containers = this.getContainerNames();
|
||||||
|
|
||||||
|
// Check if containers exist - if they do, ports are fine
|
||||||
|
const mongoExists = await this.docker.exists(containers.mongo);
|
||||||
|
const minioExists = await this.docker.exists(containers.minio);
|
||||||
|
const esExists = await this.docker.exists(containers.elasticsearch);
|
||||||
|
|
||||||
|
// Only check port availability if containers don't exist
|
||||||
|
if (!mongoExists) {
|
||||||
|
const mongoPort = parseInt(this.config.MONGODB_PORT);
|
||||||
|
if (!(await helpers.isPortAvailable(mongoPort))) {
|
||||||
|
logger.log('note', `⚠️ MongoDB port ${mongoPort} is in use, finding new port...`);
|
||||||
|
const newPort = await helpers.getRandomAvailablePort();
|
||||||
|
this.config.MONGODB_PORT = newPort.toString();
|
||||||
|
logger.log('ok', `✅ New MongoDB port: ${newPort}`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!minioExists) {
|
||||||
|
const s3Port = parseInt(this.config.S3_PORT);
|
||||||
|
const s3ConsolePort = parseInt(this.config.S3_CONSOLE_PORT);
|
||||||
|
|
||||||
|
if (!(await helpers.isPortAvailable(s3Port))) {
|
||||||
|
logger.log('note', `⚠️ S3 API port ${s3Port} is in use, finding new port...`);
|
||||||
|
const newPort = await helpers.getRandomAvailablePort();
|
||||||
|
this.config.S3_PORT = newPort.toString();
|
||||||
|
logger.log('ok', `✅ New S3 API port: ${newPort}`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||||
|
logger.log('note', `⚠️ S3 Console port ${s3ConsolePort} is in use, finding new port...`);
|
||||||
|
let newPort = parseInt(this.config.S3_PORT) + 1;
|
||||||
|
while (!(await helpers.isPortAvailable(newPort))) {
|
||||||
|
newPort++;
|
||||||
|
}
|
||||||
|
this.config.S3_CONSOLE_PORT = newPort.toString();
|
||||||
|
logger.log('ok', `✅ New S3 Console port: ${newPort}`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!esExists) {
|
||||||
|
const esPort = parseInt(this.config.ELASTICSEARCH_PORT);
|
||||||
|
if (!(await helpers.isPortAvailable(esPort))) {
|
||||||
|
logger.log('note', `⚠️ Elasticsearch port ${esPort} is in use, finding new port...`);
|
||||||
|
const newPort = await helpers.getRandomAvailablePort();
|
||||||
|
this.config.ELASTICSEARCH_PORT = newPort.toString();
|
||||||
|
logger.log('ok', `✅ New Elasticsearch port: ${newPort}`);
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (updated) {
|
||||||
|
// Update derived fields
|
||||||
|
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||||
|
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||||
|
|
||||||
|
await this.saveConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
return updated;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Force reconfigure all ports with new available ones
|
||||||
|
*/
|
||||||
|
public async reconfigurePorts(): Promise<void> {
|
||||||
|
logger.log('note', '🔄 Finding new available ports...');
|
||||||
|
|
||||||
|
const mongoPort = await helpers.getRandomAvailablePort();
|
||||||
|
const s3Port = await helpers.getRandomAvailablePort();
|
||||||
|
let s3ConsolePort = s3Port + 1;
|
||||||
|
|
||||||
|
// Ensure console port is also available
|
||||||
|
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||||
|
s3ConsolePort++;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Elasticsearch uses standard port 9200
|
||||||
|
const esPort = '9200';
|
||||||
|
|
||||||
|
this.config.MONGODB_PORT = mongoPort.toString();
|
||||||
|
this.config.S3_PORT = s3Port.toString();
|
||||||
|
this.config.S3_CONSOLE_PORT = s3ConsolePort.toString();
|
||||||
|
this.config.ELASTICSEARCH_PORT = esPort;
|
||||||
|
|
||||||
|
// Update derived fields
|
||||||
|
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||||
|
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||||
|
|
||||||
|
await this.saveConfig();
|
||||||
|
|
||||||
|
logger.log('ok', '✅ New port configuration:');
|
||||||
|
logger.log('info', ` 📍 MongoDB: ${mongoPort}`);
|
||||||
|
logger.log('info', ` 📍 S3 API: ${s3Port}`);
|
||||||
|
logger.log('info', ` 📍 S3 Console: ${s3ConsolePort}`);
|
||||||
|
logger.log('info', ` 📍 Elasticsearch: ${esPort}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
956
ts/mod_services/classes.servicemanager.ts
Normal file
956
ts/mod_services/classes.servicemanager.ts
Normal file
@@ -0,0 +1,956 @@
|
|||||||
|
import * as plugins from './mod.plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import { ServiceConfiguration } from './classes.serviceconfiguration.js';
|
||||||
|
import { DockerContainer } from './classes.dockercontainer.js';
|
||||||
|
import { GlobalRegistry } from './classes.globalregistry.js';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
|
export class ServiceManager {
|
||||||
|
private config: ServiceConfiguration;
|
||||||
|
private docker: DockerContainer;
|
||||||
|
private enabledServices: string[] | null = null;
|
||||||
|
private globalRegistry: GlobalRegistry;
|
||||||
|
|
||||||
|
constructor() {
  // Per-project configuration handling (.nogit/env.json).
  this.config = new ServiceConfiguration();
  // Wrapper for docker container operations (status/start/stop/inspect).
  this.docker = new DockerContainer();
  // Machine-wide registry of all gitzone projects and their containers.
  this.globalRegistry = GlobalRegistry.getInstance();
}
|
||||||
|
|
||||||
|
/**
 * Initialize the service manager.
 * Verifies Docker is installed, loads (or creates) the project service
 * configuration, resolves the enabled-service selection from npmextra.json,
 * and ensures the configured ports are actually usable.
 */
public async init(): Promise<void> {
  // Check Docker availability
  if (!(await this.docker.checkDocker())) {
    logger.log('error', 'Error: Docker is not installed. Please install Docker first.');
    // Hard exit: nothing below can function without Docker.
    process.exit(1);
  }

  // Load or create configuration
  await this.config.loadOrCreate();
  logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);

  // Load service selection from npmextra.json
  await this.loadServiceConfiguration();

  // Validate and update ports if needed
  await this.config.validateAndUpdatePorts();
}
|
||||||
|
|
||||||
|
/**
 * Load service configuration from npmextra.json.
 * If gitzone.services is absent or empty, interactively asks the user which
 * services to enable and persists the answer back to npmextra.json.
 */
private async loadServiceConfiguration(): Promise<void> {
  const npmextraConfig = new plugins.npmextra.Npmextra(process.cwd());
  const gitzoneConfig = npmextraConfig.dataFor<any>('gitzone', {});

  // Check if services array exists
  if (!gitzoneConfig.services || !Array.isArray(gitzoneConfig.services) || gitzoneConfig.services.length === 0) {
    // Prompt user to select services
    const smartinteract = new plugins.smartinteract.SmartInteract();
    const response = await smartinteract.askQuestion({
      name: 'services',
      type: 'checkbox',
      message: 'Which services do you want to enable for this project?',
      choices: [
        { name: 'MongoDB', value: 'mongodb' },
        { name: 'MinIO (S3)', value: 'minio' },
        { name: 'Elasticsearch', value: 'elasticsearch' }
      ],
      default: ['mongodb', 'minio', 'elasticsearch']
    });

    // `||` only falls back when value is null/undefined — an explicitly
    // empty selection ([]) is truthy and is kept as-is.
    this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];

    // Save to npmextra.json
    await this.saveServiceConfiguration(this.enabledServices);
  } else {
    this.enabledServices = gitzoneConfig.services;
    logger.log('info', `🔧 Enabled services: ${this.enabledServices.join(', ')}`);
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Save service configuration to npmextra.json
|
||||||
|
*/
|
||||||
|
private async saveServiceConfiguration(services: string[]): Promise<void> {
|
||||||
|
const npmextraPath = plugins.path.join(process.cwd(), 'npmextra.json');
|
||||||
|
let npmextraData: any = {};
|
||||||
|
|
||||||
|
// Read existing npmextra.json if it exists
|
||||||
|
if (await plugins.smartfs.file(npmextraPath).exists()) {
|
||||||
|
const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read();
|
||||||
|
npmextraData = JSON.parse(content as string);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update gitzone.services
|
||||||
|
if (!npmextraData.gitzone) {
|
||||||
|
npmextraData.gitzone = {};
|
||||||
|
}
|
||||||
|
npmextraData.gitzone.services = services;
|
||||||
|
|
||||||
|
// Write back to npmextra.json
|
||||||
|
await plugins.smartfs
|
||||||
|
.file(npmextraPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(JSON.stringify(npmextraData, null, 2));
|
||||||
|
|
||||||
|
logger.log('ok', `✅ Saved service configuration to npmextra.json`);
|
||||||
|
logger.log('info', `🔧 Enabled services: ${services.join(', ')}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a service is enabled
|
||||||
|
*/
|
||||||
|
private isServiceEnabled(service: string): boolean {
|
||||||
|
if (!this.enabledServices) {
|
||||||
|
return true; // If no configuration, enable all
|
||||||
|
}
|
||||||
|
return this.enabledServices.includes(service);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Register this project with the global registry
|
||||||
|
*/
|
||||||
|
private async registerWithGlobalRegistry(): Promise<void> {
|
||||||
|
const config = this.config.getConfig();
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
|
||||||
|
await this.globalRegistry.registerProject({
|
||||||
|
projectPath: process.cwd(),
|
||||||
|
projectName: config.PROJECT_NAME,
|
||||||
|
containers: {
|
||||||
|
mongo: containers.mongo,
|
||||||
|
minio: containers.minio,
|
||||||
|
elasticsearch: containers.elasticsearch,
|
||||||
|
},
|
||||||
|
ports: {
|
||||||
|
mongo: parseInt(config.MONGODB_PORT),
|
||||||
|
s3: parseInt(config.S3_PORT),
|
||||||
|
s3Console: parseInt(config.S3_CONSOLE_PORT),
|
||||||
|
elasticsearch: parseInt(config.ELASTICSEARCH_PORT),
|
||||||
|
},
|
||||||
|
enabledServices: this.enabledServices || ['mongodb', 'minio', 'elasticsearch'],
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start all enabled services
|
||||||
|
*/
|
||||||
|
public async startAll(): Promise<void> {
|
||||||
|
let first = true;
|
||||||
|
if (this.isServiceEnabled('mongodb')) {
|
||||||
|
if (!first) console.log();
|
||||||
|
await this.startMongoDB();
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
if (this.isServiceEnabled('minio')) {
|
||||||
|
if (!first) console.log();
|
||||||
|
await this.startMinIO();
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
if (this.isServiceEnabled('elasticsearch')) {
|
||||||
|
if (!first) console.log();
|
||||||
|
await this.startElasticsearch();
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Register with global registry
|
||||||
|
await this.registerWithGlobalRegistry();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop all enabled services
|
||||||
|
*/
|
||||||
|
public async stopAll(): Promise<void> {
|
||||||
|
let first = true;
|
||||||
|
if (this.isServiceEnabled('mongodb')) {
|
||||||
|
if (!first) console.log();
|
||||||
|
await this.stopMongoDB();
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
if (this.isServiceEnabled('minio')) {
|
||||||
|
if (!first) console.log();
|
||||||
|
await this.stopMinIO();
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
if (this.isServiceEnabled('elasticsearch')) {
|
||||||
|
if (!first) console.log();
|
||||||
|
await this.stopElasticsearch();
|
||||||
|
first = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start MongoDB service
|
||||||
|
*/
|
||||||
|
public async startMongoDB(): Promise<void> {
|
||||||
|
logger.log('note', '📦 MongoDB:');
|
||||||
|
|
||||||
|
const config = this.config.getConfig();
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
const directories = this.config.getDataDirectories();
|
||||||
|
|
||||||
|
// Ensure data directory exists
|
||||||
|
await plugins.smartfs.directory(directories.mongo).recursive().create();
|
||||||
|
|
||||||
|
const status = await this.docker.getStatus(containers.mongo);
|
||||||
|
|
||||||
|
switch (status) {
|
||||||
|
case 'running':
|
||||||
|
logger.log('ok', ' Already running ✓');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'stopped':
|
||||||
|
// Check if port mapping matches config
|
||||||
|
const mongoPortMappings = await this.docker.getPortMappings(containers.mongo);
|
||||||
|
if (mongoPortMappings && mongoPortMappings['27017'] !== config.MONGODB_PORT) {
|
||||||
|
logger.log('note', ' Port configuration changed, recreating container...');
|
||||||
|
await this.docker.remove(containers.mongo, true);
|
||||||
|
// Fall through to create new container
|
||||||
|
const success = await this.docker.run({
|
||||||
|
name: containers.mongo,
|
||||||
|
image: 'mongo:7.0',
|
||||||
|
ports: {
|
||||||
|
[`0.0.0.0:${config.MONGODB_PORT}`]: '27017'
|
||||||
|
},
|
||||||
|
volumes: {
|
||||||
|
[directories.mongo]: '/data/db'
|
||||||
|
},
|
||||||
|
environment: {
|
||||||
|
MONGO_INITDB_ROOT_USERNAME: config.MONGODB_USER,
|
||||||
|
MONGO_INITDB_ROOT_PASSWORD: config.MONGODB_PASS,
|
||||||
|
MONGO_INITDB_DATABASE: config.MONGODB_NAME
|
||||||
|
},
|
||||||
|
restart: 'unless-stopped',
|
||||||
|
command: '--bind_ip_all'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
logger.log('ok', ' Recreated with new port ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to recreate container');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Ports match, just start the container
|
||||||
|
if (await this.docker.start(containers.mongo)) {
|
||||||
|
logger.log('ok', ' Started ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to start');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'not_exists':
|
||||||
|
logger.log('note', ' Creating container...');
|
||||||
|
|
||||||
|
const success = await this.docker.run({
|
||||||
|
name: containers.mongo,
|
||||||
|
image: 'mongo:7.0',
|
||||||
|
ports: {
|
||||||
|
[`0.0.0.0:${config.MONGODB_PORT}`]: '27017'
|
||||||
|
},
|
||||||
|
volumes: {
|
||||||
|
[directories.mongo]: '/data/db'
|
||||||
|
},
|
||||||
|
environment: {
|
||||||
|
MONGO_INITDB_ROOT_USERNAME: config.MONGODB_USER,
|
||||||
|
MONGO_INITDB_ROOT_PASSWORD: config.MONGODB_PASS,
|
||||||
|
MONGO_INITDB_DATABASE: config.MONGODB_NAME
|
||||||
|
},
|
||||||
|
restart: 'unless-stopped',
|
||||||
|
command: '--bind_ip_all'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
logger.log('ok', ' Created and started ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to create container');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('info', ` Container: ${containers.mongo}`);
|
||||||
|
logger.log('info', ` Port: ${config.MONGODB_PORT}`);
|
||||||
|
logger.log('info', ` Connection: ${this.config.getMongoConnectionString()}`);
|
||||||
|
|
||||||
|
// Show Compass connection string
|
||||||
|
const networkIp = await helpers.getLocalNetworkIp();
|
||||||
|
const compassString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
logger.log('ok', ` Compass: ${compassString}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start MinIO service
|
||||||
|
*/
|
||||||
|
public async startMinIO(): Promise<void> {
|
||||||
|
logger.log('note', '📦 S3/MinIO:');
|
||||||
|
|
||||||
|
const config = this.config.getConfig();
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
const directories = this.config.getDataDirectories();
|
||||||
|
|
||||||
|
// Ensure data directory exists
|
||||||
|
await plugins.smartfs.directory(directories.minio).recursive().create();
|
||||||
|
|
||||||
|
const status = await this.docker.getStatus(containers.minio);
|
||||||
|
|
||||||
|
switch (status) {
|
||||||
|
case 'running':
|
||||||
|
logger.log('ok', ' Already running ✓');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'stopped':
|
||||||
|
// Check if port mapping matches config
|
||||||
|
const minioPortMappings = await this.docker.getPortMappings(containers.minio);
|
||||||
|
if (minioPortMappings &&
|
||||||
|
(minioPortMappings['9000'] !== config.S3_PORT ||
|
||||||
|
minioPortMappings['9001'] !== config.S3_CONSOLE_PORT)) {
|
||||||
|
logger.log('note', ' Port configuration changed, recreating container...');
|
||||||
|
await this.docker.remove(containers.minio, true);
|
||||||
|
// Fall through to create new container
|
||||||
|
const success = await this.docker.run({
|
||||||
|
name: containers.minio,
|
||||||
|
image: 'minio/minio',
|
||||||
|
ports: {
|
||||||
|
[config.S3_PORT]: '9000',
|
||||||
|
[config.S3_CONSOLE_PORT]: '9001'
|
||||||
|
},
|
||||||
|
volumes: {
|
||||||
|
[directories.minio]: '/data'
|
||||||
|
},
|
||||||
|
environment: {
|
||||||
|
MINIO_ROOT_USER: config.S3_ACCESSKEY,
|
||||||
|
MINIO_ROOT_PASSWORD: config.S3_SECRETKEY
|
||||||
|
},
|
||||||
|
restart: 'unless-stopped',
|
||||||
|
command: 'server /data --console-address ":9001"'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
logger.log('ok', ' Recreated with new ports ✓');
|
||||||
|
|
||||||
|
// Wait for MinIO to be ready
|
||||||
|
await plugins.smartdelay.delayFor(3000);
|
||||||
|
|
||||||
|
// Create default bucket
|
||||||
|
await this.docker.exec(
|
||||||
|
containers.minio,
|
||||||
|
`mc alias set local http://localhost:9000 ${config.S3_ACCESSKEY} ${config.S3_SECRETKEY}`
|
||||||
|
);
|
||||||
|
|
||||||
|
await this.docker.exec(
|
||||||
|
containers.minio,
|
||||||
|
`mc mb local/${config.S3_BUCKET}`
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log('ok', ` Bucket '${config.S3_BUCKET}' created ✓`);
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to recreate container');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Ports match, just start the container
|
||||||
|
if (await this.docker.start(containers.minio)) {
|
||||||
|
logger.log('ok', ' Started ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to start');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'not_exists':
|
||||||
|
logger.log('note', ' Creating container...');
|
||||||
|
|
||||||
|
const success = await this.docker.run({
|
||||||
|
name: containers.minio,
|
||||||
|
image: 'minio/minio',
|
||||||
|
ports: {
|
||||||
|
[config.S3_PORT]: '9000',
|
||||||
|
[config.S3_CONSOLE_PORT]: '9001'
|
||||||
|
},
|
||||||
|
volumes: {
|
||||||
|
[directories.minio]: '/data'
|
||||||
|
},
|
||||||
|
environment: {
|
||||||
|
MINIO_ROOT_USER: config.S3_ACCESSKEY,
|
||||||
|
MINIO_ROOT_PASSWORD: config.S3_SECRETKEY
|
||||||
|
},
|
||||||
|
restart: 'unless-stopped',
|
||||||
|
command: 'server /data --console-address ":9001"'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
logger.log('ok', ' Created and started ✓');
|
||||||
|
|
||||||
|
// Wait for MinIO to be ready
|
||||||
|
await plugins.smartdelay.delayFor(3000);
|
||||||
|
|
||||||
|
// Create default bucket
|
||||||
|
await this.docker.exec(
|
||||||
|
containers.minio,
|
||||||
|
`mc alias set local http://localhost:9000 ${config.S3_ACCESSKEY} ${config.S3_SECRETKEY}`
|
||||||
|
);
|
||||||
|
|
||||||
|
await this.docker.exec(
|
||||||
|
containers.minio,
|
||||||
|
`mc mb local/${config.S3_BUCKET}`
|
||||||
|
);
|
||||||
|
|
||||||
|
logger.log('ok', ` Bucket '${config.S3_BUCKET}' created ✓`);
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to create container');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('info', ` Container: ${containers.minio}`);
|
||||||
|
logger.log('info', ` Port: ${config.S3_PORT}`);
|
||||||
|
logger.log('info', ` Bucket: ${config.S3_BUCKET}`);
|
||||||
|
logger.log('info', ` API: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||||
|
logger.log('info', ` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_ACCESSKEY}/***)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start Elasticsearch service
|
||||||
|
*/
|
||||||
|
public async startElasticsearch(): Promise<void> {
|
||||||
|
logger.log('note', '📦 Elasticsearch:');
|
||||||
|
|
||||||
|
const config = this.config.getConfig();
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
const directories = this.config.getDataDirectories();
|
||||||
|
|
||||||
|
// Ensure data directory exists
|
||||||
|
await plugins.smartfs.directory(directories.elasticsearch).recursive().create();
|
||||||
|
|
||||||
|
const status = await this.docker.getStatus(containers.elasticsearch);
|
||||||
|
|
||||||
|
switch (status) {
|
||||||
|
case 'running':
|
||||||
|
logger.log('ok', ' Already running ✓');
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'stopped':
|
||||||
|
// Check if port mapping matches config
|
||||||
|
const esPortMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||||
|
if (esPortMappings && esPortMappings['9200'] !== config.ELASTICSEARCH_PORT) {
|
||||||
|
logger.log('note', ' Port configuration changed, recreating container...');
|
||||||
|
await this.docker.remove(containers.elasticsearch, true);
|
||||||
|
// Fall through to create new container
|
||||||
|
const success = await this.docker.run({
|
||||||
|
name: containers.elasticsearch,
|
||||||
|
image: 'elasticsearch:8.11.0',
|
||||||
|
ports: {
|
||||||
|
[`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
|
||||||
|
},
|
||||||
|
volumes: {
|
||||||
|
[directories.elasticsearch]: '/usr/share/elasticsearch/data'
|
||||||
|
},
|
||||||
|
environment: {
|
||||||
|
'discovery.type': 'single-node',
|
||||||
|
'xpack.security.enabled': 'true',
|
||||||
|
'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
|
||||||
|
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
|
||||||
|
},
|
||||||
|
restart: 'unless-stopped'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
logger.log('ok', ' Recreated with new port ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to recreate container');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Ports match, just start the container
|
||||||
|
if (await this.docker.start(containers.elasticsearch)) {
|
||||||
|
logger.log('ok', ' Started ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to start');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'not_exists':
|
||||||
|
logger.log('note', ' Creating container...');
|
||||||
|
|
||||||
|
const success = await this.docker.run({
|
||||||
|
name: containers.elasticsearch,
|
||||||
|
image: 'elasticsearch:8.11.0',
|
||||||
|
ports: {
|
||||||
|
[`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
|
||||||
|
},
|
||||||
|
volumes: {
|
||||||
|
[directories.elasticsearch]: '/usr/share/elasticsearch/data'
|
||||||
|
},
|
||||||
|
environment: {
|
||||||
|
'discovery.type': 'single-node',
|
||||||
|
'xpack.security.enabled': 'true',
|
||||||
|
'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
|
||||||
|
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
|
||||||
|
},
|
||||||
|
restart: 'unless-stopped'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (success) {
|
||||||
|
logger.log('ok', ' Created and started ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to create container');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('info', ` Container: ${containers.elasticsearch}`);
|
||||||
|
logger.log('info', ` Port: ${config.ELASTICSEARCH_PORT}`);
|
||||||
|
logger.log('info', ` Connection: ${config.ELASTICSEARCH_URL}`);
|
||||||
|
logger.log('info', ` Username: ${config.ELASTICSEARCH_USER}`);
|
||||||
|
logger.log('info', ` Password: ${config.ELASTICSEARCH_PASS}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop MongoDB service
|
||||||
|
*/
|
||||||
|
public async stopMongoDB(): Promise<void> {
|
||||||
|
logger.log('note', '📦 MongoDB:');
|
||||||
|
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
const status = await this.docker.getStatus(containers.mongo);
|
||||||
|
|
||||||
|
if (status === 'running') {
|
||||||
|
if (await this.docker.stop(containers.mongo)) {
|
||||||
|
logger.log('ok', ' Stopped ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to stop');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.log('note', ' Not running');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop MinIO service
|
||||||
|
*/
|
||||||
|
public async stopMinIO(): Promise<void> {
|
||||||
|
logger.log('note', '📦 S3/MinIO:');
|
||||||
|
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
const status = await this.docker.getStatus(containers.minio);
|
||||||
|
|
||||||
|
if (status === 'running') {
|
||||||
|
if (await this.docker.stop(containers.minio)) {
|
||||||
|
logger.log('ok', ' Stopped ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to stop');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.log('note', ' Not running');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop Elasticsearch service
|
||||||
|
*/
|
||||||
|
public async stopElasticsearch(): Promise<void> {
|
||||||
|
logger.log('note', '📦 Elasticsearch:');
|
||||||
|
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
const status = await this.docker.getStatus(containers.elasticsearch);
|
||||||
|
|
||||||
|
if (status === 'running') {
|
||||||
|
if (await this.docker.stop(containers.elasticsearch)) {
|
||||||
|
logger.log('ok', ' Stopped ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('error', ' Failed to stop');
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
logger.log('note', ' Not running');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Show service status
 *
 * Prints a status report (running / stopped / not installed) for the
 * MongoDB, MinIO and Elasticsearch containers: connection details for
 * running services, and port-availability hints for uninstalled ones so
 * the user can spot clashes before creating containers.
 */
public async showStatus(): Promise<void> {
  helpers.printHeader('Service Status');

  const config = this.config.getConfig();
  const containers = this.config.getContainerNames();

  logger.log('info', `Project: ${config.PROJECT_NAME}`);
  console.log();

  // MongoDB status
  const mongoStatus = await this.docker.getStatus(containers.mongo);
  switch (mongoStatus) {
    case 'running':
      logger.log('ok', '📦 MongoDB: 🟢 Running');
      logger.log('info', `  ├─ Container: ${containers.mongo}`);
      logger.log('info', `  ├─ Port: ${config.MONGODB_PORT}`);
      logger.log('info', `  ├─ Connection: ${this.config.getMongoConnectionString()}`);

      // Show Compass connection string (uses the LAN IP so other machines
      // on the network can connect too).
      const networkIp = await helpers.getLocalNetworkIp();
      const compassString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
      logger.log('ok', `  └─ Compass: ${compassString}`);
      break;
    case 'stopped':
      logger.log('note', '📦 MongoDB: 🟡 Stopped');
      logger.log('info', `  ├─ Container: ${containers.mongo}`);
      logger.log('info', `  └─ Port: ${config.MONGODB_PORT}`);
      break;
    case 'not_exists':
      logger.log('info', '📦 MongoDB: ⚪ Not installed');
      // Check port availability so a future `start` won't clash.
      const mongoPort = parseInt(config.MONGODB_PORT);
      const mongoAvailable = await helpers.isPortAvailable(mongoPort);
      if (!mongoAvailable) {
        logger.log('error', `  └─ ⚠️ Port ${mongoPort} is in use by another process`);
      } else {
        logger.log('info', `  └─ Port ${mongoPort} is available`);
      }
      break;
  }

  // MinIO status
  const minioStatus = await this.docker.getStatus(containers.minio);
  switch (minioStatus) {
    case 'running':
      logger.log('ok', '📦 S3/MinIO: 🟢 Running');
      logger.log('info', `  ├─ Container: ${containers.minio}`);
      logger.log('info', `  ├─ API: http://${config.S3_HOST}:${config.S3_PORT}`);
      logger.log('info', `  ├─ Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
      logger.log('info', `  └─ Bucket: ${config.S3_BUCKET}`);
      break;
    case 'stopped':
      logger.log('note', '📦 S3/MinIO: 🟡 Stopped');
      logger.log('info', `  ├─ Container: ${containers.minio}`);
      logger.log('info', `  ├─ API Port: ${config.S3_PORT}`);
      logger.log('info', `  └─ Console Port: ${config.S3_CONSOLE_PORT}`);
      break;
    case 'not_exists':
      logger.log('info', '📦 S3/MinIO: ⚪ Not installed');
      // Check port availability for BOTH published ports (API + console).
      const s3Port = parseInt(config.S3_PORT);
      const s3ConsolePort = parseInt(config.S3_CONSOLE_PORT);
      const s3Available = await helpers.isPortAvailable(s3Port);
      const consoleAvailable = await helpers.isPortAvailable(s3ConsolePort);

      if (!s3Available || !consoleAvailable) {
        if (!s3Available) {
          logger.log('error', `  ├─ ⚠️ API Port ${s3Port} is in use`);
        } else {
          logger.log('info', `  ├─ API Port ${s3Port} is available`);
        }
        if (!consoleAvailable) {
          logger.log('error', `  └─ ⚠️ Console Port ${s3ConsolePort} is in use`);
        } else {
          logger.log('info', `  └─ Console Port ${s3ConsolePort} is available`);
        }
      } else {
        logger.log('info', `  ├─ API Port ${s3Port} is available`);
        logger.log('info', `  └─ Console Port ${s3ConsolePort} is available`);
      }
      break;
  }

  // Elasticsearch status
  const esStatus = await this.docker.getStatus(containers.elasticsearch);
  switch (esStatus) {
    case 'running':
      logger.log('ok', '📦 Elasticsearch: 🟢 Running');
      logger.log('info', `  ├─ Container: ${containers.elasticsearch}`);
      logger.log('info', `  ├─ Port: ${config.ELASTICSEARCH_PORT}`);
      logger.log('info', `  ├─ Connection: ${config.ELASTICSEARCH_URL}`);
      // NOTE(review): credentials printed in clear here, while showConfig
      // masks the password — confirm this is intentional.
      logger.log('info', `  └─ Credentials: ${config.ELASTICSEARCH_USER}/${config.ELASTICSEARCH_PASS}`);
      break;
    case 'stopped':
      logger.log('note', '📦 Elasticsearch: 🟡 Stopped');
      logger.log('info', `  ├─ Container: ${containers.elasticsearch}`);
      logger.log('info', `  └─ Port: ${config.ELASTICSEARCH_PORT}`);
      break;
    case 'not_exists':
      logger.log('info', '📦 Elasticsearch: ⚪ Not installed');
      // Check port availability so a future `start` won't clash.
      const esPort = parseInt(config.ELASTICSEARCH_PORT);
      const esAvailable = await helpers.isPortAvailable(esPort);
      if (!esAvailable) {
        logger.log('error', `  └─ ⚠️ Port ${esPort} is in use by another process`);
      } else {
        logger.log('info', `  └─ Port ${esPort} is available`);
      }
      break;
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Show configuration
|
||||||
|
*/
|
||||||
|
public async showConfig(): Promise<void> {
|
||||||
|
helpers.printHeader('Current Configuration');
|
||||||
|
|
||||||
|
const config = this.config.getConfig();
|
||||||
|
|
||||||
|
logger.log('info', `Project: ${config.PROJECT_NAME}`);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'MongoDB:');
|
||||||
|
logger.log('info', ` Host: ${config.MONGODB_HOST}:${config.MONGODB_PORT}`);
|
||||||
|
logger.log('info', ` Database: ${config.MONGODB_NAME}`);
|
||||||
|
logger.log('info', ` User: ${config.MONGODB_USER}`);
|
||||||
|
logger.log('info', ' Password: ***');
|
||||||
|
logger.log('info', ` Container: ${this.config.getContainerNames().mongo}`);
|
||||||
|
logger.log('info', ` Data: ${this.config.getDataDirectories().mongo}`);
|
||||||
|
logger.log('info', ` Connection: ${this.config.getMongoConnectionString()}`);
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
logger.log('note', 'S3/MinIO:');
|
||||||
|
logger.log('info', ` Host: ${config.S3_HOST}`);
|
||||||
|
logger.log('info', ` API Port: ${config.S3_PORT}`);
|
||||||
|
logger.log('info', ` Console Port: ${config.S3_CONSOLE_PORT}`);
|
||||||
|
logger.log('info', ` Access Key: ${config.S3_ACCESSKEY}`);
|
||||||
|
logger.log('info', ' Secret Key: ***');
|
||||||
|
logger.log('info', ` Bucket: ${config.S3_BUCKET}`);
|
||||||
|
logger.log('info', ` Use SSL: ${config.S3_USESSL}`);
|
||||||
|
logger.log('info', ` Container: ${this.config.getContainerNames().minio}`);
|
||||||
|
logger.log('info', ` Data: ${this.config.getDataDirectories().minio}`);
|
||||||
|
logger.log('info', ` Endpoint: ${config.S3_ENDPOINT}`);
|
||||||
|
logger.log('info', ` Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
logger.log('note', 'Elasticsearch:');
|
||||||
|
logger.log('info', ` Host: ${config.ELASTICSEARCH_HOST}:${config.ELASTICSEARCH_PORT}`);
|
||||||
|
logger.log('info', ` User: ${config.ELASTICSEARCH_USER}`);
|
||||||
|
logger.log('info', ' Password: ***');
|
||||||
|
logger.log('info', ` Container: ${this.config.getContainerNames().elasticsearch}`);
|
||||||
|
logger.log('info', ` Data: ${this.config.getDataDirectories().elasticsearch}`);
|
||||||
|
logger.log('info', ` Connection: ${config.ELASTICSEARCH_URL}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Show MongoDB Compass connection string
|
||||||
|
*/
|
||||||
|
public async showCompassConnection(): Promise<void> {
|
||||||
|
helpers.printHeader('MongoDB Compass Connection');
|
||||||
|
|
||||||
|
const config = this.config.getConfig();
|
||||||
|
const networkIp = await helpers.getLocalNetworkIp();
|
||||||
|
|
||||||
|
const connectionString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
|
||||||
|
|
||||||
|
logger.log('info', 'MongoDB Compass is a GUI tool for MongoDB. To connect:');
|
||||||
|
console.log();
|
||||||
|
logger.log('info', '1. Download MongoDB Compass from:');
|
||||||
|
logger.log('info', ' https://www.mongodb.com/products/compass');
|
||||||
|
console.log();
|
||||||
|
logger.log('info', '2. Open Compass and paste this connection string:');
|
||||||
|
logger.log('ok', ` ${connectionString}`);
|
||||||
|
console.log();
|
||||||
|
logger.log('note', 'Connection Details:');
|
||||||
|
logger.log('info', ` Network IP: ${networkIp}`);
|
||||||
|
logger.log('info', ` Port: ${config.MONGODB_PORT}`);
|
||||||
|
logger.log('info', ` Database: ${config.MONGODB_NAME}`);
|
||||||
|
logger.log('info', ` Username: ${config.MONGODB_USER}`);
|
||||||
|
logger.log('info', ` Auth Source: admin`);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Show logs for a service
|
||||||
|
*/
|
||||||
|
public async showLogs(service: string, lines: number = 20): Promise<void> {
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
|
||||||
|
switch (service) {
|
||||||
|
case 'mongo':
|
||||||
|
case 'mongodb':
|
||||||
|
if (await this.docker.isRunning(containers.mongo)) {
|
||||||
|
helpers.printHeader(`MongoDB Logs (last ${lines} lines)`);
|
||||||
|
const logs = await this.docker.logs(containers.mongo, lines);
|
||||||
|
console.log(logs);
|
||||||
|
} else {
|
||||||
|
logger.log('note', 'MongoDB container is not running');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'minio':
|
||||||
|
case 's3':
|
||||||
|
if (await this.docker.isRunning(containers.minio)) {
|
||||||
|
helpers.printHeader(`S3/MinIO Logs (last ${lines} lines)`);
|
||||||
|
const logs = await this.docker.logs(containers.minio, lines);
|
||||||
|
console.log(logs);
|
||||||
|
} else {
|
||||||
|
logger.log('note', 'S3/MinIO container is not running');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'elasticsearch':
|
||||||
|
case 'es':
|
||||||
|
if (await this.docker.isRunning(containers.elasticsearch)) {
|
||||||
|
helpers.printHeader(`Elasticsearch Logs (last ${lines} lines)`);
|
||||||
|
const logs = await this.docker.logs(containers.elasticsearch, lines);
|
||||||
|
console.log(logs);
|
||||||
|
} else {
|
||||||
|
logger.log('note', 'Elasticsearch container is not running');
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'all':
|
||||||
|
case '':
|
||||||
|
await this.showLogs('mongo', lines);
|
||||||
|
console.log();
|
||||||
|
await this.showLogs('minio', lines);
|
||||||
|
console.log();
|
||||||
|
await this.showLogs('elasticsearch', lines);
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
logger.log('note', 'Usage: gitzone services logs [mongo|s3|elasticsearch|all] [lines]');
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Remove containers
|
||||||
|
*/
|
||||||
|
public async removeContainers(): Promise<void> {
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
let removed = false;
|
||||||
|
|
||||||
|
if (await this.docker.exists(containers.mongo)) {
|
||||||
|
if (await this.docker.remove(containers.mongo, true)) {
|
||||||
|
logger.log('ok', ' MongoDB container removed ✓');
|
||||||
|
removed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await this.docker.exists(containers.minio)) {
|
||||||
|
if (await this.docker.remove(containers.minio, true)) {
|
||||||
|
logger.log('ok', ' S3/MinIO container removed ✓');
|
||||||
|
removed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await this.docker.exists(containers.elasticsearch)) {
|
||||||
|
if (await this.docker.remove(containers.elasticsearch, true)) {
|
||||||
|
logger.log('ok', ' Elasticsearch container removed ✓');
|
||||||
|
removed = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!removed) {
|
||||||
|
logger.log('note', ' No containers to remove');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if all containers are gone, then unregister from global registry
|
||||||
|
const mongoExists = await this.docker.exists(containers.mongo);
|
||||||
|
const minioExists = await this.docker.exists(containers.minio);
|
||||||
|
const esExists = await this.docker.exists(containers.elasticsearch);
|
||||||
|
|
||||||
|
if (!mongoExists && !minioExists && !esExists) {
|
||||||
|
await this.globalRegistry.unregisterProject(process.cwd());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Clean data directories
|
||||||
|
*/
|
||||||
|
public async cleanData(): Promise<void> {
|
||||||
|
const directories = this.config.getDataDirectories();
|
||||||
|
let cleaned = false;
|
||||||
|
|
||||||
|
if (await plugins.smartfs.directory(directories.mongo).exists()) {
|
||||||
|
await plugins.smartfs.directory(directories.mongo).recursive().delete();
|
||||||
|
logger.log('ok', ' MongoDB data removed ✓');
|
||||||
|
cleaned = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await plugins.smartfs.directory(directories.minio).exists()) {
|
||||||
|
await plugins.smartfs.directory(directories.minio).recursive().delete();
|
||||||
|
logger.log('ok', ' S3/MinIO data removed ✓');
|
||||||
|
cleaned = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await plugins.smartfs.directory(directories.elasticsearch).exists()) {
|
||||||
|
await plugins.smartfs.directory(directories.elasticsearch).recursive().delete();
|
||||||
|
logger.log('ok', ' Elasticsearch data removed ✓');
|
||||||
|
cleaned = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!cleaned) {
|
||||||
|
logger.log('note', ' No data to clean');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Configure which services are enabled
|
||||||
|
*/
|
||||||
|
public async configureServices(): Promise<void> {
|
||||||
|
logger.log('note', 'Select which services to enable for this project:');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
const currentServices = this.enabledServices || ['mongodb', 'minio', 'elasticsearch'];
|
||||||
|
|
||||||
|
const smartinteract = new plugins.smartinteract.SmartInteract();
|
||||||
|
const response = await smartinteract.askQuestion({
|
||||||
|
name: 'services',
|
||||||
|
type: 'checkbox',
|
||||||
|
message: 'Which services do you want to enable?',
|
||||||
|
choices: [
|
||||||
|
{ name: 'MongoDB', value: 'mongodb' },
|
||||||
|
{ name: 'MinIO (S3)', value: 'minio' },
|
||||||
|
{ name: 'Elasticsearch', value: 'elasticsearch' }
|
||||||
|
],
|
||||||
|
default: currentServices
|
||||||
|
});
|
||||||
|
|
||||||
|
this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
|
||||||
|
|
||||||
|
// Save to npmextra.json
|
||||||
|
await this.saveServiceConfiguration(this.enabledServices);
|
||||||
|
|
||||||
|
logger.log('ok', '✅ Service configuration updated');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reconfigure services with new ports
|
||||||
|
*/
|
||||||
|
public async reconfigure(): Promise<void> {
|
||||||
|
helpers.printHeader('Reconfiguring Services');
|
||||||
|
|
||||||
|
const containers = this.config.getContainerNames();
|
||||||
|
|
||||||
|
// Stop existing containers
|
||||||
|
logger.log('note', '🛑 Stopping existing containers...');
|
||||||
|
|
||||||
|
if (await this.docker.exists(containers.mongo)) {
|
||||||
|
await this.docker.stop(containers.mongo);
|
||||||
|
logger.log('ok', ' MongoDB stopped ✓');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await this.docker.exists(containers.minio)) {
|
||||||
|
await this.docker.stop(containers.minio);
|
||||||
|
logger.log('ok', ' S3/MinIO stopped ✓');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (await this.docker.exists(containers.elasticsearch)) {
|
||||||
|
await this.docker.stop(containers.elasticsearch);
|
||||||
|
logger.log('ok', ' Elasticsearch stopped ✓');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Reconfigure ports
|
||||||
|
await this.config.reconfigurePorts();
|
||||||
|
|
||||||
|
// Ask if user wants to restart services
|
||||||
|
const smartinteract = new plugins.smartinteract.SmartInteract();
|
||||||
|
const response = await smartinteract.askQuestion({
|
||||||
|
name: 'restart',
|
||||||
|
type: 'confirm',
|
||||||
|
message: 'Do you want to start services with new ports?',
|
||||||
|
default: true
|
||||||
|
});
|
||||||
|
|
||||||
|
if (response.value) {
|
||||||
|
console.log();
|
||||||
|
await this.startAll();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
127
ts/mod_services/helpers.ts
Normal file
127
ts/mod_services/helpers.ts
Normal file
@@ -0,0 +1,127 @@
|
|||||||
|
import * as plugins from './mod.plugins.js';
|
||||||
|
import * as net from 'net';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if a port is available
|
||||||
|
*/
|
||||||
|
export const isPortAvailable = async (port: number): Promise<boolean> => {
|
||||||
|
return new Promise((resolve) => {
|
||||||
|
const server = net.createServer();
|
||||||
|
|
||||||
|
server.once('error', () => {
|
||||||
|
resolve(false);
|
||||||
|
});
|
||||||
|
|
||||||
|
server.once('listening', () => {
|
||||||
|
server.close();
|
||||||
|
resolve(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
server.listen(port, '0.0.0.0');
|
||||||
|
});
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get a random available port between 20000 and 30000
|
||||||
|
*/
|
||||||
|
export const getRandomAvailablePort = async (): Promise<number> => {
|
||||||
|
const maxAttempts = 100;
|
||||||
|
|
||||||
|
for (let i = 0; i < maxAttempts; i++) {
|
||||||
|
const port = Math.floor(Math.random() * 10001) + 20000;
|
||||||
|
if (await isPortAvailable(port)) {
|
||||||
|
return port;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: let the system assign a port
|
||||||
|
return 0;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the project name from package.json or directory
|
||||||
|
*/
|
||||||
|
export const getProjectName = async (): Promise<string> => {
|
||||||
|
try {
|
||||||
|
const packageJsonPath = plugins.path.join(process.cwd(), 'package.json');
|
||||||
|
if (await plugins.smartfs.file(packageJsonPath).exists()) {
|
||||||
|
const content = (await plugins.smartfs
|
||||||
|
.file(packageJsonPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
|
const packageJson = JSON.parse(content);
|
||||||
|
if (packageJson.name) {
|
||||||
|
// Sanitize: @fin.cx/skr → fin-cx-skr
|
||||||
|
return packageJson.name.replace(/@/g, '').replace(/[\/\.]/g, '-');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
// Ignore errors and fall back to directory name
|
||||||
|
}
|
||||||
|
|
||||||
|
return plugins.path.basename(process.cwd());
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Print a header with decorative lines
|
||||||
|
*/
|
||||||
|
export const printHeader = (title: string) => {
|
||||||
|
console.log();
|
||||||
|
logger.log('info', '═══════════════════════════════════════════════════════════════');
|
||||||
|
logger.log('info', ` ${title}`);
|
||||||
|
logger.log('info', '═══════════════════════════════════════════════════════════════');
|
||||||
|
console.log();
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format bytes to human readable string
|
||||||
|
*/
|
||||||
|
export const formatBytes = (bytes: number): string => {
|
||||||
|
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||||
|
let size = bytes;
|
||||||
|
let unitIndex = 0;
|
||||||
|
|
||||||
|
while (size >= 1024 && unitIndex < units.length - 1) {
|
||||||
|
size /= 1024;
|
||||||
|
unitIndex++;
|
||||||
|
}
|
||||||
|
|
||||||
|
return `${size.toFixed(2)} ${units[unitIndex]}`;
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the local network IP address
|
||||||
|
*/
|
||||||
|
export const getLocalNetworkIp = async (): Promise<string> => {
|
||||||
|
const smartnetworkInstance = new plugins.smartnetwork.SmartNetwork();
|
||||||
|
const gateways = await smartnetworkInstance.getGateways();
|
||||||
|
|
||||||
|
// Find the best local IP from network interfaces
|
||||||
|
for (const interfaceName of Object.keys(gateways)) {
|
||||||
|
const interfaces = gateways[interfaceName];
|
||||||
|
for (const iface of interfaces) {
|
||||||
|
// Skip loopback and internal interfaces
|
||||||
|
if (!iface.internal && iface.family === 'IPv4') {
|
||||||
|
const address = iface.address;
|
||||||
|
// Prefer LAN IPs
|
||||||
|
if (address.startsWith('192.168.') || address.startsWith('10.') || address.startsWith('172.')) {
|
||||||
|
return address;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fallback: try to get any non-internal IPv4
|
||||||
|
for (const interfaceName of Object.keys(gateways)) {
|
||||||
|
const interfaces = gateways[interfaceName];
|
||||||
|
for (const iface of interfaces) {
|
||||||
|
if (!iface.internal && iface.family === 'IPv4') {
|
||||||
|
return iface.address;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Last resort: localhost
|
||||||
|
return 'localhost';
|
||||||
|
};
|
||||||
433
ts/mod_services/index.ts
Normal file
433
ts/mod_services/index.ts
Normal file
@@ -0,0 +1,433 @@
|
|||||||
|
import * as plugins from './mod.plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import { ServiceManager } from './classes.servicemanager.js';
|
||||||
|
import { GlobalRegistry } from './classes.globalregistry.js';
|
||||||
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
|
export const run = async (argvArg: any) => {
|
||||||
|
const isGlobal = argvArg.g || argvArg.global;
|
||||||
|
const command = argvArg._[1] || 'help';
|
||||||
|
|
||||||
|
// Handle global commands first
|
||||||
|
if (isGlobal) {
|
||||||
|
await handleGlobalCommand(command);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Local project commands
|
||||||
|
const serviceManager = new ServiceManager();
|
||||||
|
await serviceManager.init();
|
||||||
|
|
||||||
|
const service = argvArg._[2] || 'all';
|
||||||
|
|
||||||
|
switch (command) {
|
||||||
|
case 'start':
|
||||||
|
await handleStart(serviceManager, service);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'stop':
|
||||||
|
await handleStop(serviceManager, service);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'restart':
|
||||||
|
await handleRestart(serviceManager, service);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'status':
|
||||||
|
await serviceManager.showStatus();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'config':
|
||||||
|
if (service === 'services' || argvArg._[2] === 'services') {
|
||||||
|
await handleConfigureServices(serviceManager);
|
||||||
|
} else {
|
||||||
|
await serviceManager.showConfig();
|
||||||
|
}
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'compass':
|
||||||
|
await serviceManager.showCompassConnection();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'logs':
|
||||||
|
const lines = parseInt(argvArg._[3]) || 20;
|
||||||
|
await serviceManager.showLogs(service, lines);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'remove':
|
||||||
|
await handleRemove(serviceManager);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'clean':
|
||||||
|
await handleClean(serviceManager);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'reconfigure':
|
||||||
|
await serviceManager.reconfigure();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'help':
|
||||||
|
default:
|
||||||
|
showHelp();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
async function handleStart(serviceManager: ServiceManager, service: string) {
|
||||||
|
helpers.printHeader('Starting Services');
|
||||||
|
|
||||||
|
switch (service) {
|
||||||
|
case 'mongo':
|
||||||
|
case 'mongodb':
|
||||||
|
await serviceManager.startMongoDB();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'minio':
|
||||||
|
case 's3':
|
||||||
|
await serviceManager.startMinIO();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'elasticsearch':
|
||||||
|
case 'es':
|
||||||
|
await serviceManager.startElasticsearch();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'all':
|
||||||
|
case '':
|
||||||
|
await serviceManager.startAll();
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
logger.log('error', `Unknown service: ${service}`);
|
||||||
|
logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleStop(serviceManager: ServiceManager, service: string) {
|
||||||
|
helpers.printHeader('Stopping Services');
|
||||||
|
|
||||||
|
switch (service) {
|
||||||
|
case 'mongo':
|
||||||
|
case 'mongodb':
|
||||||
|
await serviceManager.stopMongoDB();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'minio':
|
||||||
|
case 's3':
|
||||||
|
await serviceManager.stopMinIO();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'elasticsearch':
|
||||||
|
case 'es':
|
||||||
|
await serviceManager.stopElasticsearch();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'all':
|
||||||
|
case '':
|
||||||
|
await serviceManager.stopAll();
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
logger.log('error', `Unknown service: ${service}`);
|
||||||
|
logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleRestart(serviceManager: ServiceManager, service: string) {
|
||||||
|
helpers.printHeader('Restarting Services');
|
||||||
|
|
||||||
|
switch (service) {
|
||||||
|
case 'mongo':
|
||||||
|
case 'mongodb':
|
||||||
|
await serviceManager.stopMongoDB();
|
||||||
|
await plugins.smartdelay.delayFor(2000);
|
||||||
|
await serviceManager.startMongoDB();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'minio':
|
||||||
|
case 's3':
|
||||||
|
await serviceManager.stopMinIO();
|
||||||
|
await plugins.smartdelay.delayFor(2000);
|
||||||
|
await serviceManager.startMinIO();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'elasticsearch':
|
||||||
|
case 'es':
|
||||||
|
await serviceManager.stopElasticsearch();
|
||||||
|
await plugins.smartdelay.delayFor(2000);
|
||||||
|
await serviceManager.startElasticsearch();
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'all':
|
||||||
|
case '':
|
||||||
|
await serviceManager.stopAll();
|
||||||
|
await plugins.smartdelay.delayFor(2000);
|
||||||
|
await serviceManager.startAll();
|
||||||
|
break;
|
||||||
|
|
||||||
|
default:
|
||||||
|
logger.log('error', `Unknown service: ${service}`);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleRemove(serviceManager: ServiceManager) {
|
||||||
|
helpers.printHeader('Removing Containers');
|
||||||
|
logger.log('note', '⚠️ This will remove containers but preserve data');
|
||||||
|
|
||||||
|
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation('Continue?', false);
|
||||||
|
|
||||||
|
if (shouldContinue) {
|
||||||
|
await serviceManager.removeContainers();
|
||||||
|
} else {
|
||||||
|
logger.log('note', 'Cancelled');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleClean(serviceManager: ServiceManager) {
|
||||||
|
helpers.printHeader('Clean All');
|
||||||
|
logger.log('error', '⚠️ WARNING: This will remove all containers and data!');
|
||||||
|
logger.log('error', 'This action cannot be undone!');
|
||||||
|
|
||||||
|
const smartinteraction = new plugins.smartinteract.SmartInteract();
|
||||||
|
const confirmAnswer = await smartinteraction.askQuestion({
|
||||||
|
name: 'confirm',
|
||||||
|
type: 'input',
|
||||||
|
message: 'Type "yes" to confirm:',
|
||||||
|
default: 'no'
|
||||||
|
});
|
||||||
|
|
||||||
|
if (confirmAnswer.value === 'yes') {
|
||||||
|
await serviceManager.removeContainers();
|
||||||
|
console.log();
|
||||||
|
await serviceManager.cleanData();
|
||||||
|
logger.log('ok', 'All cleaned ✓');
|
||||||
|
} else {
|
||||||
|
logger.log('note', 'Cancelled');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleConfigureServices(serviceManager: ServiceManager) {
|
||||||
|
helpers.printHeader('Configure Services');
|
||||||
|
await serviceManager.configureServices();
|
||||||
|
}
|
||||||
|
|
||||||
|
function showHelp() {
|
||||||
|
helpers.printHeader('GitZone Services Manager');
|
||||||
|
|
||||||
|
logger.log('ok', 'Usage: gitzone services [command] [options]');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'Commands:');
|
||||||
|
logger.log('info', ' start [service] Start services (mongo|s3|elasticsearch|all)');
|
||||||
|
logger.log('info', ' stop [service] Stop services (mongo|s3|elasticsearch|all)');
|
||||||
|
logger.log('info', ' restart [service] Restart services (mongo|s3|elasticsearch|all)');
|
||||||
|
logger.log('info', ' status Show service status');
|
||||||
|
logger.log('info', ' config Show current configuration');
|
||||||
|
logger.log('info', ' config services Configure which services are enabled');
|
||||||
|
logger.log('info', ' compass Show MongoDB Compass connection string');
|
||||||
|
logger.log('info', ' logs [service] Show logs (mongo|s3|elasticsearch|all) [lines]');
|
||||||
|
logger.log('info', ' reconfigure Reassign ports and restart services');
|
||||||
|
logger.log('info', ' remove Remove all containers');
|
||||||
|
logger.log('info', ' clean Remove all containers and data ⚠️');
|
||||||
|
logger.log('info', ' help Show this help message');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'Available Services:');
|
||||||
|
logger.log('info', ' • MongoDB (mongo) - Document database');
|
||||||
|
logger.log('info', ' • MinIO (s3) - S3-compatible object storage');
|
||||||
|
logger.log('info', ' • Elasticsearch (elasticsearch) - Search and analytics engine');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'Features:');
|
||||||
|
logger.log('info', ' • Auto-creates .nogit/env.json with smart defaults');
|
||||||
|
logger.log('info', ' • Random ports (20000-30000) for MongoDB/MinIO to avoid conflicts');
|
||||||
|
logger.log('info', ' • Elasticsearch uses standard port 9200');
|
||||||
|
logger.log('info', ' • Project-specific containers for multi-project support');
|
||||||
|
logger.log('info', ' • Preserves custom configuration values');
|
||||||
|
logger.log('info', ' • MongoDB Compass connection support');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'Examples:');
|
||||||
|
logger.log('info', ' gitzone services start # Start all services');
|
||||||
|
logger.log('info', ' gitzone services start mongo # Start only MongoDB');
|
||||||
|
logger.log('info', ' gitzone services start elasticsearch # Start only Elasticsearch');
|
||||||
|
logger.log('info', ' gitzone services stop # Stop all services');
|
||||||
|
logger.log('info', ' gitzone services status # Check service status');
|
||||||
|
logger.log('info', ' gitzone services config # Show configuration');
|
||||||
|
logger.log('info', ' gitzone services compass # Get MongoDB Compass connection');
|
||||||
|
logger.log('info', ' gitzone services logs elasticsearch # Show Elasticsearch logs');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'Global Commands (-g/--global):');
|
||||||
|
logger.log('info', ' list -g List all registered projects');
|
||||||
|
logger.log('info', ' status -g Show status across all projects');
|
||||||
|
logger.log('info', ' stop -g Stop all containers across all projects');
|
||||||
|
logger.log('info', ' cleanup -g Remove stale registry entries');
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
logger.log('note', 'Global Examples:');
|
||||||
|
logger.log('info', ' gitzone services list -g # List all registered projects');
|
||||||
|
logger.log('info', ' gitzone services status -g # Show global container status');
|
||||||
|
logger.log('info', ' gitzone services stop -g # Stop all (prompts for confirmation)');
|
||||||
|
}
|
||||||
|
|
||||||
|
// ==================== Global Command Handlers ====================
|
||||||
|
|
||||||
|
async function handleGlobalCommand(command: string) {
|
||||||
|
const globalRegistry = GlobalRegistry.getInstance();
|
||||||
|
|
||||||
|
switch (command) {
|
||||||
|
case 'list':
|
||||||
|
await handleGlobalList(globalRegistry);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'status':
|
||||||
|
await handleGlobalStatus(globalRegistry);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'stop':
|
||||||
|
await handleGlobalStop(globalRegistry);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'cleanup':
|
||||||
|
await handleGlobalCleanup(globalRegistry);
|
||||||
|
break;
|
||||||
|
|
||||||
|
case 'help':
|
||||||
|
default:
|
||||||
|
showHelp();
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleGlobalList(globalRegistry: GlobalRegistry) {
|
||||||
|
helpers.printHeader('Registered Projects (Global)');
|
||||||
|
|
||||||
|
const projects = await globalRegistry.getAllProjects();
|
||||||
|
const projectPaths = Object.keys(projects);
|
||||||
|
|
||||||
|
if (projectPaths.length === 0) {
|
||||||
|
logger.log('note', 'No projects registered');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const path of projectPaths) {
|
||||||
|
const project = projects[path];
|
||||||
|
const lastActive = new Date(project.lastActive).toLocaleString();
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
logger.log('ok', `📁 ${project.projectName}`);
|
||||||
|
logger.log('info', ` Path: ${project.projectPath}`);
|
||||||
|
logger.log('info', ` Services: ${project.enabledServices.join(', ')}`);
|
||||||
|
logger.log('info', ` Last Active: ${lastActive}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleGlobalStatus(globalRegistry: GlobalRegistry) {
|
||||||
|
helpers.printHeader('Global Service Status');
|
||||||
|
|
||||||
|
const statuses = await globalRegistry.getGlobalStatus();
|
||||||
|
|
||||||
|
if (statuses.length === 0) {
|
||||||
|
logger.log('note', 'No projects registered');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let runningCount = 0;
|
||||||
|
let totalContainers = 0;
|
||||||
|
|
||||||
|
for (const project of statuses) {
|
||||||
|
console.log();
|
||||||
|
logger.log('ok', `📁 ${project.projectName}`);
|
||||||
|
logger.log('info', ` Path: ${project.projectPath}`);
|
||||||
|
|
||||||
|
if (project.containers.length === 0) {
|
||||||
|
logger.log('note', ' No containers configured');
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (const container of project.containers) {
|
||||||
|
totalContainers++;
|
||||||
|
const statusIcon = container.status === 'running' ? '🟢' : container.status === 'exited' ? '🟡' : '⚪';
|
||||||
|
if (container.status === 'running') runningCount++;
|
||||||
|
logger.log('info', ` ${statusIcon} ${container.name}: ${container.status}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
logger.log('note', `Summary: ${runningCount}/${totalContainers} containers running across ${statuses.length} project(s)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleGlobalStop(globalRegistry: GlobalRegistry) {
|
||||||
|
helpers.printHeader('Stop All Containers (Global)');
|
||||||
|
|
||||||
|
const statuses = await globalRegistry.getGlobalStatus();
|
||||||
|
|
||||||
|
// Count running containers
|
||||||
|
let runningCount = 0;
|
||||||
|
for (const project of statuses) {
|
||||||
|
for (const container of project.containers) {
|
||||||
|
if (container.status === 'running') runningCount++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (runningCount === 0) {
|
||||||
|
logger.log('note', 'No running containers found');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('note', `Found ${runningCount} running container(s) across ${statuses.length} project(s)`);
|
||||||
|
console.log();
|
||||||
|
|
||||||
|
// Show what will be stopped
|
||||||
|
for (const project of statuses) {
|
||||||
|
const runningContainers = project.containers.filter(c => c.status === 'running');
|
||||||
|
if (runningContainers.length > 0) {
|
||||||
|
logger.log('info', `${project.projectName}:`);
|
||||||
|
for (const container of runningContainers) {
|
||||||
|
logger.log('info', ` • ${container.name}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log();
|
||||||
|
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation(
|
||||||
|
'Stop all containers?',
|
||||||
|
false
|
||||||
|
);
|
||||||
|
|
||||||
|
if (!shouldContinue) {
|
||||||
|
logger.log('note', 'Cancelled');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('note', 'Stopping all containers...');
|
||||||
|
const result = await globalRegistry.stopAll();
|
||||||
|
|
||||||
|
if (result.stopped.length > 0) {
|
||||||
|
logger.log('ok', `Stopped: ${result.stopped.join(', ')}`);
|
||||||
|
}
|
||||||
|
if (result.failed.length > 0) {
|
||||||
|
logger.log('error', `Failed to stop: ${result.failed.join(', ')}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function handleGlobalCleanup(globalRegistry: GlobalRegistry) {
|
||||||
|
helpers.printHeader('Cleanup Registry (Global)');
|
||||||
|
|
||||||
|
logger.log('note', 'Checking for stale registry entries...');
|
||||||
|
const removed = await globalRegistry.cleanup();
|
||||||
|
|
||||||
|
if (removed.length === 0) {
|
||||||
|
logger.log('ok', 'No stale entries found');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
logger.log('ok', `Removed ${removed.length} stale entr${removed.length === 1 ? 'y' : 'ies'}:`);
|
||||||
|
for (const path of removed) {
|
||||||
|
logger.log('info', ` • ${path}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
9
ts/mod_services/mod.plugins.ts
Normal file
9
ts/mod_services/mod.plugins.ts
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
export * from '../plugins.js';
|
||||||
|
|
||||||
|
import * as smartshell from '@push.rocks/smartshell';
|
||||||
|
import * as smartfile from '@push.rocks/smartfile';
|
||||||
|
import * as smartinteract from '@push.rocks/smartinteract';
|
||||||
|
import * as smartnetwork from '@push.rocks/smartnetwork';
|
||||||
|
import * as smartdelay from '@push.rocks/smartdelay';
|
||||||
|
|
||||||
|
export { smartshell, smartfile, smartinteract, smartnetwork, smartdelay };
|
||||||
@@ -6,23 +6,36 @@ import * as paths from '../paths.js';
|
|||||||
|
|
||||||
import { logger } from '../gitzone.logging.js';
|
import { logger } from '../gitzone.logging.js';
|
||||||
|
|
||||||
export let run = () => {
|
export let run = async () => {
|
||||||
const done = plugins.smartpromise.defer();
|
const done = plugins.smartpromise.defer();
|
||||||
logger.log('warn', 'no action specified');
|
logger.log('warn', 'no action specified');
|
||||||
|
|
||||||
|
const dirEntries = await plugins.smartfs.directory(paths.templatesDir).list();
|
||||||
|
const templates: string[] = [];
|
||||||
|
for (const entry of dirEntries) {
|
||||||
|
try {
|
||||||
|
const stats = await plugins.smartfs
|
||||||
|
.file(plugins.path.join(paths.templatesDir, entry.path))
|
||||||
|
.stat();
|
||||||
|
if (stats.isDirectory) {
|
||||||
|
templates.push(entry.name);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// Skip entries that can't be accessed
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let projects = `\n`;
|
||||||
|
for (const template of templates) {
|
||||||
|
projects += ` - ${template}\n`;
|
||||||
|
}
|
||||||
|
|
||||||
logger.log(
|
logger.log(
|
||||||
'info',
|
'info',
|
||||||
`
|
`
|
||||||
You can do one of the following things:
|
You can do one of the following things:
|
||||||
* create a new project with 'gitzone template [template]'
|
* create a new project with 'gitzone template [template]'
|
||||||
the following templates exist: ${(() => {
|
the following templates exist: ${projects}
|
||||||
let projects = `\n`;
|
|
||||||
for (const template of plugins.smartfile.fs.listFoldersSync(
|
|
||||||
paths.templatesDir,
|
|
||||||
)) {
|
|
||||||
projects += ` - ${template}\n`;
|
|
||||||
}
|
|
||||||
return projects;
|
|
||||||
})()}
|
|
||||||
* format a project with 'gitzone format'
|
* format a project with 'gitzone format'
|
||||||
`,
|
`,
|
||||||
);
|
);
|
||||||
|
|||||||
@@ -11,7 +11,7 @@ export const getTemplatePath = (templateNameArg: string) => {
|
|||||||
* receives a template name and returns wether there is a corresponding template
|
* receives a template name and returns wether there is a corresponding template
|
||||||
*/
|
*/
|
||||||
export const isTemplate = async (templateNameArg: string) => {
|
export const isTemplate = async (templateNameArg: string) => {
|
||||||
return plugins.smartfile.fs.isDirectory(getTemplatePath(templateNameArg));
|
return plugins.smartfs.directory(getTemplatePath(templateNameArg)).exists();
|
||||||
};
|
};
|
||||||
|
|
||||||
export const getTemplate = async (templateNameArg: string) => {
|
export const getTemplate = async (templateNameArg: string) => {
|
||||||
|
|||||||
@@ -7,6 +7,15 @@ import * as smartcli from '@push.rocks/smartcli';
|
|||||||
import * as smartpath from '@push.rocks/smartpath';
|
import * as smartpath from '@push.rocks/smartpath';
|
||||||
import * as smartpromise from '@push.rocks/smartpromise';
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
import * as smartupdate from '@push.rocks/smartupdate';
|
import * as smartupdate from '@push.rocks/smartupdate';
|
||||||
|
import * as smartshell from '@push.rocks/smartshell';
|
||||||
|
import * as smartnetwork from '@push.rocks/smartnetwork';
|
||||||
|
import * as smartfile from '@push.rocks/smartfile';
|
||||||
|
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||||
|
import * as smartinteract from '@push.rocks/smartinteract';
|
||||||
|
import * as smartdelay from '@push.rocks/smartdelay';
|
||||||
|
|
||||||
|
// Create smartfs instance for filesystem operations
|
||||||
|
export const smartfs = new SmartFs(new SmartFsProviderNode());
|
||||||
|
|
||||||
export {
|
export {
|
||||||
smartlog,
|
smartlog,
|
||||||
@@ -18,4 +27,9 @@ export {
|
|||||||
smartpath,
|
smartpath,
|
||||||
smartpromise,
|
smartpromise,
|
||||||
smartupdate,
|
smartupdate,
|
||||||
|
smartshell,
|
||||||
|
smartnetwork,
|
||||||
|
smartfile,
|
||||||
|
smartinteract,
|
||||||
|
smartdelay,
|
||||||
};
|
};
|
||||||
|
|||||||
@@ -10,7 +10,5 @@
|
|||||||
"baseUrl": ".",
|
"baseUrl": ".",
|
||||||
"paths": {}
|
"paths": {}
|
||||||
},
|
},
|
||||||
"exclude": [
|
"exclude": ["dist_*/**/*.d.ts"]
|
||||||
"dist_*/**/*.d.ts"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
Reference in New Issue
Block a user