Compare commits
44 Commits
| SHA1 |
|---|
| 27f2d265de |
| af3e15e922 |
| b44624f2e7 |
| 847e679e92 |
| ddf5023ecb |
| e1d28bc10a |
| 2f3d67f9e3 |
| 6304953234 |
| 8d84620bc4 |
| efd6f04e63 |
| 97ce9db28e |
| 362b4c106e |
| 3efe385952 |
| f6886f172d |
| 81d6273346 |
| 7e6cf5f046 |
| 89cf7dca04 |
| 9639a64437 |
| 48305ebb6a |
| 485c0a3855 |
| adc828d9bb |
| fff1d39338 |
| 5afbe6ccbc |
| 9de17a428d |
| c9985102c3 |
| 73f98c1c3f |
| ae93e6f146 |
| 2abaeee500 |
| 0538ba2586 |
| a451779724 |
| cd3246d659 |
| d37ffd7177 |
| a69b613087 |
| 1ea186d233 |
| f5e7d43cf3 |
| d80faa044a |
| 64062e5c43 |
| bd22844280 |
| 366c4a0bc2 |
| 0d3b10bd00 |
| a41e3d5d2c |
| c45cff89de |
| 7bb43ad478 |
| 8dcaf1c631 |
assets/templates/multienv/deno.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "lib": ["ES2022", "DOM"],
    "target": "ES2022",
    "checkJs": true
  },
  "nodeModulesDir": true
}
@@ -17,12 +17,10 @@ fileName: package.json
    "buildDocs": "(tsdoc)"
  },
  "devDependencies": {
    "@git.zone/tsbuild": "^2.1.25",
    "@git.zone/tsbundle": "^2.0.5",
    "@git.zone/tsrun": "^1.2.46",
    "@git.zone/tstest": "^1.0.44",
    "@push.rocks/tapbundle": "^5.0.15",
    "@types/node": "^20.8.7"
    "@git.zone/tsbuild": "^3.1.2",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.3",
    "@types/node": "^24.10.1"
  },
  "dependencies": {}
}
@@ -1,8 +1,8 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as {{module.name}} from '../ts/index.js'

tap.test('first test', async () => {
  console.log({{module.name}})
})

tap.start()
export default tap.start()
@@ -17,18 +17,18 @@ fileName: package.json
    "build": "(tsbuild --web --allowimplicitany)"
  },
  "devDependencies": {
    "@git.zone/tsbuild": "^2.1.17",
    "@git.zone/tsrun": "^1.2.8",
    "@git.zone/tstest": "^1.0.28",
    "@git.zone/tsbuild": "^3.1.2",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.3",
    "@git.zone/tswatch": "^2.0.1",
    "@push.rocks/tapbundle": "^5.5.4"
    "@types/node": "^24.10.1"
  },
  "dependencies": {
    "@api.global/typedserver": "^3.0.53",
    "@push.rocks/projectinfo": "^5.0.1",
    "@push.rocks/projectinfo": "^5.0.2",
    "@push.rocks/qenv": "^6.1.0",
    "@push.rocks/smartdata": "^5.0.7",
    "@push.rocks/smartpath": "^5.0.5",
    "@push.rocks/smartpath": "^6.0.0",
    "@push.rocks/smartstate": "^2.0.0"
  },
}
}
@@ -1,8 +1,5 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "emitDecoratorMetadata": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
changelog.md (151 changed lines)
@@ -1,5 +1,156 @@
# Changelog

## 2025-12-02 - 2.2.0 - feat(services)
Improve services manager and configuration; switch test templates to @git.zone/tstest; bump dev dependencies and update docs

- services: Add robust ServiceConfiguration (creates .nogit/env.json with sane defaults, syncs ports from existing Docker containers, validates and can reconfigure ports)
- services CLI: improved start/stop/restart flows, better logging/help output and enhanced global commands (list/status/stop/cleanup)
- templates/tests: replace @push.rocks/tapbundle with @git.zone/tstest and update template test.ts to export default tap.start()
- format: stop auto-updating tslint template and mark @push.rocks/tapbundle as deprecated in package formatting logic
- dependencies: bump @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tstest, @git.zone/tsdoc, @push.rocks/projectinfo, @push.rocks/smartpath, @push.rocks/smartfs, prettier and other dev deps
- docs: README updates: add issue reporting/security section, AI-powered commit recommendation notes, and clarify trademark/legal wording

## 2025-11-29 - 2.1.0 - feat(mod_services)
Add global service registry and global commands for managing project containers

- Introduce GlobalRegistry class to track registered projects, their containers, ports and last activity (ts/mod_services/classes.globalregistry.ts)
- Add global CLI mode for services (use -g/--global) with commands: list, status, stop, cleanup (ts/mod_services/index.ts)
- ServiceManager now registers the current project with the global registry when starting services and unregisters when all containers are removed (ts/mod_services/classes.servicemanager.ts)
- Global handlers to list projects, show aggregated status, stop containers across projects and cleanup stale entries
- Bump dependency @push.rocks/smartfile to ^13.1.0 in package.json

## 2025-11-27 - 2.0.0 - BREAKING CHANGE(core)
Migrate filesystem to smartfs (async) and add Elasticsearch service support; refactor format/commit/meta modules

- Replace @push.rocks/smartfile usage with @push.rocks/smartfs across the codebase; all filesystem operations are now async (SmartFs.file(...).read()/write(), SmartFs.directory(...).list()/create()/delete(), etc.)
- Convert formerly synchronous helpers and APIs to async (notable: detectProjectType, getProjectName, readCurrentVersion and related version bumping logic). Callers updated accordingly.
- Add Elasticsearch support to services: new config fields (ELASTICSEARCH_*), Docker run/start/stop/logs/status handling, and ELASTICSEARCH_URL in service configuration.
- Refactor formatting subsystem: cache and rollback/backup systems removed/disabled for stability, format planner execution simplified (sequential), diff/stats reporting updated to use smartfs.
- Update package.json dependencies: bump @git.zone/tsbuild, tsrun, tstest; upgrade @push.rocks/smartfile to v13 and add @push.rocks/smartfs dependency; update @types/node.
- Update commit flow and changelog generation to use smartfs for reading/writing files and to await version/branch detection where necessary.
- Expose a SmartFs instance via plugins and adjust all mod.* plugin files to import/use smartfs where required.
- Breaking change: Public and internal APIs that previously used synchronous smartfile APIs are now asynchronous. Consumers and scripts must await these functions and use the new smartfs API.

## 2025-11-17 - 1.21.5 - fix(tsconfig)
Remove emitDecoratorMetadata from tsconfig template

- Removed the "emitDecoratorMetadata" compiler option from assets/templates/tsconfig_update/tsconfig.json
- This updates the tsconfig template to avoid emitting decorator metadata when targeting ES2022

## 2025-11-17 - 1.21.4 - fix(tsconfig template)
Remove experimentalDecorators and useDefineForClassFields from tsconfig template

- Removed experimentalDecorators option from assets/templates/tsconfig_update/tsconfig.json
- Removed useDefineForClassFields option from assets/templates/tsconfig_update/tsconfig.json

## 2025-11-17 - 1.21.3 - fix(assets/templates/multienv)
Remove unused Bun configuration template (assets/templates/multienv/bunfig.toml)

- Deleted assets/templates/multienv/bunfig.toml which previously provided Bun TypeScript decorator configuration
- Cleans up stale/unused template to avoid shipping obsolete Bun config
- No functional code changes; removes an unused asset file

## 2025-11-17 - 1.21.2 - fix(templates/multienv)
Disable useDefineForClassFields in multienv TypeScript configs to ensure decorator compatibility

- Set useDefineForClassFields = false in assets/templates/multienv/bunfig.toml to keep Bun's transpiler compatible with decorator usage
- Set "useDefineForClassFields": false in assets/templates/multienv/deno.json to ensure Deno/TypeScript compiler emits class fields compatible with decorators

## 2025-11-17 - 1.21.1 - fix(templates.multienv)
Enable checkJs in multienv Deno template to enable JS type checking

- Added "checkJs": true to compilerOptions in assets/templates/multienv/deno.json to enable JavaScript type checking for the Deno multienv template
## 2025-11-17 - 1.21.0 - feat(multienv)
Add multi-env templates enabling TypeScript decorators for Bun and Deno; rename npmextra config key to szci

- Added assets/templates/multienv/bunfig.toml to enable Bun TypeScript transpiler experimentalDecorators
- Added assets/templates/multienv/deno.json with experimentalDecorators, lib and target set for ES2022
- Updated npmextra.json: renamed top-level config key from "npmci" to "szci" (keeps npmGlobalTools, npmAccessLevel and npmRegistryUrl unchanged)

## 2025-11-06 - 1.20.0 - feat(commit)
Add non-interactive --yes (-y) flag to commit command to auto-accept AI recommendations and optionally push with -p

- Add -y / --yes flag to gitzone commit to auto-accept AI-generated commit recommendations without interactive prompts
- Support -yp or -y -p combinations to auto-accept and push to origin; -p / --push remains the separate control for pushing
- Implementation creates a smartinteract AnswerBucket programmatically when -y is used and populates commitType, commitScope, commitDescription and pushToOrigin
- Preserves existing UI output and interactive flow when -y is not used; fully backward compatible and CI/CD friendly
- Updated CLI usage and documentation (readme.hints.md) to document the new flags

## 2025-11-05 - 1.19.9 - fix(mod_commit)
Refactor version bumping to a unified implementation for npm and Deno; remove npm-exec based helpers and add file-based version readers/updaters to avoid npm warning pollution

- Removed legacy npm/deno-specific helpers (bumpNpmVersion, syncVersionToDenoJson, bumpDenoVersion) that relied on executing npm and caused warning pollution
- Added readCurrentVersion() to read version from package.json or deno.json
- Added updateVersionFile() helper to write version directly into JSON files
- Added unified bumpProjectVersion() that handles npm, deno and both with a single code path; reuses calculateNewVersion()
- Stages updated files, commits v<newVersion> and creates a tag v<newVersion>
- Benefits: no npm warning pollution in deno.json, simpler git history, consistent behavior across project types

## 2025-11-04 - 1.19.8 - fix(package.json)
Bump @git.zone/tsdoc dependency to ^1.9.2

- Updated dependency @git.zone/tsdoc from ^1.9.1 to ^1.9.2 in package.json

## 2025-11-04 - 1.19.7 - fix(dependencies)
Bump @git.zone/tsdoc to ^1.9.1

- Updated package.json dependency @git.zone/tsdoc from ^1.9.0 to ^1.9.1

## 2025-11-04 - 1.19.6 - fix(cli)
Bump @git.zone/tsdoc dependency to ^1.9.0

- Updated dependency @git.zone/tsdoc from ^1.8.3 to ^1.9.0 in package.json

## 2025-11-04 - 1.19.5 - fix(cli)
Bump @git.zone/tsdoc to ^1.8.3 and add local .claude settings for allowed permissions

- Updated dependency @git.zone/tsdoc from ^1.8.2 to ^1.8.3
- Added .claude/settings.local.json to declare allowed permissions for local tooling (Bash commands, Docker, npm, WebFetch and MCP actions)

## 2025-11-03 - 1.19.3 - fix(tsdoc)
Bump @git.zone/tsdoc to ^1.8.0 and add .claude local settings

- Upgrade dependency @git.zone/tsdoc from ^1.6.1 to ^1.8.0 in package.json
- Add .claude/settings.local.json for local assistant permissions/configuration

## 2025-11-03 - 1.19.2 - fix(tsdoc)
Bump @git.zone/tsdoc to ^1.6.1 and add .claude/settings.local.json

- Update dependency @git.zone/tsdoc from ^1.6.0 to ^1.6.1
- Add .claude/settings.local.json to include local Claude settings/permissions

## 2025-11-02 - 1.19.1 - fix(dependencies)
Bump dependencies and add local Claude settings

- Bump devDependencies: @git.zone/tsbuild -> ^2.7.1, @git.zone/tsrun -> ^1.6.2, @git.zone/tstest -> ^2.7.0
- Upgrade runtime dependencies: @git.zone/tsdoc -> ^1.6.0; update @push.rocks packages (smartcli ^4.0.19, smartjson ^5.2.0, smartlog ^3.1.10, smartnetwork ^4.4.0, etc.)
- Add .claude/settings.local.json (local project permissions/settings file)

## 2025-10-23 - 1.19.0 - feat(mod_commit)
Add CLI UI helpers and improve commit workflow with progress, recommendations and summary

- Introduce ts/mod_commit/mod.ui.ts: reusable CLI UI helpers (pretty headers, sections, AI recommendation box, step printer, commit summary and helpers for consistent messaging).
- Refactor ts/mod_commit/index.ts: use new UI functions to display AI recommendations, show step-by-step progress for baking commit info, generating changelog, staging, committing, bumping version and optional push; include commit SHA in final summary.
- Enhance ts/mod_commit/mod.helpers.ts: bumpProjectVersion now accepts currentStep/totalSteps to report progress and returns a consistent newVersion after handling npm/deno/both cases.
- Add .claude/settings.local.json: local permissions configuration for development tooling.

## 2025-10-23 - 1.18.9 - fix(mod_commit)
Stage and commit deno.json when bumping/syncing versions and create/update git tags

- bumpDenoVersion now creates a Smartshell instance and runs git add deno.json, git commit -m "v<newVersion>", and git tag v<newVersion> to persist the version bump
- syncVersionToDenoJson now stages deno.json, amends the npm version commit with --no-edit, and recreates the tag with -fa to keep package.json and deno.json in sync
- Added informative logger messages after creating commits and tags

## 2025-10-23 - 1.18.8 - fix(mod_commit)
Improve commit workflow: detect project type and current branch; add robust version bump helpers for npm/deno

- Add mod_commit/mod.helpers.ts with utilities: detectCurrentBranch(), detectProjectType(), bumpProjectVersion(), bumpDenoVersion(), bumpNpmVersion(), syncVersionToDenoJson(), and calculateNewVersion()
- Refactor ts/mod_commit/index.ts to use the new helpers: bumpProjectVersion(projectType, ...) instead of a hard npm version call and push the actual current branch instead of hardcoding 'master'
- Support bumping versions for npm-only, deno-only, and hybrid (both) projects and synchronize versions from package.json to deno.json when applicable
- Improve branch detection with a fallback to 'master' and informative logging on detection failures
- Add local Claude settings file (.claude/settings.local.json) (editor/CI config); no code behavior change but included in diff

## 2025-09-07 - 1.18.7 - fix(claude)
Add .claude local settings to whitelist dev tool permissions
@@ -1,5 +1,5 @@
{
  "npmci": {
  "szci": {
    "npmGlobalTools": [],
    "npmAccessLevel": "private",
    "npmRegistryUrl": "verdaccio.lossless.one"
package.json (26 changed lines)
@@ -1,7 +1,7 @@
{
  "name": "@git.zone/cli",
  "private": false,
  "version": "1.18.7",
  "version": "2.2.0",
  "description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
  "main": "dist_ts/index.ts",
  "typings": "dist_ts/index.d.ts",
@@ -57,18 +57,17 @@
  },
  "homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
  "devDependencies": {
    "@git.zone/tsbuild": "^2.6.8",
    "@git.zone/tsrun": "^1.3.3",
    "@git.zone/tstest": "^2.3.6",
    "@git.zone/tsbuild": "^3.1.2",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.3",
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^11.2.7",
    "@push.rocks/smartinteract": "^2.0.16",
    "@push.rocks/smartnetwork": "^4.1.2",
    "@push.rocks/smartnetwork": "^4.4.0",
    "@push.rocks/smartshell": "^3.3.0",
    "@types/node": "^22.15.18"
    "@types/node": "^24.10.1"
  },
  "dependencies": {
    "@git.zone/tsdoc": "^1.5.2",
    "@git.zone/tsdoc": "^1.10.0",
    "@git.zone/tspublish": "^1.10.3",
    "@push.rocks/commitinfo": "^1.0.12",
    "@push.rocks/early": "^4.0.4",
@@ -76,13 +75,14 @@
    "@push.rocks/lik": "^6.2.2",
    "@push.rocks/npmextra": "^5.3.3",
    "@push.rocks/projectinfo": "^5.0.2",
    "@push.rocks/smartchok": "^1.1.1",
    "@push.rocks/smartcli": "^4.0.11",
    "@push.rocks/smartcli": "^4.0.19",
    "@push.rocks/smartdiff": "^1.0.3",
    "@push.rocks/smartfile": "^13.1.0",
    "@push.rocks/smartfs": "^1.2.0",
    "@push.rocks/smartgulp": "^3.0.4",
    "@push.rocks/smartjson": "^5.0.20",
    "@push.rocks/smartjson": "^5.2.0",
    "@push.rocks/smartlegal": "^1.0.27",
    "@push.rocks/smartlog": "^3.1.9",
    "@push.rocks/smartlog": "^3.1.10",
    "@push.rocks/smartlog-destination-local": "^9.0.2",
    "@push.rocks/smartmustache": "^3.0.2",
    "@push.rocks/smartnpm": "^2.0.6",
@@ -95,7 +95,7 @@
    "@push.rocks/smartunique": "^3.0.9",
    "@push.rocks/smartupdate": "^2.0.6",
    "@types/through2": "^2.0.41",
    "prettier": "^3.6.2",
    "prettier": "^3.7.3",
    "through2": "^4.0.2"
  },
  "files": [
pnpm-lock.yaml (generated, 4002 changed lines)
File diff suppressed because it is too large
readme.hints.md (113 changed lines)
@@ -89,6 +89,41 @@ The format module is responsible for project standardization:
5. **Performance Optimizations**: Parallel execution and caching
6. **Reporting**: Diff views, statistics, verbose logging
7. **Architecture**: Clean separation of concerns with new classes
8. **Unified Version Bumping**: Self-managed version updates eliminating npm warning pollution in deno.json

### Version Bumping Refactor (Latest)

The commit module's version bumping has been refactored to eliminate npm command dependencies:

**Changes:**
- Removed `bumpNpmVersion()` - was causing npm warnings to pollute deno.json
- Removed `syncVersionToDenoJson()` - no longer needed with unified approach
- Removed separate `bumpDenoVersion()` - replaced by unified implementation
- Added `readCurrentVersion()` helper - reads from either package.json or deno.json
- Added `updateVersionFile()` helper - updates JSON files directly
- Unified `bumpProjectVersion()` - handles npm/deno/both with single clean code path

**Benefits:**
- No npm warning pollution in version fields
- Full control over version bumping process
- Simpler git history (no amending, no force-tagging)
- Same code path for all project types
- Reuses existing `calculateNewVersion()` function
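To make the unified flow concrete, here is a minimal standalone sketch of the same steps (read the version field, compute the bump, rewrite the JSON, then stage, commit and tag). It deliberately uses plain Node APIs rather than the project's smartfs/smartshell wrappers, so the function name and JSON handling are illustrative; the real helper signatures live in ts/mod_commit/mod.helpers.ts, shown later in this diff.

```typescript
// Illustrative only: mirrors the unified bump flow with plain Node APIs,
// not the project's smartfs/smartshell wrappers.
import { readFile, writeFile } from 'node:fs/promises';
import { execSync } from 'node:child_process';

type VersionType = 'patch' | 'minor' | 'major';

async function bumpVersionSketch(filePath: string, versionType: VersionType): Promise<string> {
  // 1. Read the current version straight from the JSON file (package.json or deno.json)
  const config = JSON.parse(await readFile(filePath, 'utf8')) as { version: string };
  const [major, minor, patch] = config.version.split('.').map(Number);

  // 2. Calculate the new semver value
  const newVersion =
    versionType === 'major' ? `${major + 1}.0.0`
    : versionType === 'minor' ? `${major}.${minor + 1}.0`
    : `${major}.${minor}.${patch + 1}`;

  // 3. Write the file back, then stage, commit and tag it (no `npm version` involved)
  config.version = newVersion;
  await writeFile(filePath, JSON.stringify(config, null, 2) + '\n');
  execSync(`git add ${filePath}`);
  execSync(`git commit -m "v${newVersion}"`);
  execSync(`git tag v${newVersion} -m "v${newVersion}"`);
  return newVersion;
}
```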
### Auto-Accept Flag for Commits

The commit module now supports `-y/--yes` flag for non-interactive commits:

**Usage:**
- `gitzone commit -y` - Auto-accepts AI recommendations without prompts
- `gitzone commit -yp` - Auto-accepts and pushes to origin
- Separate `-p/--push` flag controls push behavior

**Implementation:**
- Creates AnswerBucket programmatically when `-y` flag detected
- Preserves all UI output for transparency
- Fully backward compatible with interactive mode
- CI/CD friendly for automated workflows
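Condensed to its core, the auto-accept path looks like the sketch below. The AnswerBucket calls are taken from the ts/mod_commit/index.ts change further down in this diff; only the wrapper function `pickAutoAnswers` is an illustrative addition.

```typescript
import * as plugins from './mod.plugins.js';

// Returns a pre-filled AnswerBucket when -y/--yes is set, otherwise null so the
// caller can fall back to the interactive SmartInteract question queue.
const pickAutoAnswers = (
  argvArg: any,
  nextCommitObject: any,
): plugins.smartinteract.AnswerBucket | null => {
  if (!(argvArg.y || argvArg.yes)) {
    return null;
  }
  const answerBucket = new plugins.smartinteract.AnswerBucket();
  answerBucket.addAnswer({ name: 'commitType', value: nextCommitObject.recommendedNextVersionLevel });
  answerBucket.addAnswer({ name: 'commitScope', value: nextCommitObject.recommendedNextVersionScope });
  answerBucket.addAnswer({ name: 'commitDescription', value: nextCommitObject.recommendedNextVersionMessage });
  // Only push when -p/--push is supplied alongside -y
  answerBucket.addAnswer({ name: 'pushToOrigin', value: !!(argvArg.p || argvArg.push) });
  return answerBucket;
};
```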
## Development Tips

@@ -137,6 +172,27 @@ The format module is responsible for project standardization:

## CLI Usage

### Commit Commands

```bash
# Interactive commit (default)
gitzone commit

# Auto-accept AI recommendations (no prompts)
gitzone commit -y
gitzone commit --yes

# Auto-accept and push to origin
gitzone commit -yp
gitzone commit -y -p
gitzone commit --yes --push

# Run format before commit
gitzone commit --format
```

### Format Commands

```bash
# Basic format
gitzone format
@@ -187,7 +243,60 @@ gitzone format --clean-backups

## API Changes

- smartfile API updated to use fs._ and memory._ namespaces
### Smartfile v13 Migration (Latest - Completed)

The project has been fully migrated from @push.rocks/smartfile v11 to v13, which introduced a major breaking change where filesystem operations were split into two separate packages:

**Packages:**
- `@push.rocks/smartfile` v13.0.1 - File representation classes (SmartFile, StreamFile, VirtualDirectory)
- `@push.rocks/smartfs` v1.1.0 - Filesystem operations (read, write, exists, stat, etc.)

**Key API Changes:**
1. **File Reading**:
   - Old: `plugins.smartfile.fs.toStringSync(path)` or `plugins.smartfile.fs.toObjectSync(path)`
   - New: `await plugins.smartfs.file(path).encoding('utf8').read()` + JSON.parse if needed
   - Important: `read()` returns `string | Buffer` - use `as string` type assertion when encoding is set

2. **File Writing**:
   - Old: `plugins.smartfile.memory.toFs(content, path)` or `plugins.smartfile.memory.toFsSync(content, path)`
   - New: `await plugins.smartfs.file(path).encoding('utf8').write(content)`

3. **File Existence**:
   - Old: `plugins.smartfile.fs.fileExists(path)` or `plugins.smartfile.fs.fileExistsSync(path)`
   - New: `await plugins.smartfs.file(path).exists()`

4. **Directory Operations**:
   - Old: `plugins.smartfile.fs.ensureDir(path)`
   - New: `await plugins.smartfs.directory(path).recursive().create()`
   - Old: `plugins.smartfile.fs.remove(path)`
   - New: `await plugins.smartfs.directory(path).recursive().delete()` or `await plugins.smartfs.file(path).delete()`

5. **Directory Listing**:
   - Old: `plugins.smartfile.fs.listFolders(path)` or `plugins.smartfile.fs.listFoldersSync(path)`
   - New: `await plugins.smartfs.directory(path).list()` then filter by `stats.isDirectory`
   - Note: `list()` returns `IDirectoryEntry[]` with `path` and `name` properties - use `stat()` to check if directory

6. **File Stats**:
   - Old: `stats.isDirectory()` (method)
   - New: `stats.isDirectory` (boolean property)
   - Old: `stats.mtimeMs`
   - New: `stats.mtime.getTime()`

7. **SmartFile Factory**:
   - Old: Direct SmartFile instantiation
   - New: `plugins.smartfile.SmartFileFactory.nodeFs()` then factory methods

**Migration Pattern:**
All sync methods must become async. Functions that were previously synchronous (like `getProjectName()`) now return `Promise<T>` and must be awaited.
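A small before/after sketch of that pattern, composed from the API forms listed above; `getProjectName()` is the helper the note mentions, while the exact JSON handling and error message here are illustrative.

```typescript
import * as plugins from './mod.plugins.js';

// Before (smartfile v11, synchronous):
//   const packageJson = plugins.smartfile.fs.toObjectSync('./package.json');
//   return packageJson.name;

// After (smartfs, asynchronous): callers must now await the helper.
export async function getProjectName(): Promise<string> {
  const exists = await plugins.smartfs.file('./package.json').exists();
  if (!exists) {
    throw new Error('package.json not found');
  }
  // read() returns string | Buffer, so assert string when an encoding is set
  const content = (await plugins.smartfs
    .file('./package.json')
    .encoding('utf8')
    .read()) as string;
  const packageJson = JSON.parse(content) as { name?: string };
  return packageJson.name ?? '';
}
```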
**Affected Modules:**
- ts/mod_format/* (largest area - 15+ files)
- ts/mod_commit/* (version bumping)
- ts/mod_services/* (configuration management)
- ts/mod_meta/* (meta repository management)
- ts/mod_standard/* (template listing)
- ts/mod_template/* (template operations)

**Previous API Changes:**
- smartnpm requires instance creation: `new NpmRegistry()`
- All file operations now use updated APIs
- Type imports use `import type` for proper verbatim module syntax
readme.md (43 changed lines)
@@ -9,6 +9,10 @@

gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, seamless version control, and development service orchestration. Whether you're bootstrapping a new TypeScript project, maintaining code quality, managing complex multi-repository setups, or spinning up local development databases, gitzone has got you covered.

## Issue Reporting and Security

For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.

## 🏃‍♂️ Quick Start

### Installation

@@ -35,13 +39,13 @@ gitzone format

# Start local MongoDB and MinIO services
gitzone services start

# Create a semantic commit
# Create a semantic commit with AI-powered suggestions
gitzone commit
```

## 🛠️ Core Features

### 🐳 Development Services Management (NEW!)
### 🐳 Development Services Management

Effortlessly manage local MongoDB and MinIO (S3-compatible) services for your development environment:

@@ -110,7 +114,7 @@ gitzone template [template-name]

Each template comes pre-configured with:

- ✅ TypeScript with modern configurations
- ✅ Automated testing setup
- ✅ Automated testing setup with `@git.zone/tstest`
- ✅ CI/CD pipelines (GitLab/GitHub)
- ✅ Code formatting and linting
- ✅ Documentation structure
@@ -170,18 +174,27 @@ gitzone format --clean-backups

- **Gitignore** - Repository ignore rules
- **Templates** - Project template updates
- **Npmextra** - Extended npm configurations
- **Cleanup** - Removes obsolete files (yarn.lock, package-lock.json, tslint.json, etc.)

### 🔀 Semantic Commits & Versioning

Create standardized commits that automatically handle versioning:
Create standardized commits with AI-powered suggestions that automatically handle versioning:

```bash
# Interactive commit with AI recommendations
gitzone commit

# Auto-accept AI recommendations
gitzone commit -y

# Auto-accept and push
gitzone commit -y -p
```

Features:

- 📝 Interactive commit message builder
- 🤖 **AI-powered analysis** - Analyzes your changes and suggests commit type, scope, and message
- 📝 Interactive commit message builder with smart defaults
- 🏷️ Automatic version bumping (major/minor/patch)
- 📜 Changelog generation
- 🚀 Optional auto-push to origin
@@ -189,11 +202,10 @@ Features:

The commit wizard guides you through:

1. **Type selection** (feat/fix/docs/style/refactor/perf/test/chore)
1. **Type selection** (fix/feat/BREAKING CHANGE) with AI recommendation
2. **Scope definition** (component/module affected)
3. **Description crafting**
4. **Breaking change detection**
5. **Version bump determination**
4. **Version bump determination**

### 🏗️ Meta Repository Management
@@ -443,7 +455,6 @@ gitzone services clean # ⚠️ Warning: deletes data

- **TypeScript** - First-class support
- **Prettier** - Code formatting
- **ESLint** - Linting (via format modules)
- **npm/pnpm** - Package management
- **MongoDB** - Local database service
- **MinIO** - S3-compatible object storage
@@ -522,19 +533,21 @@ gitzone is optimized for speed:

## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.

Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.

### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
Task Venture Capital GmbH
Registered at District Court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
For any legal inquiries or further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@git.zone/cli',
  version: '1.18.7',
  version: '2.2.0',
  description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
}
@@ -3,6 +3,8 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import * as helpers from './mod.helpers.js';
import * as ui from './mod.ui.js';

export const run = async (argvArg: any) => {
  if (argvArg.format) {
@@ -10,7 +12,8 @@ export const run = async (argvArg: any) => {
    await formatMod.run();
  }

  logger.log('info', `gathering facts...`);
  ui.printHeader('🔍 Analyzing repository changes...');

  const aidoc = new plugins.tsdoc.AiDoc();
  await aidoc.start();
@@ -18,45 +21,69 @@ export const run = async (argvArg: any) => {

  await aidoc.stop();

  logger.log(
    'info',
    `---------
Next recommended commit would be:
===========
-> ${nextCommitObject.recommendedNextVersion}:
-> ${nextCommitObject.recommendedNextVersionLevel}(${nextCommitObject.recommendedNextVersionScope}): ${nextCommitObject.recommendedNextVersionMessage}
===========
`,
  );
  const commitInteract = new plugins.smartinteract.SmartInteract();
  commitInteract.addQuestions([
    {
      type: 'list',
      name: `commitType`,
      message: `Choose TYPE of the commit:`,
      choices: [`fix`, `feat`, `BREAKING CHANGE`],
      default: nextCommitObject.recommendedNextVersionLevel,
    },
    {
      type: 'input',
      name: `commitScope`,
      message: `What is the SCOPE of the commit:`,
      default: nextCommitObject.recommendedNextVersionScope,
    },
    {
      type: `input`,
      name: `commitDescription`,
      message: `What is the DESCRIPTION of the commit?`,
      default: nextCommitObject.recommendedNextVersionMessage,
    },
    {
      type: 'confirm',
      name: `pushToOrigin`,
      message: `Do you want to push this version now?`,
      default: true,
    },
  ]);
  const answerBucket = await commitInteract.runQueue();
  ui.printRecommendation({
    recommendedNextVersion: nextCommitObject.recommendedNextVersion,
    recommendedNextVersionLevel: nextCommitObject.recommendedNextVersionLevel,
    recommendedNextVersionScope: nextCommitObject.recommendedNextVersionScope,
    recommendedNextVersionMessage: nextCommitObject.recommendedNextVersionMessage,
  });
  let answerBucket: plugins.smartinteract.AnswerBucket;

  // Check if -y or --yes flag is set to auto-accept recommendations
  if (argvArg.y || argvArg.yes) {
    // Auto-mode: create AnswerBucket programmatically
    logger.log('info', '✓ Auto-accepting AI recommendations (--yes flag)');

    answerBucket = new plugins.smartinteract.AnswerBucket();
    answerBucket.addAnswer({
      name: 'commitType',
      value: nextCommitObject.recommendedNextVersionLevel,
    });
    answerBucket.addAnswer({
      name: 'commitScope',
      value: nextCommitObject.recommendedNextVersionScope,
    });
    answerBucket.addAnswer({
      name: 'commitDescription',
      value: nextCommitObject.recommendedNextVersionMessage,
    });
    answerBucket.addAnswer({
      name: 'pushToOrigin',
      value: !!(argvArg.p || argvArg.push), // Only push if -p flag also provided
    });
  } else {
    // Interactive mode: prompt user for input
    const commitInteract = new plugins.smartinteract.SmartInteract();
    commitInteract.addQuestions([
      {
        type: 'list',
        name: `commitType`,
        message: `Choose TYPE of the commit:`,
        choices: [`fix`, `feat`, `BREAKING CHANGE`],
        default: nextCommitObject.recommendedNextVersionLevel,
      },
      {
        type: 'input',
        name: `commitScope`,
        message: `What is the SCOPE of the commit:`,
        default: nextCommitObject.recommendedNextVersionScope,
      },
      {
        type: `input`,
        name: `commitDescription`,
        message: `What is the DESCRIPTION of the commit?`,
        default: nextCommitObject.recommendedNextVersionMessage,
      },
      {
        type: 'confirm',
        name: `pushToOrigin`,
        message: `Do you want to push this version now?`,
        default: true,
      },
    ]);
    answerBucket = await commitInteract.runQueue();
  }
  const commitString = createCommitStringFromAnswerBucket(answerBucket);
  const commitVersionType = (() => {
    switch (answerBucket.getAnswerFor('commitType')) {
@@ -69,20 +96,30 @@ export const run = async (argvArg: any) => {
    }
  })();

  logger.log('info', `OK! Creating commit with message '${commitString}'`);
  ui.printHeader('✨ Creating Semantic Commit');
  ui.printCommitMessage(commitString);
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    sourceFilePaths: [],
  });

  logger.log('info', `Baking commitinfo into code ...`);
  // Determine total steps (6 if pushing, 5 if not)
  const totalSteps = answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true') ? 6 : 5;
  let currentStep = 0;

  // Step 1: Baking commitinfo
  currentStep++;
  ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'in-progress');
  const commitInfo = new plugins.commitinfo.CommitInfo(
    paths.cwd,
    commitVersionType,
  );
  await commitInfo.writeIntoPotentialDirs();
  ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'done');

  logger.log('info', `Writing changelog.md ...`);
  // Step 2: Writing changelog
  currentStep++;
  ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'in-progress');
  let changelog = nextCommitObject.changelog;
  changelog = changelog.replaceAll(
    '{{nextVersion}}',
@@ -105,21 +142,58 @@ export const run = async (argvArg: any) => {
    changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
  }

  await plugins.smartfile.memory.toFs(
    changelog,
    plugins.path.join(paths.cwd, `changelog.md`),
  );
  await plugins.smartfs
    .file(plugins.path.join(paths.cwd, `changelog.md`))
    .encoding('utf8')
    .write(changelog);
  ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'done');

  logger.log('info', `Staging files for commit:`);
  // Step 3: Staging files
  currentStep++;
  ui.printStep(currentStep, totalSteps, '📦 Staging files', 'in-progress');
  await smartshellInstance.exec(`git add -A`);
  ui.printStep(currentStep, totalSteps, '📦 Staging files', 'done');

  // Step 4: Creating commit
  currentStep++;
  ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'in-progress');
  await smartshellInstance.exec(`git commit -m "${commitString}"`);
  await smartshellInstance.exec(`npm version ${commitVersionType}`);
  ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'done');

  // Step 5: Bumping version
  currentStep++;
  const projectType = await helpers.detectProjectType();
  const newVersion = await helpers.bumpProjectVersion(projectType, commitVersionType, currentStep, totalSteps);

  // Step 6: Push to remote (optional)
  const currentBranch = await helpers.detectCurrentBranch();
  if (
    answerBucket.getAnswerFor('pushToOrigin') &&
    !(process.env.CI === 'true')
  ) {
    await smartshellInstance.exec(`git push origin master --follow-tags`);
    currentStep++;
    ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'in-progress');
    await smartshellInstance.exec(`git push origin ${currentBranch} --follow-tags`);
    ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'done');
  }

  console.log(''); // Add spacing before summary

  // Get commit SHA for summary
  const commitShaResult = await smartshellInstance.exec('git rev-parse --short HEAD');
  const commitSha = commitShaResult.stdout.trim();

  // Print final summary
  ui.printSummary({
    projectType,
    branch: currentBranch,
    commitType: answerBucket.getAnswerFor('commitType'),
    commitScope: answerBucket.getAnswerFor('commitScope'),
    commitMessage: answerBucket.getAnswerFor('commitDescription'),
    newVersion: newVersion,
    commitSha: commitSha,
    pushed: answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true'),
  });
};

const createCommitStringFromAnswerBucket = (
ts/mod_commit/mod.helpers.ts (new file, 218 lines)
@@ -0,0 +1,218 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import * as ui from './mod.ui.js';

export type ProjectType = 'npm' | 'deno' | 'both' | 'none';
export type VersionType = 'patch' | 'minor' | 'major';

/**
 * Detects the current git branch
 * @returns The current branch name, defaults to 'master' if detection fails
 */
export async function detectCurrentBranch(): Promise<string> {
  try {
    const smartshellInstance = new plugins.smartshell.Smartshell({
      executor: 'bash',
      sourceFilePaths: [],
    });
    const result = await smartshellInstance.exec('git branch --show-current');
    const branchName = result.stdout.trim();

    if (!branchName) {
      logger.log('warn', 'Could not detect current branch, falling back to "master"');
      return 'master';
    }

    logger.log('info', `Detected current branch: ${branchName}`);
    return branchName;
  } catch (error) {
    logger.log('warn', `Failed to detect branch: ${error.message}, falling back to "master"`);
    return 'master';
  }
}

/**
 * Detects the project type based on presence of package.json and/or deno.json
 * @returns The project type
 */
export async function detectProjectType(): Promise<ProjectType> {
  const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
  const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');

  const hasPackageJson = await plugins.smartfs.file(packageJsonPath).exists();
  const hasDenoJson = await plugins.smartfs.file(denoJsonPath).exists();

  if (hasPackageJson && hasDenoJson) {
    logger.log('info', 'Detected dual project (npm + deno)');
    return 'both';
  } else if (hasPackageJson) {
    logger.log('info', 'Detected npm project');
    return 'npm';
  } else if (hasDenoJson) {
    logger.log('info', 'Detected deno project');
    return 'deno';
  } else {
    throw new Error('No package.json or deno.json found in current directory');
  }
}
/**
 * Parses a semantic version string and bumps it according to the version type
 * @param currentVersion Current version string (e.g., "1.2.3")
 * @param versionType Type of version bump
 * @returns New version string
 */
function calculateNewVersion(currentVersion: string, versionType: VersionType): string {
  const versionMatch = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)/);

  if (!versionMatch) {
    throw new Error(`Invalid version format: ${currentVersion}`);
  }

  let [, major, minor, patch] = versionMatch.map(Number);

  switch (versionType) {
    case 'major':
      major += 1;
      minor = 0;
      patch = 0;
      break;
    case 'minor':
      minor += 1;
      patch = 0;
      break;
    case 'patch':
      patch += 1;
      break;
  }

  return `${major}.${minor}.${patch}`;
}

/**
 * Reads the current version from package.json or deno.json
 * @param projectType The project type to determine which file to read
 * @returns The current version string
 */
async function readCurrentVersion(projectType: ProjectType): Promise<string> {
  if (projectType === 'npm' || projectType === 'both') {
    const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
    const content = (await plugins.smartfs
      .file(packageJsonPath)
      .encoding('utf8')
      .read()) as string;
    const packageJson = JSON.parse(content) as { version?: string };

    if (!packageJson.version) {
      throw new Error('package.json does not contain a version field');
    }
    return packageJson.version;
  } else {
    const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
    const content = (await plugins.smartfs
      .file(denoJsonPath)
      .encoding('utf8')
      .read()) as string;
    const denoConfig = JSON.parse(content) as { version?: string };

    if (!denoConfig.version) {
      throw new Error('deno.json does not contain a version field');
    }
    return denoConfig.version;
  }
}

/**
 * Updates the version field in a JSON file (package.json or deno.json)
 * @param filePath Path to the JSON file
 * @param newVersion The new version to write
 */
async function updateVersionFile(filePath: string, newVersion: string): Promise<void> {
  const content = (await plugins.smartfs
    .file(filePath)
    .encoding('utf8')
    .read()) as string;
  const config = JSON.parse(content) as { version?: string };
  config.version = newVersion;
  await plugins.smartfs
    .file(filePath)
    .encoding('utf8')
    .write(JSON.stringify(config, null, 2) + '\n');
}
/**
 * Bumps the project version based on project type
 * Handles npm-only, deno-only, and dual projects with unified logic
 * @param projectType The detected project type
 * @param versionType The type of version bump
 * @param currentStep The current step number for progress display
 * @param totalSteps The total number of steps for progress display
 * @returns The new version string
 */
export async function bumpProjectVersion(
  projectType: ProjectType,
  versionType: VersionType,
  currentStep?: number,
  totalSteps?: number
): Promise<string> {
  if (projectType === 'none') {
    throw new Error('Cannot bump version: no package.json or deno.json found');
  }

  const projectEmoji = projectType === 'npm' ? '📦' : projectType === 'deno' ? '🦕' : '🔀';
  const description = `🏷️ Bumping version (${projectEmoji} ${projectType})`;

  if (currentStep && totalSteps) {
    ui.printStep(currentStep, totalSteps, description, 'in-progress');
  }

  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    sourceFilePaths: [],
  });

  try {
    // 1. Read current version
    const currentVersion = await readCurrentVersion(projectType);

    // 2. Calculate new version (reuse existing function!)
    const newVersion = calculateNewVersion(currentVersion, versionType);

    logger.log('info', `Bumping version: ${currentVersion} → ${newVersion}`);

    // 3. Determine which files to update
    const filesToUpdate: string[] = [];
    const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
    const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');

    if (projectType === 'npm' || projectType === 'both') {
      await updateVersionFile(packageJsonPath, newVersion);
      filesToUpdate.push('package.json');
    }

    if (projectType === 'deno' || projectType === 'both') {
      await updateVersionFile(denoJsonPath, newVersion);
      filesToUpdate.push('deno.json');
    }

    // 4. Stage all updated files
    await smartshellInstance.exec(`git add ${filesToUpdate.join(' ')}`);

    // 5. Create version commit
    await smartshellInstance.exec(`git commit -m "v${newVersion}"`);

    // 6. Create version tag
    await smartshellInstance.exec(`git tag v${newVersion} -m "v${newVersion}"`);

    logger.log('info', `Created commit and tag v${newVersion}`);

    if (currentStep && totalSteps) {
      ui.printStep(currentStep, totalSteps, description, 'done');
    }

    return newVersion;
  } catch (error) {
    throw new Error(`Failed to bump project version: ${error.message}`);
  }
}
ts/mod_commit/mod.ui.ts (new file, 196 lines)
@@ -0,0 +1,196 @@
import { logger } from '../gitzone.logging.js';

/**
 * UI helper module for beautiful CLI output
 */

interface ICommitSummary {
  projectType: string;
  branch: string;
  commitType: string;
  commitScope: string;
  commitMessage: string;
  newVersion: string;
  commitSha?: string;
  pushed: boolean;
  repoUrl?: string;
}

interface IRecommendation {
  recommendedNextVersion: string;
  recommendedNextVersionLevel: string;
  recommendedNextVersionScope: string;
  recommendedNextVersionMessage: string;
}

/**
 * Print a header with a box around it
 */
export function printHeader(title: string): void {
  const width = 57;
  const padding = Math.max(0, width - title.length - 2);
  const leftPad = Math.floor(padding / 2);
  const rightPad = padding - leftPad;

  console.log('');
  console.log('╭─' + '─'.repeat(width) + '─╮');
  console.log('│ ' + title + ' '.repeat(rightPad + leftPad) + ' │');
  console.log('╰─' + '─'.repeat(width) + '─╯');
  console.log('');
}

/**
 * Print a section with a border
 */
export function printSection(title: string, lines: string[]): void {
  const width = 59;

  console.log('┌─ ' + title + ' ' + '─'.repeat(Math.max(0, width - title.length - 3)) + '┐');
  console.log('│' + ' '.repeat(width) + '│');

  for (const line of lines) {
    const padding = width - line.length;
    console.log('│ ' + line + ' '.repeat(Math.max(0, padding - 2)) + '│');
  }

  console.log('│' + ' '.repeat(width) + '│');
  console.log('└─' + '─'.repeat(width) + '─┘');
  console.log('');
}
/**
 * Print AI recommendations in a nice box
 */
export function printRecommendation(recommendation: IRecommendation): void {
  const lines = [
    `Suggested Version: v${recommendation.recommendedNextVersion}`,
    `Suggested Type: ${recommendation.recommendedNextVersionLevel}`,
    `Suggested Scope: ${recommendation.recommendedNextVersionScope}`,
    `Suggested Message: ${recommendation.recommendedNextVersionMessage}`,
  ];

  printSection('📊 AI Recommendations', lines);
}

/**
 * Print a progress step
 */
export function printStep(
  current: number,
  total: number,
  description: string,
  status: 'in-progress' | 'done' | 'error'
): void {
  const statusIcon = status === 'done' ? '✓' : status === 'error' ? '✗' : '⏳';
  const dots = '.'.repeat(Math.max(0, 40 - description.length));

  console.log(` [${current}/${total}] ${description}${dots} ${statusIcon}`);

  // Clear the line on next update if in progress
  if (status === 'in-progress') {
    process.stdout.write('\x1b[1A'); // Move cursor up one line
  }
}

/**
 * Get emoji for project type
 */
function getProjectTypeEmoji(projectType: string): string {
  switch (projectType) {
    case 'npm':
      return '📦 npm';
    case 'deno':
      return '🦕 Deno';
    case 'both':
      return '🔀 npm + Deno';
    default:
      return '❓ Unknown';
  }
}

/**
 * Get emoji for commit type
 */
function getCommitTypeEmoji(commitType: string): string {
  switch (commitType) {
    case 'fix':
      return '🔧 fix';
    case 'feat':
      return '✨ feat';
    case 'BREAKING CHANGE':
      return '💥 BREAKING CHANGE';
    default:
      return commitType;
  }
}
/**
 * Print final commit summary
 */
export function printSummary(summary: ICommitSummary): void {
  const lines = [
    `Project Type: ${getProjectTypeEmoji(summary.projectType)}`,
    `Branch: 🌿 ${summary.branch}`,
    `Commit Type: ${getCommitTypeEmoji(summary.commitType)}`,
    `Scope: 📍 ${summary.commitScope}`,
    `New Version: 🏷️ v${summary.newVersion}`,
  ];

  if (summary.commitSha) {
    lines.push(`Commit SHA: 📌 ${summary.commitSha}`);
  }

  if (summary.pushed) {
    lines.push(`Remote: ✓ Pushed successfully`);
  } else {
    lines.push(`Remote: ⊘ Not pushed (local only)`);
  }

  if (summary.repoUrl && summary.commitSha) {
    lines.push('');
    lines.push(`View at: ${summary.repoUrl}/commit/${summary.commitSha}`);
  }

  printSection('✅ Commit Summary', lines);

  if (summary.pushed) {
    console.log('🎉 All done! Your changes are committed and pushed.\n');
  } else {
    console.log('✓ Commit created successfully.\n');
  }
}

/**
 * Print an info message with consistent formatting
 */
export function printInfo(message: string): void {
  console.log(` ℹ️ ${message}`);
}

/**
 * Print a success message
 */
export function printSuccess(message: string): void {
  console.log(` ✓ ${message}`);
}

/**
 * Print a warning message
 */
export function printWarning(message: string): void {
  logger.log('warn', `⚠️ ${message}`);
}

/**
 * Print an error message
 */
export function printError(message: string): void {
  logger.log('error', `✗ ${message}`);
}

/**
 * Print commit message being created
 */
export function printCommitMessage(commitString: string): void {
  console.log(`\n 📝 Commit: ${commitString}\n`);
}
@@ -65,15 +65,15 @@ export abstract class BaseFormatter {
      normalizedPath = './' + filepath;
    }

    await plugins.smartfile.memory.toFs(content, normalizedPath);
    await plugins.smartfs.file(normalizedPath).encoding('utf8').write(content);
  }

  protected async createFile(filepath: string, content: string): Promise<void> {
    await plugins.smartfile.memory.toFs(content, filepath);
    await plugins.smartfs.file(filepath).encoding('utf8').write(content);
  }

  protected async deleteFile(filepath: string): Promise<void> {
    await plugins.smartfile.fs.remove(filepath);
    await plugins.smartfs.file(filepath).delete();
  }

  protected async shouldProcessFile(filepath: string): Promise<boolean> {
@@ -25,7 +25,7 @@ export class ChangeCache {
  }

  async initialize(): Promise<void> {
    await plugins.smartfile.fs.ensureDir(this.cacheDir);
    await plugins.smartfs.directory(this.cacheDir).recursive().create();
  }

  async getManifest(): Promise<ICacheManifest> {
@@ -35,13 +35,16 @@ export class ChangeCache {
      files: [],
    };

    const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
    const exists = await plugins.smartfs.file(this.manifestPath).exists();
    if (!exists) {
      return defaultManifest;
    }

    try {
      const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
      const content = (await plugins.smartfs
        .file(this.manifestPath)
        .encoding('utf8')
        .read()) as string;
      const manifest = JSON.parse(content);

      // Validate the manifest structure
@@ -57,7 +60,7 @@ export class ChangeCache {
      );
      // Try to delete the corrupted file
      try {
        await plugins.smartfile.fs.remove(this.manifestPath);
        await plugins.smartfs.file(this.manifestPath).delete();
      } catch (removeError) {
        // Ignore removal errors
      }
@@ -72,11 +75,14 @@ export class ChangeCache {
    }

    // Ensure directory exists
    await plugins.smartfile.fs.ensureDir(this.cacheDir);
    await plugins.smartfs.directory(this.cacheDir).recursive().create();

    // Write directly with proper JSON stringification
    const jsonContent = JSON.stringify(manifest, null, 2);
    await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
    await plugins.smartfs
      .file(this.manifestPath)
      .encoding('utf8')
      .write(jsonContent);
  }

  async hasFileChanged(filePath: string): Promise<boolean> {
@@ -85,20 +91,23 @@ export class ChangeCache {
|
||||
: plugins.path.join(paths.cwd, filePath);
|
||||
|
||||
// Check if file exists
|
||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
||||
const exists = await plugins.smartfs.file(absolutePath).exists();
|
||||
if (!exists) {
|
||||
return true; // File doesn't exist, so it's "changed" (will be created)
|
||||
}
|
||||
|
||||
// Get current file stats
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||
|
||||
// Skip directories
|
||||
if (stats.isDirectory()) {
|
||||
if (stats.isDirectory) {
|
||||
return false; // Directories are not processed
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const currentChecksum = this.calculateChecksum(content);
|
||||
|
||||
// Get cached info
|
||||
@@ -113,7 +122,7 @@ export class ChangeCache {
|
||||
return (
|
||||
cachedFile.checksum !== currentChecksum ||
|
||||
cachedFile.size !== stats.size ||
|
||||
cachedFile.modified !== stats.mtimeMs
|
||||
cachedFile.modified !== stats.mtime.getTime()
|
||||
);
|
||||
}
|
||||
|
||||
@@ -123,14 +132,17 @@ export class ChangeCache {
|
||||
: plugins.path.join(paths.cwd, filePath);
|
||||
|
||||
// Get current file stats
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||
|
||||
// Skip directories
|
||||
if (stats.isDirectory()) {
|
||||
if (stats.isDirectory) {
|
||||
return; // Don't cache directories
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
// Update manifest
|
||||
@@ -140,7 +152,7 @@ export class ChangeCache {
|
||||
const cacheEntry: IFileCache = {
|
||||
path: filePath,
|
||||
checksum,
|
||||
modified: stats.mtimeMs,
|
||||
modified: stats.mtime.getTime(),
|
||||
size: stats.size,
|
||||
};
|
||||
|
||||
@@ -176,7 +188,7 @@ export class ChangeCache {
|
||||
? file.path
|
||||
: plugins.path.join(paths.cwd, file.path);
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(absolutePath)) {
|
||||
if (await plugins.smartfs.file(absolutePath).exists()) {
|
||||
validFiles.push(file);
|
||||
}
|
||||
}
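
The ChangeCache hunks above swap smartfile's synchronous helpers for smartfs's fluent async API, read `isDirectory` as a property, and compare `stats.mtime.getTime()` instead of `mtimeMs`. A condensed sketch of the resulting change-detection check, assuming a `calculateChecksum` helper like the one this class already defines:

```typescript
// Sketch only: mirrors the smartfs calls shown in the diff above.
async function hasChanged(
  absolutePath: string,
  cached: { checksum: string; size: number; modified: number },
): Promise<boolean> {
  const exists = await plugins.smartfs.file(absolutePath).exists();
  if (!exists) return true; // missing file counts as changed (it will be created)

  const stats = await plugins.smartfs.file(absolutePath).stat();
  if (stats.isDirectory) return false; // directories are never processed

  const content = (await plugins.smartfs
    .file(absolutePath)
    .encoding('utf8')
    .read()) as string;

  return (
    calculateChecksum(content) !== cached.checksum ||
    stats.size !== cached.size ||
    stats.mtime.getTime() !== cached.modified
  );
}
```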
|
||||
|
||||
@@ -21,14 +21,15 @@ export class DiffReporter {
    }

    try {
      const exists = await plugins.smartfile.fs.fileExists(change.path);
      const exists = await plugins.smartfs.file(change.path).exists();
      if (!exists) {
        return null;
      }

      const currentContent = await plugins.smartfile.fs.toStringSync(
        change.path,
      );
      const currentContent = (await plugins.smartfs
        .file(change.path)
        .encoding('utf8')
        .read()) as string;

      // For planned changes, we need the new content
      if (!change.content) {
@@ -107,10 +108,10 @@ export class DiffReporter {
      })),
    };

    await plugins.smartfile.memory.toFs(
      JSON.stringify(report, null, 2),
      outputPath,
    );
    await plugins.smartfs
      .file(outputPath)
      .encoding('utf8')
      .write(JSON.stringify(report, null, 2));
    logger.log('info', `Diff report saved to ${outputPath}`);
  }
|
||||
|
||||
|
||||
@@ -192,10 +192,10 @@ export class FormatStats {
      moduleStats: Array.from(this.stats.moduleStats.values()),
    };

    await plugins.smartfile.memory.toFs(
      JSON.stringify(report, null, 2),
      outputPath,
    );
    await plugins.smartfs
      .file(outputPath)
      .encoding('utf8')
      .write(JSON.stringify(report, null, 2));
    logger.log('info', `Statistics report saved to ${outputPath}`);
  }
|
||||
|
||||
|
||||
@@ -36,21 +36,27 @@ export class RollbackManager {
|
||||
: plugins.path.join(paths.cwd, filepath);
|
||||
|
||||
// Check if file exists
|
||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
||||
const exists = await plugins.smartfs.file(absolutePath).exists();
|
||||
if (!exists) {
|
||||
// File doesn't exist yet (will be created), so we skip backup
|
||||
return;
|
||||
}
|
||||
|
||||
// Read file content and metadata
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
// Create backup
|
||||
const backupPath = this.getBackupPath(operationId, filepath);
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(backupPath));
|
||||
await plugins.smartfile.memory.toFs(content, backupPath);
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.dirname(backupPath))
|
||||
.recursive()
|
||||
.create();
|
||||
await plugins.smartfs.file(backupPath).encoding('utf8').write(content);
|
||||
|
||||
// Update operation
|
||||
operation.files.push({
|
||||
@@ -84,7 +90,10 @@ export class RollbackManager {
|
||||
|
||||
// Verify backup integrity
|
||||
const backupPath = this.getBackupPath(operationId, file.path);
|
||||
const backupContent = plugins.smartfile.fs.toStringSync(backupPath);
|
||||
const backupContent = await plugins.smartfs
|
||||
.file(backupPath)
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
const backupChecksum = this.calculateChecksum(backupContent);
|
||||
|
||||
if (backupChecksum !== file.checksum) {
|
||||
@@ -92,7 +101,10 @@ export class RollbackManager {
|
||||
}
|
||||
|
||||
// Restore file
|
||||
await plugins.smartfile.memory.toFs(file.originalContent, absolutePath);
|
||||
await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.write(file.originalContent);
|
||||
|
||||
// Restore permissions
|
||||
const mode = parseInt(file.permissions, 8);
|
||||
@@ -129,7 +141,7 @@ export class RollbackManager {
|
||||
'operations',
|
||||
operation.id,
|
||||
);
|
||||
await plugins.smartfile.fs.remove(operationDir);
|
||||
await plugins.smartfs.directory(operationDir).recursive().delete();
|
||||
|
||||
// Remove from manifest
|
||||
manifest.operations = manifest.operations.filter(
|
||||
@@ -148,13 +160,16 @@ export class RollbackManager {
|
||||
|
||||
for (const file of operation.files) {
|
||||
const backupPath = this.getBackupPath(operationId, file.path);
|
||||
const exists = await plugins.smartfile.fs.fileExists(backupPath);
|
||||
const exists = await plugins.smartfs.file(backupPath).exists();
|
||||
|
||||
if (!exists) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(backupPath);
|
||||
const content = await plugins.smartfs
|
||||
.file(backupPath)
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
if (checksum !== file.checksum) {
|
||||
@@ -171,10 +186,11 @@ export class RollbackManager {
|
||||
}
|
||||
|
||||
private async ensureBackupDir(): Promise<void> {
|
||||
await plugins.smartfile.fs.ensureDir(this.backupDir);
|
||||
await plugins.smartfile.fs.ensureDir(
|
||||
plugins.path.join(this.backupDir, 'operations'),
|
||||
);
|
||||
await plugins.smartfs.directory(this.backupDir).recursive().create();
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.join(this.backupDir, 'operations'))
|
||||
.recursive()
|
||||
.create();
|
||||
}
|
||||
|
||||
private generateOperationId(): string {
|
||||
@@ -204,13 +220,16 @@ export class RollbackManager {
|
||||
private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
|
||||
const defaultManifest = { operations: [] };
|
||||
|
||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
||||
const exists = await plugins.smartfs.file(this.manifestPath).exists();
|
||||
if (!exists) {
|
||||
return defaultManifest;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(this.manifestPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const manifest = JSON.parse(content);
|
||||
|
||||
// Validate the manifest structure
|
||||
@@ -228,7 +247,7 @@ export class RollbackManager {
|
||||
);
|
||||
// Try to delete the corrupted file
|
||||
try {
|
||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
||||
await plugins.smartfs.file(this.manifestPath).delete();
|
||||
} catch (removeError) {
|
||||
// Ignore removal errors
|
||||
}
|
||||
@@ -249,7 +268,10 @@ export class RollbackManager {
|
||||
|
||||
// Write directly with proper JSON stringification
|
||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
||||
await plugins.smartfs
|
||||
.file(this.manifestPath)
|
||||
.encoding('utf8')
|
||||
.write(jsonContent);
|
||||
}
|
||||
|
||||
private async getOperation(
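
The rollback store follows the same pattern: backups are written through `smartfs.file(...).write()` and verified by checksum before a restore. A rough sketch of the round-trip, assuming `getBackupPath` and `calculateChecksum` behave as in the class above:

```typescript
// Sketch: back up a file, then restore it only if the backup still matches its checksum.
const original = (await plugins.smartfs
  .file(absolutePath)
  .encoding('utf8')
  .read()) as string;
const checksum = calculateChecksum(original);

const backupPath = getBackupPath(operationId, filepath);
await plugins.smartfs.directory(plugins.path.dirname(backupPath)).recursive().create();
await plugins.smartfs.file(backupPath).encoding('utf8').write(original);

// ...later, on rollback:
const backup = (await plugins.smartfs.file(backupPath).encoding('utf8').read()) as string;
if (calculateChecksum(backup) !== checksum) {
  throw new Error(`Backup for ${filepath} failed integrity check`);
}
await plugins.smartfs.file(absolutePath).encoding('utf8').write(backup);
```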
|
||||
|
||||
@@ -13,12 +13,12 @@ const filesToDelete = [
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
for (const relativeFilePath of filesToDelete) {
|
||||
const fileExists = plugins.smartfile.fs.fileExistsSync(relativeFilePath);
|
||||
const fileExists = await plugins.smartfs.file(relativeFilePath).exists();
|
||||
if (fileExists) {
|
||||
logger.log('info', `Found ${relativeFilePath}! Removing it!`);
|
||||
plugins.smartfile.fs.removeSync(
|
||||
plugins.path.join(paths.cwd, relativeFilePath),
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(plugins.path.join(paths.cwd, relativeFilePath))
|
||||
.delete();
|
||||
} else {
|
||||
logger.log('info', `Project is free of ${relativeFilePath}`);
|
||||
}
|
||||
|
||||
@@ -24,7 +24,12 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
try {
|
||||
// Handle glob patterns
|
||||
const files = await plugins.smartfile.fs.listFileTree('.', pattern.from);
|
||||
const entries = await plugins.smartfs
|
||||
.directory('.')
|
||||
.recursive()
|
||||
.filter(pattern.from)
|
||||
.list();
|
||||
const files = entries.map((entry) => entry.path);
|
||||
|
||||
for (const file of files) {
|
||||
const sourcePath = file;
|
||||
@@ -46,10 +51,13 @@ export const run = async (projectArg: Project) => {
|
||||
}
|
||||
|
||||
// Ensure destination directory exists
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(destPath));
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.dirname(destPath))
|
||||
.recursive()
|
||||
.create();
|
||||
|
||||
// Copy file
|
||||
await plugins.smartfile.fs.copy(sourcePath, destPath);
|
||||
await plugins.smartfs.file(sourcePath).copy(destPath);
|
||||
logger.log('info', `Copied ${sourcePath} to ${destPath}`);
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@@ -7,13 +7,15 @@ import { logger } from '../gitzone.logging.js';
|
||||
const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const gitignoreExists = await plugins.smartfile.fs.fileExists(gitignorePath);
|
||||
const gitignoreExists = await plugins.smartfs.file(gitignorePath).exists();
|
||||
let customContent = '';
|
||||
|
||||
if (gitignoreExists) {
|
||||
// lets get the existing gitignore file
|
||||
const existingGitIgnoreString =
|
||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
const existingGitIgnoreString = (await plugins.smartfs
|
||||
.file(gitignorePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
|
||||
// Check for different custom section markers
|
||||
const customMarkers = ['#------# custom', '# custom'];
|
||||
@@ -34,12 +36,17 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
// Append the custom content if it exists
|
||||
if (customContent) {
|
||||
const newGitignoreContent =
|
||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
const newGitignoreContent = (await plugins.smartfs
|
||||
.file(gitignorePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
// The template already ends with "#------# custom", so just append the content
|
||||
const finalContent =
|
||||
newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
|
||||
await plugins.smartfile.fs.toFs(finalContent, gitignorePath);
|
||||
await plugins.smartfs
|
||||
.file(gitignorePath)
|
||||
.encoding('utf8')
|
||||
.write(finalContent);
|
||||
logger.log('info', 'Updated .gitignore while preserving custom section!');
|
||||
} else {
|
||||
logger.log('info', 'Added a .gitignore!');
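
The hunk above keeps everything below the `#------# custom` marker when the .gitignore template is rewritten. A rough sketch of that preservation step, using only the markers and smartfs calls shown in this diff:

```typescript
// Extract the user's custom block from an existing .gitignore (sketch).
const existing = (await plugins.smartfs
  .file(gitignorePath)
  .encoding('utf8')
  .read()) as string;

let customContent = '';
for (const marker of ['#------# custom', '# custom']) {
  const idx = existing.indexOf(marker);
  if (idx !== -1) {
    customContent = existing.slice(idx + marker.length).trim();
    break;
  }
}

// After the template has been written, re-append the preserved block.
if (customContent) {
  const fresh = (await plugins.smartfs.file(gitignorePath).encoding('utf8').read()) as string;
  await plugins.smartfs
    .file(gitignorePath)
    .encoding('utf8')
    .write(fresh.trimEnd() + '\n' + customContent + '\n');
}
```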
|
||||
|
||||
@@ -7,9 +7,9 @@ import { logger } from '../gitzone.logging.js';
|
||||
const incompatibleLicenses: string[] = ['AGPL', 'GPL', 'SSPL'];
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const nodeModulesInstalled = await plugins.smartfile.fs.isDirectory(
|
||||
plugins.path.join(paths.cwd, 'node_modules'),
|
||||
);
|
||||
const nodeModulesInstalled = await plugins.smartfs
|
||||
.directory(plugins.path.join(paths.cwd, 'node_modules'))
|
||||
.exists();
|
||||
if (!nodeModulesInstalled) {
|
||||
logger.log('warn', 'No node_modules found. Skipping license check');
|
||||
return;
|
||||
|
||||
@@ -154,10 +154,11 @@ export const run = async (projectArg: Project) => {
|
||||
];
|
||||
|
||||
// check for dependencies
|
||||
// Note: @push.rocks/tapbundle is deprecated - use @git.zone/tstest/tapbundle instead
|
||||
await ensureDependency(
|
||||
packageJson,
|
||||
'devDep',
|
||||
'latest',
|
||||
'exclude',
|
||||
'@push.rocks/tapbundle',
|
||||
);
|
||||
await ensureDependency(
|
||||
@@ -174,9 +175,11 @@ export const run = async (projectArg: Project) => {
|
||||
);
|
||||
|
||||
// set overrides
|
||||
const overrides = plugins.smartfile.fs.toObjectSync(
|
||||
plugins.path.join(paths.assetsDir, 'overrides.json'),
|
||||
);
|
||||
const overridesContent = (await plugins.smartfs
|
||||
.file(plugins.path.join(paths.assetsDir, 'overrides.json'))
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const overrides = JSON.parse(overridesContent);
|
||||
packageJson.pnpm = packageJson.pnpm || {};
|
||||
packageJson.pnpm.overrides = overrides;
|
||||
|
||||
|
||||
@@ -6,25 +6,22 @@ export const run = async () => {
|
||||
const readmeHintsPath = plugins.path.join(paths.cwd, 'readme.hints.md');
|
||||
|
||||
// Check and initialize readme.md if it doesn't exist
|
||||
const readmeExists = await plugins.smartfile.fs.fileExists(readmePath);
|
||||
const readmeExists = await plugins.smartfs.file(readmePath).exists();
|
||||
if (!readmeExists) {
|
||||
await plugins.smartfile.fs.toFs(
|
||||
'# Project Readme\n\nThis is the initial readme file.',
|
||||
readmePath,
|
||||
);
|
||||
await plugins.smartfs.file(readmePath)
|
||||
.encoding('utf8')
|
||||
.write('# Project Readme\n\nThis is the initial readme file.');
|
||||
console.log('Initialized readme.md');
|
||||
} else {
|
||||
console.log('readme.md already exists');
|
||||
}
|
||||
|
||||
// Check and initialize readme.hints.md if it doesn't exist
|
||||
const readmeHintsExists =
|
||||
await plugins.smartfile.fs.fileExists(readmeHintsPath);
|
||||
const readmeHintsExists = await plugins.smartfs.file(readmeHintsPath).exists();
|
||||
if (!readmeHintsExists) {
|
||||
await plugins.smartfile.fs.toFs(
|
||||
'# Project Readme Hints\n\nThis is the initial readme hints file.',
|
||||
readmeHintsPath,
|
||||
);
|
||||
await plugins.smartfs.file(readmeHintsPath)
|
||||
.encoding('utf8')
|
||||
.write('# Project Readme Hints\n\nThis is the initial readme hints file.');
|
||||
console.log('Initialized readme.hints.md');
|
||||
} else {
|
||||
console.log('readme.hints.md already exists');
|
||||
|
||||
@@ -10,12 +10,6 @@ import { Project } from '../classes.project.js';
|
||||
export const run = async (project: Project) => {
|
||||
const templateModule = await import('../mod_template/index.js');
|
||||
|
||||
// update tslint
|
||||
// getting template
|
||||
const tslintTemplate = await templateModule.getTemplate('tslint');
|
||||
await tslintTemplate.writeToDisk(paths.cwd);
|
||||
logger.log('info', 'Updated tslint.json!');
|
||||
|
||||
// update vscode
|
||||
const vscodeTemplate = await templateModule.getTemplate('vscode');
|
||||
await vscodeTemplate.writeToDisk(paths.cwd);
|
||||
|
||||
@@ -7,10 +7,11 @@ import { Project } from '../classes.project.js';
|
||||
export const run = async (projectArg: Project) => {
|
||||
// lets care about tsconfig.json
|
||||
logger.log('info', 'Formatting tsconfig.json...');
|
||||
const tsconfigSmartfile = await plugins.smartfile.SmartFile.fromFilePath(
|
||||
const factory = plugins.smartfile.SmartFileFactory.nodeFs();
|
||||
const tsconfigSmartfile = await factory.fromFilePath(
|
||||
plugins.path.join(paths.cwd, 'tsconfig.json'),
|
||||
);
|
||||
const tsconfigObject = JSON.parse(tsconfigSmartfile.contentBuffer.toString());
|
||||
const tsconfigObject = JSON.parse(tsconfigSmartfile.parseContentAsString());
|
||||
tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {};
|
||||
tsconfigObject.compilerOptions.baseUrl = '.';
|
||||
tsconfigObject.compilerOptions.paths = {};
|
||||
@@ -23,8 +24,8 @@ export const run = async (projectArg: Project) => {
|
||||
`./${publishModule}/index.js`,
|
||||
];
|
||||
}
|
||||
tsconfigSmartfile.setContentsFromString(
|
||||
JSON.stringify(tsconfigObject, null, 2),
|
||||
);
|
||||
await tsconfigSmartfile.editContentAsString(async () => {
|
||||
return JSON.stringify(tsconfigObject, null, 2);
|
||||
});
|
||||
await tsconfigSmartfile.write();
|
||||
};
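
The tsconfig formatter now goes through a `SmartFileFactory` instead of the static `SmartFile.fromFilePath`. A minimal sketch of that flow; the factory and method names are taken from this diff, not from separate documentation:

```typescript
// Sketch of the SmartFileFactory-based read/modify/write cycle used above.
const factory = plugins.smartfile.SmartFileFactory.nodeFs();
const smartfile = await factory.fromFilePath(
  plugins.path.join(paths.cwd, 'tsconfig.json'),
);

const tsconfig = JSON.parse(smartfile.parseContentAsString());
tsconfig.compilerOptions = tsconfig.compilerOptions || {};
tsconfig.compilerOptions.baseUrl = '.';

await smartfile.editContentAsString(async () => JSON.stringify(tsconfig, null, 2));
await smartfile.write();
```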
|
||||
|
||||
@@ -20,7 +20,7 @@ export class CleanupFormatter extends BaseFormatter {
|
||||
];
|
||||
|
||||
for (const file of filesToRemove) {
|
||||
const exists = await plugins.smartfile.fs.fileExists(file);
|
||||
const exists = await plugins.smartfs.file(file).exists();
|
||||
if (exists) {
|
||||
changes.push({
|
||||
type: 'delete',
|
||||
|
||||
@@ -41,16 +41,23 @@ export class PrettierFormatter extends BaseFormatter {
|
||||
// Add files from TypeScript directories
|
||||
for (const dir of includeDirs) {
|
||||
const globPattern = `${dir}/**/*.${extensions}`;
|
||||
const dirFiles = await plugins.smartfile.fs.listFileTree(
|
||||
'.',
|
||||
globPattern,
|
||||
);
|
||||
const dirEntries = await plugins.smartfs
|
||||
.directory('.')
|
||||
.recursive()
|
||||
.filter(globPattern)
|
||||
.list();
|
||||
const dirFiles = dirEntries.map((entry) => entry.path);
|
||||
allFiles.push(...dirFiles);
|
||||
}
|
||||
|
||||
// Add root config files
|
||||
for (const pattern of rootConfigFiles) {
|
||||
const rootFiles = await plugins.smartfile.fs.listFileTree('.', pattern);
|
||||
const rootEntries = await plugins.smartfs
|
||||
.directory('.')
|
||||
.recursive()
|
||||
.filter(pattern)
|
||||
.list();
|
||||
const rootFiles = rootEntries.map((entry) => entry.path);
|
||||
// Only include files at root level (no slashes in path)
|
||||
const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
|
||||
allFiles.push(...rootLevelFiles);
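
Listing files by glob now goes through the smartfs directory builder rather than `listFileTree`. A small helper-style sketch of the pattern used above (the helper name is illustrative):

```typescript
// Collect paths matching a glob under a base directory (sketch).
async function listByGlob(baseDir: string, globPattern: string): Promise<string[]> {
  const entries = await plugins.smartfs
    .directory(baseDir)
    .recursive()
    .filter(globPattern)
    .list();
  return entries.map((entry) => entry.path);
}

// e.g. keep only root-level config files:
const rootOnly = (await listByGlob('.', '*.json')).filter((f) => !f.includes('/'));
```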
|
||||
@@ -66,8 +73,8 @@ export class PrettierFormatter extends BaseFormatter {
|
||||
const validFiles: string[] = [];
|
||||
for (const file of files) {
|
||||
try {
|
||||
const stats = await plugins.smartfile.fs.stat(file);
|
||||
if (!stats.isDirectory()) {
|
||||
const stats = await plugins.smartfs.file(file).stat();
|
||||
if (!stats.isDirectory) {
|
||||
validFiles.push(file);
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -148,7 +155,10 @@ export class PrettierFormatter extends BaseFormatter {
|
||||
}
|
||||
|
||||
// Read current content
|
||||
const content = plugins.smartfile.fs.toStringSync(change.path);
|
||||
const content = (await plugins.smartfs
|
||||
.file(change.path)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
|
||||
// Format with prettier
|
||||
const prettier = await import('prettier');
|
||||
|
||||
@@ -101,7 +101,12 @@ export let run = async (
|
||||
// Plan phase
|
||||
logger.log('info', 'Analyzing project for format operations...');
|
||||
let plan = options.fromPlan
|
||||
? JSON.parse(await plugins.smartfile.fs.toStringSync(options.fromPlan))
|
||||
? JSON.parse(
|
||||
(await plugins.smartfs
|
||||
.file(options.fromPlan)
|
||||
.encoding('utf8')
|
||||
.read()) as string,
|
||||
)
|
||||
: await planner.planFormat(activeFormatters);
|
||||
|
||||
// Display plan
|
||||
@@ -109,10 +114,10 @@ export let run = async (
|
||||
|
||||
// Save plan if requested
|
||||
if (options.savePlan) {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(plan, null, 2),
|
||||
options.savePlan,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(options.savePlan)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(plan, null, 2));
|
||||
logger.log('info', `Plan saved to ${options.savePlan}`);
|
||||
}
|
||||
|
||||
|
||||
@@ -48,15 +48,17 @@ export class Meta {
|
||||
public async readDirectory() {
|
||||
await this.syncToRemote(true);
|
||||
logger.log('info', `reading directory`);
|
||||
const metaFileExists = plugins.smartfile.fs.fileExistsSync(
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
const metaFileExists = await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.exists();
|
||||
if (!metaFileExists) {
|
||||
throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
|
||||
}
|
||||
this.metaRepoData = plugins.smartfile.fs.toObjectSync(
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
const content = (await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
this.metaRepoData = JSON.parse(content);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,15 +80,15 @@ export class Meta {
|
||||
*/
|
||||
public async writeToDisk() {
|
||||
// write .meta.json to disk
|
||||
plugins.smartfile.memory.toFsSync(
|
||||
JSON.stringify(this.metaRepoData, null, 2),
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(this.metaRepoData, null, 2));
|
||||
// write .gitignore to disk
|
||||
plugins.smartfile.memory.toFsSync(
|
||||
await this.generateGitignore(),
|
||||
this.filePaths.gitIgnore,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.gitIgnore)
|
||||
.encoding('utf8')
|
||||
.write(await this.generateGitignore());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -112,10 +114,25 @@ export class Meta {
|
||||
*/
|
||||
public async updateLocalRepos() {
|
||||
await this.syncToRemote();
|
||||
const projects = plugins.smartfile.fs.toObjectSync(
|
||||
this.filePaths.metaJson,
|
||||
).projects;
|
||||
const preExistingFolders = plugins.smartfile.fs.listFoldersSync(this.cwd);
|
||||
const metaContent = (await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const projects = JSON.parse(metaContent).projects;
|
||||
const entries = await plugins.smartfs.directory(this.cwd).list();
|
||||
const preExistingFolders: string[] = [];
|
||||
for (const entry of entries) {
|
||||
try {
|
||||
const stats = await plugins.smartfs
|
||||
.file(plugins.path.join(this.cwd, entry.path))
|
||||
.stat();
|
||||
if (stats.isDirectory) {
|
||||
preExistingFolders.push(entry.name);
|
||||
}
|
||||
} catch {
|
||||
// Skip entries that can't be accessed
|
||||
}
|
||||
}
|
||||
for (const preExistingFolderArg of preExistingFolders) {
|
||||
if (
|
||||
preExistingFolderArg !== '.git' &&
|
||||
@@ -143,9 +160,17 @@ export class Meta {
|
||||
await this.sortMetaRepoData();
|
||||
const missingRepos: string[] = [];
|
||||
for (const key of Object.keys(this.metaRepoData.projects)) {
|
||||
plugins.smartfile.fs.isDirectory(key)
|
||||
? logger.log('ok', `${key} -> is already cloned`)
|
||||
: missingRepos.push(key);
|
||||
const fullPath = plugins.path.join(this.cwd, key);
|
||||
try {
|
||||
const stats = await plugins.smartfs.file(fullPath).stat();
|
||||
if (stats.isDirectory) {
|
||||
logger.log('ok', `${key} -> is already cloned`);
|
||||
} else {
|
||||
missingRepos.push(key);
|
||||
}
|
||||
} catch {
|
||||
missingRepos.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
logger.log('info', `found ${missingRepos.length} missing repos`);
|
||||
@@ -165,7 +190,20 @@ export class Meta {
|
||||
await this.syncToRemote();
|
||||
|
||||
// go recursive
|
||||
const folders = await plugins.smartfile.fs.listFolders(this.cwd);
|
||||
const listEntries = await plugins.smartfs.directory(this.cwd).list();
|
||||
const folders: string[] = [];
|
||||
for (const entry of listEntries) {
|
||||
try {
|
||||
const stats = await plugins.smartfs
|
||||
.file(plugins.path.join(this.cwd, entry.path))
|
||||
.stat();
|
||||
if (stats.isDirectory) {
|
||||
folders.push(entry.name);
|
||||
}
|
||||
} catch {
|
||||
// Skip entries that can't be accessed
|
||||
}
|
||||
}
|
||||
const childMetaRepositories: string[] = [];
|
||||
for (const folder of folders) {
|
||||
logger.log('info', folder);
|
||||
@@ -180,27 +218,31 @@ export class Meta {
|
||||
*/
|
||||
public async initProject() {
|
||||
await this.syncToRemote(true);
|
||||
const fileExists = await plugins.smartfile.fs.fileExists(
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
const fileExists = await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.exists();
|
||||
if (!fileExists) {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify({
|
||||
projects: {},
|
||||
}),
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.write(
|
||||
JSON.stringify({
|
||||
projects: {},
|
||||
}),
|
||||
);
|
||||
logger.log(
|
||||
`success`,
|
||||
`created a new .meta.json in directory ${this.cwd}`,
|
||||
);
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify({
|
||||
name: this.dirName,
|
||||
version: '1.0.0',
|
||||
}),
|
||||
this.filePaths.packageJson,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.packageJson)
|
||||
.encoding('utf8')
|
||||
.write(
|
||||
JSON.stringify({
|
||||
name: this.dirName,
|
||||
version: '1.0.0',
|
||||
}),
|
||||
);
|
||||
logger.log(
|
||||
`success`,
|
||||
`created a new package.json in directory ${this.cwd}`,
|
||||
@@ -264,9 +306,10 @@ export class Meta {
|
||||
await this.writeToDisk();
|
||||
|
||||
logger.log('info', 'removing directory from cwd');
|
||||
await plugins.smartfile.fs.remove(
|
||||
plugins.path.join(paths.cwd, projectNameArg),
|
||||
);
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.join(paths.cwd, projectNameArg))
|
||||
.recursive()
|
||||
.delete();
|
||||
await this.updateLocalRepos();
|
||||
}
|
||||
}
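
Because smartfs's `list()` returns entries rather than folder names, the Meta class now stats each entry to keep only directories. A compact sketch of that pattern, assuming entries expose `path` and `name` as used above:

```typescript
// Sketch: list only the sub-directories of cwd, skipping unreadable entries.
async function listFolders(cwd: string): Promise<string[]> {
  const folders: string[] = [];
  for (const entry of await plugins.smartfs.directory(cwd).list()) {
    try {
      const stats = await plugins.smartfs
        .file(plugins.path.join(cwd, entry.path))
        .stat();
      if (stats.isDirectory) folders.push(entry.name);
    } catch {
      // Skip entries that can't be accessed
    }
  }
  return folders;
}
```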
|
||||
|
||||
190
ts/mod_services/classes.globalregistry.ts
Normal file
190
ts/mod_services/classes.globalregistry.ts
Normal file
@@ -0,0 +1,190 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import { DockerContainer } from './classes.dockercontainer.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export interface IRegisteredProject {
|
||||
projectPath: string;
|
||||
projectName: string;
|
||||
containers: {
|
||||
mongo?: string;
|
||||
minio?: string;
|
||||
elasticsearch?: string;
|
||||
};
|
||||
ports: {
|
||||
mongo?: number;
|
||||
s3?: number;
|
||||
s3Console?: number;
|
||||
elasticsearch?: number;
|
||||
};
|
||||
enabledServices: string[];
|
||||
lastActive: number;
|
||||
}
|
||||
|
||||
export interface IGlobalRegistryData {
|
||||
projects: { [projectPath: string]: IRegisteredProject };
|
||||
}
|
||||
|
||||
export class GlobalRegistry {
|
||||
private static instance: GlobalRegistry | null = null;
|
||||
private kvStore: plugins.npmextra.KeyValueStore<IGlobalRegistryData>;
|
||||
private docker: DockerContainer;
|
||||
|
||||
private constructor() {
|
||||
this.kvStore = new plugins.npmextra.KeyValueStore({
|
||||
typeArg: 'userHomeDir',
|
||||
identityArg: 'gitzone-services',
|
||||
});
|
||||
this.docker = new DockerContainer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the singleton instance
|
||||
*/
|
||||
public static getInstance(): GlobalRegistry {
|
||||
if (!GlobalRegistry.instance) {
|
||||
GlobalRegistry.instance = new GlobalRegistry();
|
||||
}
|
||||
return GlobalRegistry.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register or update a project in the global registry
|
||||
*/
|
||||
public async registerProject(data: Omit<IRegisteredProject, 'lastActive'>): Promise<void> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
const projects = allData.projects || {};
|
||||
|
||||
projects[data.projectPath] = {
|
||||
...data,
|
||||
lastActive: Date.now(),
|
||||
};
|
||||
|
||||
await this.kvStore.writeKey('projects', projects);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a project from the registry
|
||||
*/
|
||||
public async unregisterProject(projectPath: string): Promise<void> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
const projects = allData.projects || {};
|
||||
|
||||
if (projects[projectPath]) {
|
||||
delete projects[projectPath];
|
||||
await this.kvStore.writeKey('projects', projects);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the lastActive timestamp for a project
|
||||
*/
|
||||
public async touchProject(projectPath: string): Promise<void> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
const projects = allData.projects || {};
|
||||
|
||||
if (projects[projectPath]) {
|
||||
projects[projectPath].lastActive = Date.now();
|
||||
await this.kvStore.writeKey('projects', projects);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered projects
|
||||
*/
|
||||
public async getAllProjects(): Promise<{ [path: string]: IRegisteredProject }> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
return allData.projects || {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a project is registered
|
||||
*/
|
||||
public async isRegistered(projectPath: string): Promise<boolean> {
|
||||
const projects = await this.getAllProjects();
|
||||
return !!projects[projectPath];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get status of all containers across all registered projects
|
||||
*/
|
||||
public async getGlobalStatus(): Promise<
|
||||
Array<{
|
||||
projectPath: string;
|
||||
projectName: string;
|
||||
containers: Array<{ name: string; status: string }>;
|
||||
lastActive: number;
|
||||
}>
|
||||
> {
|
||||
const projects = await this.getAllProjects();
|
||||
const result: Array<{
|
||||
projectPath: string;
|
||||
projectName: string;
|
||||
containers: Array<{ name: string; status: string }>;
|
||||
lastActive: number;
|
||||
}> = [];
|
||||
|
||||
for (const [path, project] of Object.entries(projects)) {
|
||||
const containerStatuses: Array<{ name: string; status: string }> = [];
|
||||
|
||||
for (const containerName of Object.values(project.containers)) {
|
||||
if (containerName) {
|
||||
const status = await this.docker.getStatus(containerName);
|
||||
containerStatuses.push({ name: containerName, status });
|
||||
}
|
||||
}
|
||||
|
||||
result.push({
|
||||
projectPath: path,
|
||||
projectName: project.projectName,
|
||||
containers: containerStatuses,
|
||||
lastActive: project.lastActive,
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all containers across all registered projects
|
||||
*/
|
||||
public async stopAll(): Promise<{ stopped: string[]; failed: string[] }> {
|
||||
const projects = await this.getAllProjects();
|
||||
const stopped: string[] = [];
|
||||
const failed: string[] = [];
|
||||
|
||||
for (const project of Object.values(projects)) {
|
||||
for (const containerName of Object.values(project.containers)) {
|
||||
if (containerName) {
|
||||
const status = await this.docker.getStatus(containerName);
|
||||
if (status === 'running') {
|
||||
if (await this.docker.stop(containerName)) {
|
||||
stopped.push(containerName);
|
||||
} else {
|
||||
failed.push(containerName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { stopped, failed };
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove stale registry entries (projects that no longer exist on disk)
|
||||
*/
|
||||
public async cleanup(): Promise<string[]> {
|
||||
const projects = await this.getAllProjects();
|
||||
const removed: string[] = [];
|
||||
|
||||
for (const projectPath of Object.keys(projects)) {
|
||||
const exists = await plugins.smartfs.directory(projectPath).exists();
|
||||
if (!exists) {
|
||||
await this.unregisterProject(projectPath);
|
||||
removed.push(projectPath);
|
||||
}
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
}
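
A short usage sketch of the new registry; the values are illustrative, only the API comes from the file above:

```typescript
const registry = GlobalRegistry.getInstance();

await registry.registerProject({
  projectPath: process.cwd(),
  projectName: 'example-project', // illustrative
  containers: { mongo: 'example-project-mongodb' },
  ports: { mongo: 27017 },
  enabledServices: ['mongodb'],
});

const status = await registry.getGlobalStatus();
for (const project of status) {
  console.log(project.projectName, project.containers);
}

await registry.cleanup(); // drops entries whose project directories no longer exist
```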
|
||||
@@ -19,6 +19,11 @@ export interface IServiceConfig {
|
||||
S3_BUCKET: string;
|
||||
S3_ENDPOINT: string;
|
||||
S3_USESSL: boolean;
|
||||
ELASTICSEARCH_HOST: string;
|
||||
ELASTICSEARCH_PORT: string;
|
||||
ELASTICSEARCH_USER: string;
|
||||
ELASTICSEARCH_PASS: string;
|
||||
ELASTICSEARCH_URL: string;
|
||||
}
|
||||
|
||||
export class ServiceConfiguration {
|
||||
@@ -61,10 +66,10 @@ export class ServiceConfiguration {
|
||||
* Save the configuration to file
|
||||
*/
|
||||
public async saveConfig(): Promise<void> {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(this.config, null, 2),
|
||||
this.configPath
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.configPath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(this.config, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -72,21 +77,24 @@ export class ServiceConfiguration {
|
||||
*/
|
||||
private async ensureNogitDirectory(): Promise<void> {
|
||||
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
|
||||
await plugins.smartfile.fs.ensureDir(nogitPath);
|
||||
await plugins.smartfs.directory(nogitPath).recursive().create();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if configuration file exists
|
||||
*/
|
||||
private async configExists(): Promise<boolean> {
|
||||
return plugins.smartfile.fs.fileExists(this.configPath);
|
||||
return plugins.smartfs.file(this.configPath).exists();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load configuration from file
|
||||
*/
|
||||
private async loadConfig(): Promise<void> {
|
||||
const configContent = plugins.smartfile.fs.toStringSync(this.configPath);
|
||||
const configContent = (await plugins.smartfs
|
||||
.file(this.configPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
this.config = JSON.parse(configContent);
|
||||
}
|
||||
|
||||
@@ -94,16 +102,16 @@ export class ServiceConfiguration {
|
||||
* Create default configuration
|
||||
*/
|
||||
private async createDefaultConfig(): Promise<void> {
|
||||
const projectName = helpers.getProjectName();
|
||||
const projectName = await helpers.getProjectName();
|
||||
const mongoPort = await helpers.getRandomAvailablePort();
|
||||
const s3Port = await helpers.getRandomAvailablePort();
|
||||
let s3ConsolePort = s3Port + 1;
|
||||
|
||||
|
||||
// Ensure console port is also available
|
||||
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||
s3ConsolePort++;
|
||||
}
|
||||
|
||||
|
||||
const mongoUser = 'defaultadmin';
|
||||
const mongoPass = 'defaultpass';
|
||||
const mongoHost = 'localhost';
|
||||
@@ -111,7 +119,11 @@ export class ServiceConfiguration {
|
||||
const mongoPortStr = mongoPort.toString();
|
||||
const s3Host = 'localhost';
|
||||
const s3PortStr = s3Port.toString();
|
||||
|
||||
const esHost = 'localhost';
|
||||
const esPort = '9200';
|
||||
const esUser = 'elastic';
|
||||
const esPass = 'elastic';
|
||||
|
||||
this.config = {
|
||||
PROJECT_NAME: projectName,
|
||||
MONGODB_HOST: mongoHost,
|
||||
@@ -127,22 +139,28 @@ export class ServiceConfiguration {
|
||||
S3_SECRETKEY: 'defaultpass',
|
||||
S3_BUCKET: `${projectName}-documents`,
|
||||
S3_ENDPOINT: s3Host,
|
||||
S3_USESSL: false
|
||||
S3_USESSL: false,
|
||||
ELASTICSEARCH_HOST: esHost,
|
||||
ELASTICSEARCH_PORT: esPort,
|
||||
ELASTICSEARCH_USER: esUser,
|
||||
ELASTICSEARCH_PASS: esPass,
|
||||
ELASTICSEARCH_URL: `http://${esUser}:${esPass}@${esHost}:${esPort}`
|
||||
};
|
||||
|
||||
|
||||
await this.saveConfig();
|
||||
|
||||
|
||||
logger.log('ok', '✅ Created .nogit/env.json with project defaults');
|
||||
logger.log('info', `📍 MongoDB port: ${mongoPort}`);
|
||||
logger.log('info', `📍 S3 API port: ${s3Port}`);
|
||||
logger.log('info', `📍 S3 Console port: ${s3ConsolePort}`);
|
||||
logger.log('info', `📍 Elasticsearch port: ${esPort}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update missing fields in existing configuration
|
||||
*/
|
||||
private async updateMissingFields(): Promise<void> {
|
||||
const projectName = helpers.getProjectName();
|
||||
const projectName = await helpers.getProjectName();
|
||||
let updated = false;
|
||||
const fieldsAdded: string[] = [];
|
||||
|
||||
@@ -249,7 +267,39 @@ export class ServiceConfiguration {
|
||||
fieldsAdded.push('S3_ENDPOINT');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
|
||||
if (!this.config.ELASTICSEARCH_HOST) {
|
||||
this.config.ELASTICSEARCH_HOST = 'localhost';
|
||||
fieldsAdded.push('ELASTICSEARCH_HOST');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_PORT) {
|
||||
this.config.ELASTICSEARCH_PORT = '9200';
|
||||
fieldsAdded.push('ELASTICSEARCH_PORT');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_USER) {
|
||||
this.config.ELASTICSEARCH_USER = 'elastic';
|
||||
fieldsAdded.push('ELASTICSEARCH_USER');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_PASS) {
|
||||
this.config.ELASTICSEARCH_PASS = 'elastic';
|
||||
fieldsAdded.push('ELASTICSEARCH_PASS');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// Always update ELASTICSEARCH_URL based on current settings
|
||||
const oldEsUrl = this.config.ELASTICSEARCH_URL;
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
if (oldEsUrl !== this.config.ELASTICSEARCH_URL) {
|
||||
fieldsAdded.push('ELASTICSEARCH_URL');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
await this.saveConfig();
|
||||
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
||||
@@ -272,17 +322,19 @@ export class ServiceConfiguration {
|
||||
public getContainerNames() {
|
||||
return {
|
||||
mongo: `${this.config.PROJECT_NAME}-mongodb`,
|
||||
minio: `${this.config.PROJECT_NAME}-minio`
|
||||
minio: `${this.config.PROJECT_NAME}-minio`,
|
||||
elasticsearch: `${this.config.PROJECT_NAME}-elasticsearch`
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Get data directories
|
||||
*/
|
||||
public getDataDirectories() {
|
||||
return {
|
||||
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
|
||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata')
|
||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata'),
|
||||
elasticsearch: plugins.path.join(process.cwd(), '.nogit', 'esdata')
|
||||
};
|
||||
}
|
||||
|
||||
@@ -330,12 +382,27 @@ export class ServiceConfiguration {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// Check Elasticsearch container
|
||||
const esStatus = await this.docker.getStatus(containers.elasticsearch);
|
||||
if (esStatus !== 'not_exists') {
|
||||
const portMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||
if (portMappings && portMappings['9200']) {
|
||||
const dockerPort = portMappings['9200'];
|
||||
if (this.config.ELASTICSEARCH_PORT !== dockerPort) {
|
||||
logger.log('note', `📍 Syncing Elasticsearch port from Docker: ${dockerPort}`);
|
||||
this.config.ELASTICSEARCH_PORT = dockerPort;
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
|
||||
await this.saveConfig();
|
||||
logger.log('ok', '✅ Configuration synced with Docker containers');
|
||||
}
|
||||
@@ -347,11 +414,12 @@ export class ServiceConfiguration {
|
||||
public async validateAndUpdatePorts(): Promise<boolean> {
|
||||
let updated = false;
|
||||
const containers = this.getContainerNames();
|
||||
|
||||
|
||||
// Check if containers exist - if they do, ports are fine
|
||||
const mongoExists = await this.docker.exists(containers.mongo);
|
||||
const minioExists = await this.docker.exists(containers.minio);
|
||||
|
||||
const esExists = await this.docker.exists(containers.elasticsearch);
|
||||
|
||||
// Only check port availability if containers don't exist
|
||||
if (!mongoExists) {
|
||||
const mongoPort = parseInt(this.config.MONGODB_PORT);
|
||||
@@ -363,11 +431,11 @@ export class ServiceConfiguration {
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!minioExists) {
|
||||
const s3Port = parseInt(this.config.S3_PORT);
|
||||
const s3ConsolePort = parseInt(this.config.S3_CONSOLE_PORT);
|
||||
|
||||
|
||||
if (!(await helpers.isPortAvailable(s3Port))) {
|
||||
logger.log('note', `⚠️ S3 API port ${s3Port} is in use, finding new port...`);
|
||||
const newPort = await helpers.getRandomAvailablePort();
|
||||
@@ -375,7 +443,7 @@ export class ServiceConfiguration {
|
||||
logger.log('ok', `✅ New S3 API port: ${newPort}`);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
|
||||
if (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||
logger.log('note', `⚠️ S3 Console port ${s3ConsolePort} is in use, finding new port...`);
|
||||
let newPort = parseInt(this.config.S3_PORT) + 1;
|
||||
@@ -387,15 +455,27 @@ export class ServiceConfiguration {
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (!esExists) {
|
||||
const esPort = parseInt(this.config.ELASTICSEARCH_PORT);
|
||||
if (!(await helpers.isPortAvailable(esPort))) {
|
||||
logger.log('note', `⚠️ Elasticsearch port ${esPort} is in use, finding new port...`);
|
||||
const newPort = await helpers.getRandomAvailablePort();
|
||||
this.config.ELASTICSEARCH_PORT = newPort.toString();
|
||||
logger.log('ok', `✅ New Elasticsearch port: ${newPort}`);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
|
||||
await this.saveConfig();
|
||||
}
|
||||
|
||||
|
||||
return updated;
|
||||
}
|
||||
|
||||
@@ -404,29 +484,35 @@ export class ServiceConfiguration {
|
||||
*/
|
||||
public async reconfigurePorts(): Promise<void> {
|
||||
logger.log('note', '🔄 Finding new available ports...');
|
||||
|
||||
|
||||
const mongoPort = await helpers.getRandomAvailablePort();
|
||||
const s3Port = await helpers.getRandomAvailablePort();
|
||||
let s3ConsolePort = s3Port + 1;
|
||||
|
||||
|
||||
// Ensure console port is also available
|
||||
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||
s3ConsolePort++;
|
||||
}
|
||||
|
||||
|
||||
// Elasticsearch uses standard port 9200
|
||||
const esPort = '9200';
|
||||
|
||||
this.config.MONGODB_PORT = mongoPort.toString();
|
||||
this.config.S3_PORT = s3Port.toString();
|
||||
this.config.S3_CONSOLE_PORT = s3ConsolePort.toString();
|
||||
|
||||
this.config.ELASTICSEARCH_PORT = esPort;
|
||||
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
|
||||
await this.saveConfig();
|
||||
|
||||
|
||||
logger.log('ok', '✅ New port configuration:');
|
||||
logger.log('info', ` 📍 MongoDB: ${mongoPort}`);
|
||||
logger.log('info', ` 📍 S3 API: ${s3Port}`);
|
||||
logger.log('info', ` 📍 S3 Console: ${s3ConsolePort}`);
|
||||
logger.log('info', ` 📍 Elasticsearch: ${esPort}`);
|
||||
}
|
||||
}
|
||||
@@ -2,17 +2,21 @@ import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { ServiceConfiguration } from './classes.serviceconfiguration.js';
|
||||
import { DockerContainer } from './classes.dockercontainer.js';
|
||||
import { GlobalRegistry } from './classes.globalregistry.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export class ServiceManager {
|
||||
private config: ServiceConfiguration;
|
||||
private docker: DockerContainer;
|
||||
|
||||
private enabledServices: string[] | null = null;
|
||||
private globalRegistry: GlobalRegistry;
|
||||
|
||||
constructor() {
|
||||
this.config = new ServiceConfiguration();
|
||||
this.docker = new DockerContainer();
|
||||
this.globalRegistry = GlobalRegistry.getInstance();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Initialize the service manager
|
||||
*/
|
||||
@@ -22,15 +26,162 @@ export class ServiceManager {
|
||||
logger.log('error', 'Error: Docker is not installed. Please install Docker first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
|
||||
// Load or create configuration
|
||||
await this.config.loadOrCreate();
|
||||
logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);
|
||||
|
||||
|
||||
// Load service selection from npmextra.json
|
||||
await this.loadServiceConfiguration();
|
||||
|
||||
// Validate and update ports if needed
|
||||
await this.config.validateAndUpdatePorts();
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Load service configuration from npmextra.json
|
||||
*/
|
||||
private async loadServiceConfiguration(): Promise<void> {
|
||||
const npmextraConfig = new plugins.npmextra.Npmextra(process.cwd());
|
||||
const gitzoneConfig = npmextraConfig.dataFor<any>('gitzone', {});
|
||||
|
||||
// Check if services array exists
|
||||
if (!gitzoneConfig.services || !Array.isArray(gitzoneConfig.services) || gitzoneConfig.services.length === 0) {
|
||||
// Prompt user to select services
|
||||
const smartinteract = new plugins.smartinteract.SmartInteract();
|
||||
const response = await smartinteract.askQuestion({
|
||||
name: 'services',
|
||||
type: 'checkbox',
|
||||
message: 'Which services do you want to enable for this project?',
|
||||
choices: [
|
||||
{ name: 'MongoDB', value: 'mongodb' },
|
||||
{ name: 'MinIO (S3)', value: 'minio' },
|
||||
{ name: 'Elasticsearch', value: 'elasticsearch' }
|
||||
],
|
||||
default: ['mongodb', 'minio', 'elasticsearch']
|
||||
});
|
||||
|
||||
this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
|
||||
|
||||
// Save to npmextra.json
|
||||
await this.saveServiceConfiguration(this.enabledServices);
|
||||
} else {
|
||||
this.enabledServices = gitzoneConfig.services;
|
||||
logger.log('info', `🔧 Enabled services: ${this.enabledServices.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save service configuration to npmextra.json
|
||||
*/
|
||||
private async saveServiceConfiguration(services: string[]): Promise<void> {
|
||||
const npmextraPath = plugins.path.join(process.cwd(), 'npmextra.json');
|
||||
let npmextraData: any = {};
|
||||
|
||||
// Read existing npmextra.json if it exists
|
||||
if (await plugins.smartfs.file(npmextraPath).exists()) {
|
||||
const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read();
|
||||
npmextraData = JSON.parse(content as string);
|
||||
}
|
||||
|
||||
// Update gitzone.services
|
||||
if (!npmextraData.gitzone) {
|
||||
npmextraData.gitzone = {};
|
||||
}
|
||||
npmextraData.gitzone.services = services;
|
||||
|
||||
// Write back to npmextra.json
|
||||
await plugins.smartfs
|
||||
.file(npmextraPath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(npmextraData, null, 2));
|
||||
|
||||
logger.log('ok', `✅ Saved service configuration to npmextra.json`);
|
||||
logger.log('info', `🔧 Enabled services: ${services.join(', ')}`);
|
||||
}
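
For reference, the selection this method persists ends up under the `gitzone.services` key of `npmextra.json`; roughly the following shape, with any other keys in the file preserved (values illustrative):

```typescript
// Shape written to npmextra.json by saveServiceConfiguration (illustrative values):
const npmextraData = {
  gitzone: {
    services: ['mongodb', 'minio', 'elasticsearch'],
  },
};
```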
|
||||
|
||||
/**
|
||||
* Check if a service is enabled
|
||||
*/
|
||||
private isServiceEnabled(service: string): boolean {
|
||||
if (!this.enabledServices) {
|
||||
return true; // If no configuration, enable all
|
||||
}
|
||||
return this.enabledServices.includes(service);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register this project with the global registry
|
||||
*/
|
||||
private async registerWithGlobalRegistry(): Promise<void> {
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
|
||||
await this.globalRegistry.registerProject({
|
||||
projectPath: process.cwd(),
|
||||
projectName: config.PROJECT_NAME,
|
||||
containers: {
|
||||
mongo: containers.mongo,
|
||||
minio: containers.minio,
|
||||
elasticsearch: containers.elasticsearch,
|
||||
},
|
||||
ports: {
|
||||
mongo: parseInt(config.MONGODB_PORT),
|
||||
s3: parseInt(config.S3_PORT),
|
||||
s3Console: parseInt(config.S3_CONSOLE_PORT),
|
||||
elasticsearch: parseInt(config.ELASTICSEARCH_PORT),
|
||||
},
|
||||
enabledServices: this.enabledServices || ['mongodb', 'minio', 'elasticsearch'],
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Start all enabled services
|
||||
*/
|
||||
public async startAll(): Promise<void> {
|
||||
let first = true;
|
||||
if (this.isServiceEnabled('mongodb')) {
|
||||
if (!first) console.log();
|
||||
await this.startMongoDB();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('minio')) {
|
||||
if (!first) console.log();
|
||||
await this.startMinIO();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('elasticsearch')) {
|
||||
if (!first) console.log();
|
||||
await this.startElasticsearch();
|
||||
first = false;
|
||||
}
|
||||
|
||||
// Register with global registry
|
||||
await this.registerWithGlobalRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all enabled services
|
||||
*/
|
||||
public async stopAll(): Promise<void> {
|
||||
let first = true;
|
||||
if (this.isServiceEnabled('mongodb')) {
|
||||
if (!first) console.log();
|
||||
await this.stopMongoDB();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('minio')) {
|
||||
if (!first) console.log();
|
||||
await this.stopMinIO();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('elasticsearch')) {
|
||||
if (!first) console.log();
|
||||
await this.stopElasticsearch();
|
||||
first = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start MongoDB service
|
||||
*/
|
||||
@@ -42,7 +193,7 @@ export class ServiceManager {
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfile.fs.ensureDir(directories.mongo);
|
||||
await plugins.smartfs.directory(directories.mongo).recursive().create();
|
||||
|
||||
const status = await this.docker.getStatus(containers.mongo);
|
||||
|
||||
@@ -141,7 +292,7 @@ export class ServiceManager {
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfile.fs.ensureDir(directories.minio);
|
||||
await plugins.smartfs.directory(directories.minio).recursive().create();
|
||||
|
||||
const status = await this.docker.getStatus(containers.minio);
|
||||
|
||||
@@ -259,7 +410,103 @@ export class ServiceManager {
|
||||
logger.log('info', ` API: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||
logger.log('info', ` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_ACCESSKEY}/***)`);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Start Elasticsearch service
|
||||
*/
|
||||
public async startElasticsearch(): Promise<void> {
|
||||
logger.log('note', '📦 Elasticsearch:');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfs.directory(directories.elasticsearch).recursive().create();
|
||||
|
||||
const status = await this.docker.getStatus(containers.elasticsearch);
|
||||
|
||||
switch (status) {
|
||||
case 'running':
|
||||
logger.log('ok', ' Already running ✓');
|
||||
break;
|
||||
|
||||
case 'stopped':
|
||||
// Check if port mapping matches config
|
||||
const esPortMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||
if (esPortMappings && esPortMappings['9200'] !== config.ELASTICSEARCH_PORT) {
|
||||
logger.log('note', ' Port configuration changed, recreating container...');
|
||||
await this.docker.remove(containers.elasticsearch, true);
|
||||
// Fall through to create new container
|
||||
const success = await this.docker.run({
|
||||
name: containers.elasticsearch,
|
||||
image: 'elasticsearch:8.11.0',
|
||||
ports: {
|
||||
[`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
|
||||
},
|
||||
volumes: {
|
||||
[directories.elasticsearch]: '/usr/share/elasticsearch/data'
|
||||
},
|
||||
environment: {
|
||||
'discovery.type': 'single-node',
|
||||
'xpack.security.enabled': 'true',
|
||||
'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
|
||||
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
|
||||
},
|
||||
restart: 'unless-stopped'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Recreated with new port ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to recreate container');
|
||||
}
|
||||
} else {
|
||||
// Ports match, just start the container
|
||||
if (await this.docker.start(containers.elasticsearch)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case 'not_exists':
|
||||
logger.log('note', ' Creating container...');
|
||||
|
||||
const success = await this.docker.run({
|
||||
name: containers.elasticsearch,
|
||||
image: 'elasticsearch:8.11.0',
|
||||
ports: {
|
||||
[`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
|
||||
},
|
||||
volumes: {
|
||||
[directories.elasticsearch]: '/usr/share/elasticsearch/data'
|
||||
},
|
||||
environment: {
|
||||
'discovery.type': 'single-node',
|
||||
'xpack.security.enabled': 'true',
|
||||
'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
|
||||
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
|
||||
},
|
||||
restart: 'unless-stopped'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Created and started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to create container');
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
logger.log('info', ` Container: ${containers.elasticsearch}`);
|
||||
logger.log('info', ` Port: ${config.ELASTICSEARCH_PORT}`);
|
||||
logger.log('info', ` Connection: ${config.ELASTICSEARCH_URL}`);
|
||||
logger.log('info', ` Username: ${config.ELASTICSEARCH_USER}`);
|
||||
logger.log('info', ` Password: ${config.ELASTICSEARCH_PASS}`);
|
||||
}
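
Once the container is up, the generated credentials can be sanity-checked with a plain HTTP request against Elasticsearch's standard `_cluster/health` endpoint; this check is not part of the changeset, just an illustration of how the configured values fit together:

```typescript
// Illustrative connectivity check (Node 18+ global fetch assumed).
const auth = Buffer.from(
  `${config.ELASTICSEARCH_USER}:${config.ELASTICSEARCH_PASS}`,
).toString('base64');
const response = await fetch(
  `http://${config.ELASTICSEARCH_HOST}:${config.ELASTICSEARCH_PORT}/_cluster/health`,
  { headers: { Authorization: `Basic ${auth}` } },
);
console.log(response.status, await response.json());
```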
  /**
   * Stop MongoDB service
   */
@@ -285,10 +532,10 @@ export class ServiceManager {
   */
  public async stopMinIO(): Promise<void> {
    logger.log('note', '📦 S3/MinIO:');

    const containers = this.config.getContainerNames();
    const status = await this.docker.getStatus(containers.minio);

    if (status === 'running') {
      if (await this.docker.stop(containers.minio)) {
        logger.log('ok', '   Stopped ✓');
@@ -299,7 +546,27 @@ export class ServiceManager {
      logger.log('note', '   Not running');
    }
  }

  /**
   * Stop Elasticsearch service
   */
  public async stopElasticsearch(): Promise<void> {
    logger.log('note', '📦 Elasticsearch:');

    const containers = this.config.getContainerNames();
    const status = await this.docker.getStatus(containers.elasticsearch);

    if (status === 'running') {
      if (await this.docker.stop(containers.elasticsearch)) {
        logger.log('ok', '   Stopped ✓');
      } else {
        logger.log('error', '   Failed to stop');
      }
    } else {
      logger.log('note', '   Not running');
    }
  }
  /**
   * Show service status
   */
@@ -385,8 +652,36 @@ export class ServiceManager {
        }
        break;
    }

    // Elasticsearch status
    const esStatus = await this.docker.getStatus(containers.elasticsearch);
    switch (esStatus) {
      case 'running':
        logger.log('ok', '📦 Elasticsearch: 🟢 Running');
        logger.log('info', `   ├─ Container: ${containers.elasticsearch}`);
        logger.log('info', `   ├─ Port: ${config.ELASTICSEARCH_PORT}`);
        logger.log('info', `   ├─ Connection: ${config.ELASTICSEARCH_URL}`);
        logger.log('info', `   └─ Credentials: ${config.ELASTICSEARCH_USER}/${config.ELASTICSEARCH_PASS}`);
        break;
      case 'stopped':
        logger.log('note', '📦 Elasticsearch: 🟡 Stopped');
        logger.log('info', `   ├─ Container: ${containers.elasticsearch}`);
        logger.log('info', `   └─ Port: ${config.ELASTICSEARCH_PORT}`);
        break;
      case 'not_exists':
        logger.log('info', '📦 Elasticsearch: ⚪ Not installed');
        // Check port availability
        const esPort = parseInt(config.ELASTICSEARCH_PORT);
        const esAvailable = await helpers.isPortAvailable(esPort);
        if (!esAvailable) {
          logger.log('error', `   └─ ⚠️  Port ${esPort} is in use by another process`);
        } else {
          logger.log('info', `   └─ Port ${esPort} is available`);
        }
        break;
    }
  }
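The not_exists branch above relies on helpers.isPortAvailable, whose implementation is not shown in this diff. A minimal sketch of such a check, assuming Node's built-in net module, could look like this (hypothetical helper, not part of the diff):

import * as net from 'net';

export const isPortAvailable = (port: number): Promise<boolean> => {
  return new Promise((resolve) => {
    const tester = net
      .createServer()
      .once('error', () => resolve(false)) // e.g. EADDRINUSE: something already listens
      .once('listening', () => {
        tester.close(() => resolve(true)); // the port was free, release it again
      })
      .listen(port, '0.0.0.0');
  });
};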
  /**
   * Show configuration
   */
@@ -420,6 +715,15 @@ export class ServiceManager {
    logger.log('info', `  Data: ${this.config.getDataDirectories().minio}`);
    logger.log('info', `  Endpoint: ${config.S3_ENDPOINT}`);
    logger.log('info', `  Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);

    console.log();
    logger.log('note', 'Elasticsearch:');
    logger.log('info', `  Host: ${config.ELASTICSEARCH_HOST}:${config.ELASTICSEARCH_PORT}`);
    logger.log('info', `  User: ${config.ELASTICSEARCH_USER}`);
    logger.log('info', '  Password: ***');
    logger.log('info', `  Container: ${this.config.getContainerNames().elasticsearch}`);
    logger.log('info', `  Data: ${this.config.getDataDirectories().elasticsearch}`);
    logger.log('info', `  Connection: ${config.ELASTICSEARCH_URL}`);
  }
  /**
@@ -477,16 +781,29 @@ export class ServiceManager {
          logger.log('note', 'S3/MinIO container is not running');
        }
        break;

      case 'elasticsearch':
      case 'es':
        if (await this.docker.isRunning(containers.elasticsearch)) {
          helpers.printHeader(`Elasticsearch Logs (last ${lines} lines)`);
          const logs = await this.docker.logs(containers.elasticsearch, lines);
          console.log(logs);
        } else {
          logger.log('note', 'Elasticsearch container is not running');
        }
        break;

      case 'all':
      case '':
        await this.showLogs('mongo', lines);
        console.log();
        await this.showLogs('minio', lines);
        console.log();
        await this.showLogs('elasticsearch', lines);
        break;

      default:
        logger.log('note', 'Usage: gitzone services logs [mongo|s3|all] [lines]');
        logger.log('note', 'Usage: gitzone services logs [mongo|s3|elasticsearch|all] [lines]');
        break;
    }
  }
@@ -497,24 +814,40 @@ export class ServiceManager {
  public async removeContainers(): Promise<void> {
    const containers = this.config.getContainerNames();
    let removed = false;

    if (await this.docker.exists(containers.mongo)) {
      if (await this.docker.remove(containers.mongo, true)) {
        logger.log('ok', '  MongoDB container removed ✓');
        removed = true;
      }
    }

    if (await this.docker.exists(containers.minio)) {
      if (await this.docker.remove(containers.minio, true)) {
        logger.log('ok', '  S3/MinIO container removed ✓');
        removed = true;
      }
    }

    if (await this.docker.exists(containers.elasticsearch)) {
      if (await this.docker.remove(containers.elasticsearch, true)) {
        logger.log('ok', '  Elasticsearch container removed ✓');
        removed = true;
      }
    }

    if (!removed) {
      logger.log('note', '  No containers to remove');
    }

    // Check if all containers are gone, then unregister from global registry
    const mongoExists = await this.docker.exists(containers.mongo);
    const minioExists = await this.docker.exists(containers.minio);
    const esExists = await this.docker.exists(containers.elasticsearch);

    if (!mongoExists && !minioExists && !esExists) {
      await this.globalRegistry.unregisterProject(process.cwd());
    }
  }
  /**
@@ -523,24 +856,60 @@ export class ServiceManager {
  public async cleanData(): Promise<void> {
    const directories = this.config.getDataDirectories();
    let cleaned = false;

    if (await plugins.smartfile.fs.fileExists(directories.mongo)) {
      await plugins.smartfile.fs.remove(directories.mongo);
    if (await plugins.smartfs.directory(directories.mongo).exists()) {
      await plugins.smartfs.directory(directories.mongo).recursive().delete();
      logger.log('ok', '  MongoDB data removed ✓');
      cleaned = true;
    }

    if (await plugins.smartfile.fs.fileExists(directories.minio)) {
      await plugins.smartfile.fs.remove(directories.minio);
    if (await plugins.smartfs.directory(directories.minio).exists()) {
      await plugins.smartfs.directory(directories.minio).recursive().delete();
      logger.log('ok', '  S3/MinIO data removed ✓');
      cleaned = true;
    }

    if (await plugins.smartfs.directory(directories.elasticsearch).exists()) {
      await plugins.smartfs.directory(directories.elasticsearch).recursive().delete();
      logger.log('ok', '  Elasticsearch data removed ✓');
      cleaned = true;
    }

    if (!cleaned) {
      logger.log('note', '  No data to clean');
    }
  }

  /**
   * Configure which services are enabled
   */
  public async configureServices(): Promise<void> {
    logger.log('note', 'Select which services to enable for this project:');
    console.log();

    const currentServices = this.enabledServices || ['mongodb', 'minio', 'elasticsearch'];

    const smartinteract = new plugins.smartinteract.SmartInteract();
    const response = await smartinteract.askQuestion({
      name: 'services',
      type: 'checkbox',
      message: 'Which services do you want to enable?',
      choices: [
        { name: 'MongoDB', value: 'mongodb' },
        { name: 'MinIO (S3)', value: 'minio' },
        { name: 'Elasticsearch', value: 'elasticsearch' }
      ],
      default: currentServices
    });

    this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];

    // Save to npmextra.json
    await this.saveServiceConfiguration(this.enabledServices);

    logger.log('ok', '✅ Service configuration updated');
  }
  /**
   * Reconfigure services with new ports
   */
@@ -551,20 +920,25 @@ export class ServiceManager {

    // Stop existing containers
    logger.log('note', '🛑 Stopping existing containers...');

    if (await this.docker.exists(containers.mongo)) {
      await this.docker.stop(containers.mongo);
      logger.log('ok', '  MongoDB stopped ✓');
    }

    if (await this.docker.exists(containers.minio)) {
      await this.docker.stop(containers.minio);
      logger.log('ok', '  S3/MinIO stopped ✓');
    }

    if (await this.docker.exists(containers.elasticsearch)) {
      await this.docker.stop(containers.elasticsearch);
      logger.log('ok', '  Elasticsearch stopped ✓');
    }

    // Reconfigure ports
    await this.config.reconfigurePorts();

    // Ask if user wants to restart services
    const smartinteract = new plugins.smartinteract.SmartInteract();
    const response = await smartinteract.askQuestion({
@@ -573,11 +947,10 @@ export class ServiceManager {
      message: 'Do you want to start services with new ports?',
      default: true
    });

    if (response.value) {
      console.log();
      await this.startMongoDB();
      await this.startMinIO();
      await this.startAll();
    }
  }
}
@@ -42,11 +42,15 @@ export const getRandomAvailablePort = async (): Promise<number> => {
/**
 * Get the project name from package.json or directory
 */
export const getProjectName = (): string => {
export const getProjectName = async (): Promise<string> => {
  try {
    const packageJsonPath = plugins.path.join(process.cwd(), 'package.json');
    if (plugins.smartfile.fs.fileExistsSync(packageJsonPath)) {
      const packageJson = plugins.smartfile.fs.toObjectSync(packageJsonPath);
    if (await plugins.smartfs.file(packageJsonPath).exists()) {
      const content = (await plugins.smartfs
        .file(packageJsonPath)
        .encoding('utf8')
        .read()) as string;
      const packageJson = JSON.parse(content);
      if (packageJson.name) {
        // Sanitize: @fin.cx/skr → fin-cx-skr
        return packageJson.name.replace(/@/g, '').replace(/[\/\.]/g, '-');
@@ -55,7 +59,7 @@ export const getProjectName = (): string => {
  } catch (error) {
    // Ignore errors and fall back to directory name
  }

  return plugins.path.basename(process.cwd());
};
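The sanitization in getProjectName turns a scoped npm package name into a container-safe slug, as the inline comment notes. A small standalone illustration of the replace chain (hypothetical helper, not part of the diff):

// '@fin.cx/skr' → 'fin.cx/skr' → 'fin-cx-skr'
const sanitize = (name: string) => name.replace(/@/g, '').replace(/[\/\.]/g, '-');
console.log(sanitize('@fin.cx/skr'));         // fin-cx-skr
console.log(sanitize('@push.rocks/smartfs'));  // push-rocks-smartfs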
@@ -1,15 +1,25 @@
import * as plugins from './mod.plugins.js';
import * as helpers from './helpers.js';
import { ServiceManager } from './classes.servicemanager.js';
import { GlobalRegistry } from './classes.globalregistry.js';
import { logger } from '../gitzone.logging.js';

export const run = async (argvArg: any) => {
  const isGlobal = argvArg.g || argvArg.global;
  const command = argvArg._[1] || 'help';

  // Handle global commands first
  if (isGlobal) {
    await handleGlobalCommand(command);
    return;
  }

  // Local project commands
  const serviceManager = new ServiceManager();
  await serviceManager.init();

  const command = argvArg._[1] || 'help';

  const service = argvArg._[2] || 'all';

  switch (command) {
    case 'start':
      await handleStart(serviceManager, service);
@@ -28,9 +38,13 @@ export const run = async (argvArg: any) => {
      break;

    case 'config':
      await serviceManager.showConfig();
      if (service === 'services' || argvArg._[2] === 'services') {
        await handleConfigureServices(serviceManager);
      } else {
        await serviceManager.showConfig();
      }
      break;

    case 'compass':
      await serviceManager.showCompassConnection();
      break;
@@ -61,63 +75,69 @@ export const run = async (argvArg: any) => {

async function handleStart(serviceManager: ServiceManager, service: string) {
  helpers.printHeader('Starting Services');

  switch (service) {
    case 'mongo':
    case 'mongodb':
      await serviceManager.startMongoDB();
      break;

    case 'minio':
    case 's3':
      await serviceManager.startMinIO();
      break;

    case 'elasticsearch':
    case 'es':
      await serviceManager.startElasticsearch();
      break;

    case 'all':
    case '':
      await serviceManager.startMongoDB();
      console.log();
      await serviceManager.startMinIO();
      await serviceManager.startAll();
      break;

    default:
      logger.log('error', `Unknown service: ${service}`);
      logger.log('note', 'Use: mongo, s3, or all');
      logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
      break;
  }
}

async function handleStop(serviceManager: ServiceManager, service: string) {
  helpers.printHeader('Stopping Services');

  switch (service) {
    case 'mongo':
    case 'mongodb':
      await serviceManager.stopMongoDB();
      break;

    case 'minio':
    case 's3':
      await serviceManager.stopMinIO();
      break;

    case 'elasticsearch':
    case 'es':
      await serviceManager.stopElasticsearch();
      break;

    case 'all':
    case '':
      await serviceManager.stopMongoDB();
      console.log();
      await serviceManager.stopMinIO();
      await serviceManager.stopAll();
      break;

    default:
      logger.log('error', `Unknown service: ${service}`);
      logger.log('note', 'Use: mongo, s3, or all');
      logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
      break;
  }
}

async function handleRestart(serviceManager: ServiceManager, service: string) {
  helpers.printHeader('Restarting Services');

  switch (service) {
    case 'mongo':
    case 'mongodb':
@@ -125,24 +145,28 @@ async function handleRestart(serviceManager: ServiceManager, service: string) {
      await plugins.smartdelay.delayFor(2000);
      await serviceManager.startMongoDB();
      break;

    case 'minio':
    case 's3':
      await serviceManager.stopMinIO();
      await plugins.smartdelay.delayFor(2000);
      await serviceManager.startMinIO();
      break;

    case 'elasticsearch':
    case 'es':
      await serviceManager.stopElasticsearch();
      await plugins.smartdelay.delayFor(2000);
      await serviceManager.startElasticsearch();
      break;

    case 'all':
    case '':
      await serviceManager.stopMongoDB();
      await serviceManager.stopMinIO();
      await serviceManager.stopAll();
      await plugins.smartdelay.delayFor(2000);
      await serviceManager.startMongoDB();
      console.log();
      await serviceManager.startMinIO();
      await serviceManager.startAll();
      break;

    default:
      logger.log('error', `Unknown service: ${service}`);
      break;
@@ -166,7 +190,7 @@ async function handleClean(serviceManager: ServiceManager) {
  helpers.printHeader('Clean All');
  logger.log('error', '⚠️  WARNING: This will remove all containers and data!');
  logger.log('error', 'This action cannot be undone!');

  const smartinteraction = new plugins.smartinteract.SmartInteract();
  const confirmAnswer = await smartinteraction.askQuestion({
    name: 'confirm',
@@ -174,7 +198,7 @@ async function handleClean(serviceManager: ServiceManager) {
    message: 'Type "yes" to confirm:',
    default: 'no'
  });

  if (confirmAnswer.value === 'yes') {
    await serviceManager.removeContainers();
    console.log();
@@ -185,40 +209,225 @@ async function handleClean(serviceManager: ServiceManager) {
  }
}

async function handleConfigureServices(serviceManager: ServiceManager) {
  helpers.printHeader('Configure Services');
  await serviceManager.configureServices();
}

function showHelp() {
  helpers.printHeader('GitZone Services Manager');

  logger.log('ok', 'Usage: gitzone services [command] [options]');
  console.log();

  logger.log('note', 'Commands:');
  logger.log('info', '  start [service]     Start services (mongo|s3|all)');
  logger.log('info', '  stop [service]      Stop services (mongo|s3|all)');
  logger.log('info', '  restart [service]   Restart services (mongo|s3|all)');
  logger.log('info', '  start [service]     Start services (mongo|s3|elasticsearch|all)');
  logger.log('info', '  stop [service]      Stop services (mongo|s3|elasticsearch|all)');
  logger.log('info', '  restart [service]   Restart services (mongo|s3|elasticsearch|all)');
  logger.log('info', '  status              Show service status');
  logger.log('info', '  config              Show current configuration');
  logger.log('info', '  config services     Configure which services are enabled');
  logger.log('info', '  compass             Show MongoDB Compass connection string');
  logger.log('info', '  logs [service]      Show logs (mongo|s3|all) [lines]');
  logger.log('info', '  logs [service]      Show logs (mongo|s3|elasticsearch|all) [lines]');
  logger.log('info', '  reconfigure         Reassign ports and restart services');
  logger.log('info', '  remove              Remove all containers');
  logger.log('info', '  clean               Remove all containers and data ⚠️');
  logger.log('info', '  help                Show this help message');
  console.log();

  logger.log('note', 'Available Services:');
  logger.log('info', '  • MongoDB (mongo) - Document database');
  logger.log('info', '  • MinIO (s3) - S3-compatible object storage');
  logger.log('info', '  • Elasticsearch (elasticsearch) - Search and analytics engine');
  console.log();

  logger.log('note', 'Features:');
  logger.log('info', '  • Auto-creates .nogit/env.json with smart defaults');
  logger.log('info', '  • Random ports (20000-30000) to avoid conflicts');
  logger.log('info', '  • Random ports (20000-30000) for MongoDB/MinIO to avoid conflicts');
  logger.log('info', '  • Elasticsearch uses standard port 9200');
  logger.log('info', '  • Project-specific containers for multi-project support');
  logger.log('info', '  • Preserves custom configuration values');
  logger.log('info', '  • MongoDB Compass connection support');
  console.log();

  logger.log('note', 'Examples:');
  logger.log('info', '  gitzone services start                # Start all services');
  logger.log('info', '  gitzone services start mongo          # Start only MongoDB');
  logger.log('info', '  gitzone services stop                 # Stop all services');
  logger.log('info', '  gitzone services status               # Check service status');
  logger.log('info', '  gitzone services config               # Show configuration');
  logger.log('info', '  gitzone services compass              # Get MongoDB Compass connection');
  logger.log('info', '  gitzone services logs mongo 50        # Show last 50 lines of MongoDB logs');
  logger.log('info', '  gitzone services start                # Start all services');
  logger.log('info', '  gitzone services start mongo          # Start only MongoDB');
  logger.log('info', '  gitzone services start elasticsearch  # Start only Elasticsearch');
  logger.log('info', '  gitzone services stop                 # Stop all services');
  logger.log('info', '  gitzone services status               # Check service status');
  logger.log('info', '  gitzone services config               # Show configuration');
  logger.log('info', '  gitzone services compass              # Get MongoDB Compass connection');
  logger.log('info', '  gitzone services logs elasticsearch   # Show Elasticsearch logs');
  console.log();

  logger.log('note', 'Global Commands (-g/--global):');
  logger.log('info', '  list -g             List all registered projects');
  logger.log('info', '  status -g           Show status across all projects');
  logger.log('info', '  stop -g             Stop all containers across all projects');
  logger.log('info', '  cleanup -g          Remove stale registry entries');
  console.log();

  logger.log('note', 'Global Examples:');
  logger.log('info', '  gitzone services list -g              # List all registered projects');
  logger.log('info', '  gitzone services status -g            # Show global container status');
  logger.log('info', '  gitzone services stop -g              # Stop all (prompts for confirmation)');
}

// ==================== Global Command Handlers ====================

async function handleGlobalCommand(command: string) {
  const globalRegistry = GlobalRegistry.getInstance();

  switch (command) {
    case 'list':
      await handleGlobalList(globalRegistry);
      break;

    case 'status':
      await handleGlobalStatus(globalRegistry);
      break;

    case 'stop':
      await handleGlobalStop(globalRegistry);
      break;

    case 'cleanup':
      await handleGlobalCleanup(globalRegistry);
      break;

    case 'help':
    default:
      showHelp();
      break;
  }
}

async function handleGlobalList(globalRegistry: GlobalRegistry) {
  helpers.printHeader('Registered Projects (Global)');

  const projects = await globalRegistry.getAllProjects();
  const projectPaths = Object.keys(projects);

  if (projectPaths.length === 0) {
    logger.log('note', 'No projects registered');
    return;
  }

  for (const path of projectPaths) {
    const project = projects[path];
    const lastActive = new Date(project.lastActive).toLocaleString();

    console.log();
    logger.log('ok', `📁 ${project.projectName}`);
    logger.log('info', `   Path: ${project.projectPath}`);
    logger.log('info', `   Services: ${project.enabledServices.join(', ')}`);
    logger.log('info', `   Last Active: ${lastActive}`);
  }
}

async function handleGlobalStatus(globalRegistry: GlobalRegistry) {
  helpers.printHeader('Global Service Status');

  const statuses = await globalRegistry.getGlobalStatus();

  if (statuses.length === 0) {
    logger.log('note', 'No projects registered');
    return;
  }

  let runningCount = 0;
  let totalContainers = 0;

  for (const project of statuses) {
    console.log();
    logger.log('ok', `📁 ${project.projectName}`);
    logger.log('info', `   Path: ${project.projectPath}`);

    if (project.containers.length === 0) {
      logger.log('note', '   No containers configured');
      continue;
    }

    for (const container of project.containers) {
      totalContainers++;
      const statusIcon = container.status === 'running' ? '🟢' : container.status === 'exited' ? '🟡' : '⚪';
      if (container.status === 'running') runningCount++;
      logger.log('info', `   ${statusIcon} ${container.name}: ${container.status}`);
    }
  }

  console.log();
  logger.log('note', `Summary: ${runningCount}/${totalContainers} containers running across ${statuses.length} project(s)`);
}

async function handleGlobalStop(globalRegistry: GlobalRegistry) {
  helpers.printHeader('Stop All Containers (Global)');

  const statuses = await globalRegistry.getGlobalStatus();

  // Count running containers
  let runningCount = 0;
  for (const project of statuses) {
    for (const container of project.containers) {
      if (container.status === 'running') runningCount++;
    }
  }

  if (runningCount === 0) {
    logger.log('note', 'No running containers found');
    return;
  }

  logger.log('note', `Found ${runningCount} running container(s) across ${statuses.length} project(s)`);
  console.log();

  // Show what will be stopped
  for (const project of statuses) {
    const runningContainers = project.containers.filter(c => c.status === 'running');
    if (runningContainers.length > 0) {
      logger.log('info', `${project.projectName}:`);
      for (const container of runningContainers) {
        logger.log('info', `  • ${container.name}`);
      }
    }
  }

  console.log();
  const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation(
    'Stop all containers?',
    false
  );

  if (!shouldContinue) {
    logger.log('note', 'Cancelled');
    return;
  }

  logger.log('note', 'Stopping all containers...');
  const result = await globalRegistry.stopAll();

  if (result.stopped.length > 0) {
    logger.log('ok', `Stopped: ${result.stopped.join(', ')}`);
  }
  if (result.failed.length > 0) {
    logger.log('error', `Failed to stop: ${result.failed.join(', ')}`);
  }
}

async function handleGlobalCleanup(globalRegistry: GlobalRegistry) {
  helpers.printHeader('Cleanup Registry (Global)');

  logger.log('note', 'Checking for stale registry entries...');
  const removed = await globalRegistry.cleanup();

  if (removed.length === 0) {
    logger.log('ok', 'No stale entries found');
    return;
  }

  logger.log('ok', `Removed ${removed.length} stale entr${removed.length === 1 ? 'y' : 'ies'}:`);
  for (const path of removed) {
    logger.log('info', `  • ${path}`);
  }
}
@@ -6,23 +6,36 @@ import * as paths from '../paths.js';

import { logger } from '../gitzone.logging.js';

export let run = () => {
export let run = async () => {
  const done = plugins.smartpromise.defer();
  logger.log('warn', 'no action specified');

  const dirEntries = await plugins.smartfs.directory(paths.templatesDir).list();
  const templates: string[] = [];
  for (const entry of dirEntries) {
    try {
      const stats = await plugins.smartfs
        .file(plugins.path.join(paths.templatesDir, entry.path))
        .stat();
      if (stats.isDirectory) {
        templates.push(entry.name);
      }
    } catch {
      // Skip entries that can't be accessed
    }
  }

  let projects = `\n`;
  for (const template of templates) {
    projects += `  - ${template}\n`;
  }

  logger.log(
    'info',
    `
You can do one of the following things:
* create a new project with 'gitzone template [template]'
  the following templates exist: ${(() => {
    let projects = `\n`;
    for (const template of plugins.smartfile.fs.listFoldersSync(
      paths.templatesDir,
    )) {
      projects += `  - ${template}\n`;
    }
    return projects;
  })()}
  the following templates exist: ${projects}
* format a project with 'gitzone format'
`,
  );
@@ -11,7 +11,7 @@ export const getTemplatePath = (templateNameArg: string) => {
 * receives a template name and returns wether there is a corresponding template
 */
export const isTemplate = async (templateNameArg: string) => {
  return plugins.smartfile.fs.isDirectory(getTemplatePath(templateNameArg));
  return plugins.smartfs.directory(getTemplatePath(templateNameArg)).exists();
};

export const getTemplate = async (templateNameArg: string) => {

@@ -10,9 +10,13 @@ import * as smartupdate from '@push.rocks/smartupdate';
import * as smartshell from '@push.rocks/smartshell';
import * as smartnetwork from '@push.rocks/smartnetwork';
import * as smartfile from '@push.rocks/smartfile';
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartdelay from '@push.rocks/smartdelay';

// Create smartfs instance for filesystem operations
export const smartfs = new SmartFs(new SmartFsProviderNode());

export {
  smartlog,
  smartlogDestinationLocal,