Compare commits
38 Commits
| SHA1 |
|---|
| 27f2d265de |
| af3e15e922 |
| b44624f2e7 |
| 847e679e92 |
| ddf5023ecb |
| e1d28bc10a |
| 2f3d67f9e3 |
| 6304953234 |
| 8d84620bc4 |
| efd6f04e63 |
| 97ce9db28e |
| 362b4c106e |
| 3efe385952 |
| f6886f172d |
| 81d6273346 |
| 7e6cf5f046 |
| 89cf7dca04 |
| 9639a64437 |
| 48305ebb6a |
| 485c0a3855 |
| adc828d9bb |
| fff1d39338 |
| 5afbe6ccbc |
| 9de17a428d |
| c9985102c3 |
| 73f98c1c3f |
| ae93e6f146 |
| 2abaeee500 |
| 0538ba2586 |
| a451779724 |
| cd3246d659 |
| d37ffd7177 |
| a69b613087 |
| 1ea186d233 |
| f5e7d43cf3 |
| d80faa044a |
| 64062e5c43 |
| bd22844280 |
assets/templates/multienv/deno.json | 9 (new file)

@@ -0,0 +1,9 @@
+{
+  "compilerOptions": {
+    "experimentalDecorators": true,
+    "lib": ["ES2022", "DOM"],
+    "target": "ES2022",
+    "checkJs": true
+  },
+  "nodeModulesDir": true
+}
@@ -17,12 +17,10 @@ fileName: package.json
     "buildDocs": "(tsdoc)"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.25",
-    "@git.zone/tsbundle": "^2.0.5",
-    "@git.zone/tsrun": "^1.2.46",
-    "@git.zone/tstest": "^1.0.44",
-    "@push.rocks/tapbundle": "^5.0.15",
-    "@types/node": "^20.8.7"
+    "@git.zone/tsbuild": "^3.1.2",
+    "@git.zone/tsrun": "^2.0.0",
+    "@git.zone/tstest": "^3.1.3",
+    "@types/node": "^24.10.1"
   },
   "dependencies": {}
 }
@@ -1,8 +1,8 @@
-import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
+import { expect, tap } from '@git.zone/tstest/tapbundle';
 import * as {{module.name}} from '../ts/index.js'

 tap.test('first test', async () => {
   console.log({{module.name}})
 })

-tap.start()
+export default tap.start()
|||||||
@@ -17,18 +17,18 @@ fileName: package.json
|
|||||||
"build": "(tsbuild --web --allowimplicitany)"
|
"build": "(tsbuild --web --allowimplicitany)"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.1.17",
|
"@git.zone/tsbuild": "^3.1.2",
|
||||||
"@git.zone/tsrun": "^1.2.8",
|
"@git.zone/tsrun": "^2.0.0",
|
||||||
"@git.zone/tstest": "^1.0.28",
|
"@git.zone/tstest": "^3.1.3",
|
||||||
"@git.zone/tswatch": "^2.0.1",
|
"@git.zone/tswatch": "^2.0.1",
|
||||||
"@push.rocks/tapbundle": "^5.5.4"
|
"@types/node": "^24.10.1"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@api.global/typedserver": "^3.0.53",
|
"@api.global/typedserver": "^3.0.53",
|
||||||
"@push.rocks/projectinfo": "^5.0.1",
|
"@push.rocks/projectinfo": "^5.0.2",
|
||||||
"@push.rocks/qenv": "^6.1.0",
|
"@push.rocks/qenv": "^6.1.0",
|
||||||
"@push.rocks/smartdata": "^5.0.7",
|
"@push.rocks/smartdata": "^5.0.7",
|
||||||
"@push.rocks/smartpath": "^5.0.5",
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
"@push.rocks/smartstate": "^2.0.0"
|
"@push.rocks/smartstate": "^2.0.0"
|
||||||
},
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
assets/templates/tsconfig_update/tsconfig.json

@@ -1,8 +1,5 @@
 {
   "compilerOptions": {
-    "experimentalDecorators": true,
-    "emitDecoratorMetadata": true,
-    "useDefineForClassFields": false,
     "target": "ES2022",
     "module": "NodeNext",
     "moduleResolution": "NodeNext",
changelog.md | 127

@@ -1,5 +1,132 @@
 # Changelog
+
+## 2025-12-02 - 2.2.0 - feat(services)
+Improve services manager and configuration; switch test templates to @git.zone/tstest; bump dev dependencies and update docs
+
+- services: Add robust ServiceConfiguration (creates .nogit/env.json with sane defaults, syncs ports from existing Docker containers, validates and can reconfigure ports)
+- services CLI: improved start/stop/restart flows, better logging/help output and enhanced global commands (list/status/stop/cleanup)
+- templates/tests: replace @push.rocks/tapbundle with @git.zone/tstest and update template test.ts to export default tap.start()
+- format: stop auto-updating tslint template and mark @push.rocks/tapbundle as deprecated in package formatting logic
+- dependencies: bump @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tstest, @git.zone/tsdoc, @push.rocks/projectinfo, @push.rocks/smartpath, @push.rocks/smartfs, prettier and other dev deps
+- docs: README updates — add issue reporting/security section, AI-powered commit recommendation notes, and clarify trademark/legal wording
+
+## 2025-11-29 - 2.1.0 - feat(mod_services)
+Add global service registry and global commands for managing project containers
+
+- Introduce GlobalRegistry class to track registered projects, their containers, ports and last activity (ts/mod_services/classes.globalregistry.ts)
+- Add global CLI mode for services (use -g/--global) with commands: list, status, stop, cleanup (ts/mod_services/index.ts)
+- ServiceManager now registers the current project with the global registry when starting services and unregisters when all containers are removed (ts/mod_services/classes.servicemanager.ts)
+- Global handlers to list projects, show aggregated status, stop containers across projects and cleanup stale entries
+- Bump dependency @push.rocks/smartfile to ^13.1.0 in package.json
+
+## 2025-11-27 - 2.0.0 - BREAKING CHANGE(core)
+Migrate filesystem to smartfs (async) and add Elasticsearch service support; refactor format/commit/meta modules
+
+- Replace @push.rocks/smartfile usage with @push.rocks/smartfs across the codebase; all filesystem operations are now async (SmartFs.file(...).read()/write(), SmartFs.directory(...).list()/create()/delete(), etc.)
+- Convert formerly synchronous helpers and APIs to async (notable: detectProjectType, getProjectName, readCurrentVersion and related version bumping logic). Callers updated accordingly.
+- Add Elasticsearch support to services: new config fields (ELASTICSEARCH_*), Docker run/start/stop/logs/status handling, and ELASTICSEARCH_URL in service configuration.
+- Refactor formatting subsystem: cache and rollback/backup systems removed/disabled for stability, format planner execution simplified (sequential), diff/stats reporting updated to use smartfs.
+- Update package.json dependencies: bump @git.zone/tsbuild, tsrun, tstest; upgrade @push.rocks/smartfile to v13 and add @push.rocks/smartfs dependency; update @types/node.
+- Update commit flow and changelog generation to use smartfs for reading/writing files and to await version/branch detection where necessary.
+- Expose a SmartFs instance via plugins and adjust all mod.* plugin files to import/use smartfs where required.
+- Breaking change: Public and internal APIs that previously used synchronous smartfile APIs are now asynchronous. Consumers and scripts must await these functions and use the new smartfs API.
+
+## 2025-11-17 - 1.21.5 - fix(tsconfig)
+Remove emitDecoratorMetadata from tsconfig template
+
+- Removed the "emitDecoratorMetadata" compiler option from assets/templates/tsconfig_update/tsconfig.json
+- This updates the tsconfig template to avoid emitting decorator metadata when targeting ES2022
+
+## 2025-11-17 - 1.21.4 - fix(tsconfig template)
+Remove experimentalDecorators and useDefineForClassFields from tsconfig template
+
+- Removed experimentalDecorators option from assets/templates/tsconfig_update/tsconfig.json
+- Removed useDefineForClassFields option from assets/templates/tsconfig_update/tsconfig.json
+
+## 2025-11-17 - 1.21.3 - fix(assets/templates/multienv)
+Remove unused Bun configuration template (assets/templates/multienv/bunfig.toml)
+
+- Deleted assets/templates/multienv/bunfig.toml which previously provided Bun TypeScript decorator configuration
+- Cleans up stale/unused template to avoid shipping obsolete Bun config
+- No functional code changes; removes an unused asset file
+
+## 2025-11-17 - 1.21.2 - fix(templates/multienv)
+Disable useDefineForClassFields in multienv TypeScript configs to ensure decorator compatibility
+
+- Set useDefineForClassFields = false in assets/templates/multienv/bunfig.toml to keep Bun's transpiler compatible with decorator usage
+- Set "useDefineForClassFields": false in assets/templates/multienv/deno.json to ensure Deno/TypeScript compiler emits class fields compatible with decorators
+
+## 2025-11-17 - 1.21.1 - fix(templates.multienv)
+Enable checkJs in multienv Deno template to enable JS type checking
+
+- Added "checkJs": true to compilerOptions in assets/templates/multienv/deno.json to enable JavaScript type checking for the Deno multienv template
+
+## 2025-11-17 - 1.21.0 - feat(multienv)
+Add multi-env templates enabling TypeScript decorators for Bun and Deno; rename npmextra config key to szci
+
+- Added assets/templates/multienv/bunfig.toml to enable Bun TypeScript transpiler experimentalDecorators
+- Added assets/templates/multienv/deno.json with experimentalDecorators, lib and target set for ES2022
+- Updated npmextra.json: renamed top-level config key from "npmci" to "szci" (keeps npmGlobalTools, npmAccessLevel and npmRegistryUrl unchanged)
+
+## 2025-11-06 - 1.20.0 - feat(commit)
+Add non-interactive --yes (-y) flag to commit command to auto-accept AI recommendations and optionally push with -p
+
+- Add -y / --yes flag to gitzone commit to auto-accept AI-generated commit recommendations without interactive prompts
+- Support -yp or -y -p combinations to auto-accept and push to origin; -p / --push remains the separate control for pushing
+- Implementation creates a smartinteract AnswerBucket programmatically when -y is used and populates commitType, commitScope, commitDescription and pushToOrigin
+- Preserves existing UI output and interactive flow when -y is not used; fully backward compatible and CI/CD friendly
+- Updated CLI usage and documentation (readme.hints.md) to document the new flags
+
+## 2025-11-05 - 1.19.9 - fix(mod_commit)
+Refactor version bumping to a unified implementation for npm and Deno; remove npm-exec based helpers and add file-based version readers/updaters to avoid npm warning pollution
+
+- Removed legacy npm/deno-specific helpers (bumpNpmVersion, syncVersionToDenoJson, bumpDenoVersion) that relied on executing npm and caused warning pollution
+- Added readCurrentVersion() to read version from package.json or deno.json
+- Added updateVersionFile() helper to write version directly into JSON files
+- Added unified bumpProjectVersion() that handles npm, deno and both with a single code path; reuses calculateNewVersion()
+- Stages updated files, commits v<newVersion> and creates a tag v<newVersion>
+- Benefits: no npm warning pollution in deno.json, simpler git history, consistent behavior across project types
+
+## 2025-11-04 - 1.19.8 - fix(package.json)
+Bump @git.zone/tsdoc dependency to ^1.9.2
+
+- Updated dependency @git.zone/tsdoc from ^1.9.1 to ^1.9.2 in package.json
+
+## 2025-11-04 - 1.19.7 - fix(dependencies)
+Bump @git.zone/tsdoc to ^1.9.1
+
+- Updated package.json dependency @git.zone/tsdoc from ^1.9.0 to ^1.9.1
+
+## 2025-11-04 - 1.19.6 - fix(cli)
+Bump @git.zone/tsdoc dependency to ^1.9.0
+
+- Updated dependency @git.zone/tsdoc from ^1.8.3 to ^1.9.0 in package.json
+
+## 2025-11-04 - 1.19.5 - fix(cli)
+Bump @git.zone/tsdoc to ^1.8.3 and add local .claude settings for allowed permissions
+
+- Updated dependency @git.zone/tsdoc from ^1.8.2 to ^1.8.3
+- Added .claude/settings.local.json to declare allowed permissions for local tooling (Bash commands, Docker, npm, WebFetch and MCP actions)
+
+## 2025-11-03 - 1.19.3 - fix(tsdoc)
+Bump @git.zone/tsdoc to ^1.8.0 and add .claude local settings
+
+- Upgrade dependency @git.zone/tsdoc from ^1.6.1 to ^1.8.0 in package.json
+- Add .claude/settings.local.json for local assistant permissions/configuration
+
+## 2025-11-03 - 1.19.2 - fix(tsdoc)
+Bump @git.zone/tsdoc to ^1.6.1 and add .claude/settings.local.json
+
+- Update dependency @git.zone/tsdoc from ^1.6.0 to ^1.6.1
+- Add .claude/settings.local.json to include local Claude settings/permissions
+
+## 2025-11-02 - 1.19.1 - fix(dependencies)
+Bump dependencies and add local Claude settings
+
+- Bump devDependencies: @git.zone/tsbuild -> ^2.7.1, @git.zone/tsrun -> ^1.6.2, @git.zone/tstest -> ^2.7.0
+- Upgrade runtime dependencies: @git.zone/tsdoc -> ^1.6.0; update @push.rocks packages (smartcli ^4.0.19, smartjson ^5.2.0, smartlog ^3.1.10, smartnetwork ^4.4.0, etc.)
+- Add .claude/settings.local.json (local project permissions/settings file)
+
 ## 2025-10-23 - 1.19.0 - feat(mod_commit)
 Add CLI UI helpers and improve commit workflow with progress, recommendations and summary
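The 2.0.0 entry above names the new async smartfs call shapes without showing them in context. A minimal sketch of a migrated call site, assuming a `plugins` barrel module (the `./plugins.js` path is illustrative) that exposes a configured smartfs instance, as the code diffs further down in this compare show:

```typescript
import * as plugins from './plugins.js'; // hypothetical barrel module path

// Read and parse a JSON file (was: plugins.smartfile.fs.toObjectSync(path))
const raw = (await plugins.smartfs
  .file('./package.json')
  .encoding('utf8')
  .read()) as string; // read() returns string | Buffer
const pkg = JSON.parse(raw) as { version?: string };

// Write it back (was: plugins.smartfile.memory.toFs(content, path))
await plugins.smartfs
  .file('./package.json')
  .encoding('utf8')
  .write(JSON.stringify(pkg, null, 2) + '\n');

// Ensure a directory exists, then list it
await plugins.smartfs.directory('.nogit').recursive().create();
const entries = await plugins.smartfs.directory('.nogit').list();
```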
npmextra.json

@@ -1,5 +1,5 @@
 {
-  "npmci": {
+  "szci": {
     "npmGlobalTools": [],
     "npmAccessLevel": "private",
     "npmRegistryUrl": "verdaccio.lossless.one"
package.json | 26

@@ -1,7 +1,7 @@
 {
   "name": "@git.zone/cli",
   "private": false,
-  "version": "1.19.0",
+  "version": "2.2.0",
   "description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
   "main": "dist_ts/index.ts",
   "typings": "dist_ts/index.d.ts",

@@ -57,18 +57,17 @@
   },
   "homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.6.8",
-    "@git.zone/tsrun": "^1.3.3",
-    "@git.zone/tstest": "^2.3.6",
+    "@git.zone/tsbuild": "^3.1.2",
+    "@git.zone/tsrun": "^2.0.0",
+    "@git.zone/tstest": "^3.1.3",
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.2.7",
     "@push.rocks/smartinteract": "^2.0.16",
-    "@push.rocks/smartnetwork": "^4.1.2",
+    "@push.rocks/smartnetwork": "^4.4.0",
     "@push.rocks/smartshell": "^3.3.0",
-    "@types/node": "^22.15.18"
+    "@types/node": "^24.10.1"
   },
   "dependencies": {
-    "@git.zone/tsdoc": "^1.5.2",
+    "@git.zone/tsdoc": "^1.10.0",
     "@git.zone/tspublish": "^1.10.3",
     "@push.rocks/commitinfo": "^1.0.12",
     "@push.rocks/early": "^4.0.4",

@@ -76,13 +75,14 @@
     "@push.rocks/lik": "^6.2.2",
     "@push.rocks/npmextra": "^5.3.3",
     "@push.rocks/projectinfo": "^5.0.2",
-    "@push.rocks/smartchok": "^1.1.1",
-    "@push.rocks/smartcli": "^4.0.11",
+    "@push.rocks/smartcli": "^4.0.19",
     "@push.rocks/smartdiff": "^1.0.3",
+    "@push.rocks/smartfile": "^13.1.0",
+    "@push.rocks/smartfs": "^1.2.0",
     "@push.rocks/smartgulp": "^3.0.4",
-    "@push.rocks/smartjson": "^5.0.20",
+    "@push.rocks/smartjson": "^5.2.0",
     "@push.rocks/smartlegal": "^1.0.27",
-    "@push.rocks/smartlog": "^3.1.9",
+    "@push.rocks/smartlog": "^3.1.10",
     "@push.rocks/smartlog-destination-local": "^9.0.2",
     "@push.rocks/smartmustache": "^3.0.2",
     "@push.rocks/smartnpm": "^2.0.6",

@@ -95,7 +95,7 @@
     "@push.rocks/smartunique": "^3.0.9",
     "@push.rocks/smartupdate": "^2.0.6",
     "@types/through2": "^2.0.41",
-    "prettier": "^3.6.2",
+    "prettier": "^3.7.3",
     "through2": "^4.0.2"
   },
   "files": [
pnpm-lock.yaml | 4002 (generated file; diff suppressed because it is too large)
readme.hints.md | 113

@@ -89,6 +89,41 @@ The format module is responsible for project standardization:
 5. **Performance Optimizations**: Parallel execution and caching
 6. **Reporting**: Diff views, statistics, verbose logging
 7. **Architecture**: Clean separation of concerns with new classes
+8. **Unified Version Bumping**: Self-managed version updates eliminating npm warning pollution in deno.json
+
+### Version Bumping Refactor (Latest)
+
+The commit module's version bumping has been refactored to eliminate npm command dependencies:
+
+**Changes:**
+- Removed `bumpNpmVersion()` - was causing npm warnings to pollute deno.json
+- Removed `syncVersionToDenoJson()` - no longer needed with unified approach
+- Removed separate `bumpDenoVersion()` - replaced by unified implementation
+- Added `readCurrentVersion()` helper - reads from either package.json or deno.json
+- Added `updateVersionFile()` helper - updates JSON files directly
+- Unified `bumpProjectVersion()` - handles npm/deno/both with single clean code path
+
+**Benefits:**
+- No npm warning pollution in version fields
+- Full control over version bumping process
+- Simpler git history (no amending, no force-tagging)
+- Same code path for all project types
+- Reuses existing `calculateNewVersion()` function
+
+### Auto-Accept Flag for Commits
+
+The commit module now supports `-y/--yes` flag for non-interactive commits:
+
+**Usage:**
+- `gitzone commit -y` - Auto-accepts AI recommendations without prompts
+- `gitzone commit -yp` - Auto-accepts and pushes to origin
+- Separate `-p/--push` flag controls push behavior
+
+**Implementation:**
+- Creates AnswerBucket programmatically when `-y` flag detected
+- Preserves all UI output for transparency
+- Fully backward compatible with interactive mode
+- CI/CD friendly for automated workflows
+
 ## Development Tips

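A condensed sketch of the auto-accept path just described, taken from the ts/mod_commit diff later in this compare; the `nextCommitObject` fields hold the AI-generated recommendations:

```typescript
let answerBucket: plugins.smartinteract.AnswerBucket;

if (argvArg.y || argvArg.yes) {
  // -y/--yes: accept the AI recommendations without prompting
  answerBucket = new plugins.smartinteract.AnswerBucket();
  answerBucket.addAnswer({ name: 'commitType', value: nextCommitObject.recommendedNextVersionLevel });
  answerBucket.addAnswer({ name: 'commitScope', value: nextCommitObject.recommendedNextVersionScope });
  answerBucket.addAnswer({ name: 'commitDescription', value: nextCommitObject.recommendedNextVersionMessage });
  // push only when -p/--push is also given
  answerBucket.addAnswer({ name: 'pushToOrigin', value: !!(argvArg.p || argvArg.push) });
}
```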
@@ -137,6 +172,27 @@ The format module is responsible for project standardization:

 ## CLI Usage

+### Commit Commands
+
+```bash
+# Interactive commit (default)
+gitzone commit
+
+# Auto-accept AI recommendations (no prompts)
+gitzone commit -y
+gitzone commit --yes
+
+# Auto-accept and push to origin
+gitzone commit -yp
+gitzone commit -y -p
+gitzone commit --yes --push
+
+# Run format before commit
+gitzone commit --format
+```
+
+### Format Commands
+
 ```bash
 # Basic format
 gitzone format
@@ -187,7 +243,60 @@ gitzone format --clean-backups

 ## API Changes

-- smartfile API updated to use fs._ and memory._ namespaces
+### Smartfile v13 Migration (Latest - Completed)
+
+The project has been fully migrated from @push.rocks/smartfile v11 to v13, which introduced a major breaking change where filesystem operations were split into two separate packages:
+
+**Packages:**
+- `@push.rocks/smartfile` v13.0.1 - File representation classes (SmartFile, StreamFile, VirtualDirectory)
+- `@push.rocks/smartfs` v1.1.0 - Filesystem operations (read, write, exists, stat, etc.)
+
+**Key API Changes:**
+1. **File Reading**:
+   - Old: `plugins.smartfile.fs.toStringSync(path)` or `plugins.smartfile.fs.toObjectSync(path)`
+   - New: `await plugins.smartfs.file(path).encoding('utf8').read()` + JSON.parse if needed
+   - Important: `read()` returns `string | Buffer` - use `as string` type assertion when encoding is set
+
+2. **File Writing**:
+   - Old: `plugins.smartfile.memory.toFs(content, path)` or `plugins.smartfile.memory.toFsSync(content, path)`
+   - New: `await plugins.smartfs.file(path).encoding('utf8').write(content)`
+
+3. **File Existence**:
+   - Old: `plugins.smartfile.fs.fileExists(path)` or `plugins.smartfile.fs.fileExistsSync(path)`
+   - New: `await plugins.smartfs.file(path).exists()`
+
+4. **Directory Operations**:
+   - Old: `plugins.smartfile.fs.ensureDir(path)`
+   - New: `await plugins.smartfs.directory(path).recursive().create()`
+   - Old: `plugins.smartfile.fs.remove(path)`
+   - New: `await plugins.smartfs.directory(path).recursive().delete()` or `await plugins.smartfs.file(path).delete()`
+
+5. **Directory Listing**:
+   - Old: `plugins.smartfile.fs.listFolders(path)` or `plugins.smartfile.fs.listFoldersSync(path)`
+   - New: `await plugins.smartfs.directory(path).list()` then filter by `stats.isDirectory`
+   - Note: `list()` returns `IDirectoryEntry[]` with `path` and `name` properties - use `stat()` to check if directory
+
+6. **File Stats**:
+   - Old: `stats.isDirectory()` (method)
+   - New: `stats.isDirectory` (boolean property)
+   - Old: `stats.mtimeMs`
+   - New: `stats.mtime.getTime()`
+
+7. **SmartFile Factory**:
+   - Old: Direct SmartFile instantiation
+   - New: `plugins.smartfile.SmartFileFactory.nodeFs()` then factory methods
+
+**Migration Pattern:**
+All sync methods must become async. Functions that were previously synchronous (like `getProjectName()`) now return `Promise<T>` and must be awaited.
+
+**Affected Modules:**
+- ts/mod_format/* (largest area - 15+ files)
+- ts/mod_commit/* (version bumping)
+- ts/mod_services/* (configuration management)
+- ts/mod_meta/* (meta repository management)
+- ts/mod_standard/* (template listing)
+- ts/mod_template/* (template operations)
+
+**Previous API Changes:**
 - smartnpm requires instance creation: `new NpmRegistry()`
-- All file operations now use updated APIs
 - Type imports use `import type` for proper verbatim module syntax
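A before/after sketch of the migration pattern, using only the call shapes listed above; `rewriteFile` and `somePath` are illustrative names, not part of the codebase:

```typescript
// Before (smartfile v11, sync):
// const content = plugins.smartfile.fs.toStringSync(somePath);
// plugins.smartfile.memory.toFsSync(content, somePath);

// After (smartfs, async) - callers must now await:
async function rewriteFile(somePath: string): Promise<void> {
  const exists = await plugins.smartfs.file(somePath).exists();
  if (!exists) return;

  const content = (await plugins.smartfs
    .file(somePath)
    .encoding('utf8')
    .read()) as string; // read() returns string | Buffer

  await plugins.smartfs.file(somePath).encoding('utf8').write(content);
}
```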
readme.md | 43

@@ -9,6 +9,10 @@

 gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, seamless version control, and development service orchestration. Whether you're bootstrapping a new TypeScript project, maintaining code quality, managing complex multi-repository setups, or spinning up local development databases, gitzone has got you covered.

+## Issue Reporting and Security
+
+For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
+
 ## 🏃♂️ Quick Start

 ### Installation

@@ -35,13 +39,13 @@ gitzone format
 # Start local MongoDB and MinIO services
 gitzone services start

-# Create a semantic commit
+# Create a semantic commit with AI-powered suggestions
 gitzone commit
 ```

 ## 🛠️ Core Features

-### 🐳 Development Services Management (NEW!)
+### 🐳 Development Services Management

 Effortlessly manage local MongoDB and MinIO (S3-compatible) services for your development environment:

@@ -110,7 +114,7 @@ gitzone template [template-name]
 Each template comes pre-configured with:

 - ✅ TypeScript with modern configurations
-- ✅ Automated testing setup
+- ✅ Automated testing setup with `@git.zone/tstest`
 - ✅ CI/CD pipelines (GitLab/GitHub)
 - ✅ Code formatting and linting
 - ✅ Documentation structure

@@ -170,18 +174,27 @@ gitzone format --clean-backups
 - **Gitignore** - Repository ignore rules
 - **Templates** - Project template updates
 - **Npmextra** - Extended npm configurations
+- **Cleanup** - Removes obsolete files (yarn.lock, package-lock.json, tslint.json, etc.)

 ### 🔀 Semantic Commits & Versioning

-Create standardized commits that automatically handle versioning:
+Create standardized commits with AI-powered suggestions that automatically handle versioning:

 ```bash
+# Interactive commit with AI recommendations
 gitzone commit
+
+# Auto-accept AI recommendations
+gitzone commit -y
+
+# Auto-accept and push
+gitzone commit -y -p
 ```

 Features:

-- 📝 Interactive commit message builder
+- 🤖 **AI-powered analysis** - Analyzes your changes and suggests commit type, scope, and message
+- 📝 Interactive commit message builder with smart defaults
 - 🏷️ Automatic version bumping (major/minor/patch)
 - 📜 Changelog generation
 - 🚀 Optional auto-push to origin

@@ -189,11 +202,10 @@ Features:

 The commit wizard guides you through:

-1. **Type selection** (feat/fix/docs/style/refactor/perf/test/chore)
+1. **Type selection** (fix/feat/BREAKING CHANGE) with AI recommendation
 2. **Scope definition** (component/module affected)
 3. **Description crafting**
-4. **Breaking change detection**
-5. **Version bump determination**
+4. **Version bump determination**

 ### 🏗️ Meta Repository Management

@@ -443,7 +455,6 @@ gitzone services clean # ⚠️ Warning: deletes data

 - **TypeScript** - First-class support
 - **Prettier** - Code formatting
-- **ESLint** - Linting (via format modules)
 - **npm/pnpm** - Package management
 - **MongoDB** - Local database service
 - **MinIO** - S3-compatible object storage

@@ -522,19 +533,21 @@ gitzone is optimized for speed:

 ## License and Legal Information

-This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
+This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.

 **Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

 ### Trademarks

-This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
+This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
+
+Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.

 ### Company Information

 Task Venture Capital GmbH
-Registered at District court Bremen HRB 35230 HB, Germany
+Registered at District Court Bremen HRB 35230 HB, Germany

-For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
+For any legal inquiries or further information, please contact us via email at hello@task.vc.

 By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
@@ -3,6 +3,6 @@
 */
 export const commitinfo = {
   name: '@git.zone/cli',
-  version: '1.19.0',
+  version: '2.2.0',
   description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
 }
@@ -27,35 +27,63 @@ export const run = async (argvArg: any) => {
     recommendedNextVersionScope: nextCommitObject.recommendedNextVersionScope,
     recommendedNextVersionMessage: nextCommitObject.recommendedNextVersionMessage,
   });
-  const commitInteract = new plugins.smartinteract.SmartInteract();
-  commitInteract.addQuestions([
-    {
-      type: 'list',
-      name: `commitType`,
-      message: `Choose TYPE of the commit:`,
-      choices: [`fix`, `feat`, `BREAKING CHANGE`],
-      default: nextCommitObject.recommendedNextVersionLevel,
-    },
-    {
-      type: 'input',
-      name: `commitScope`,
-      message: `What is the SCOPE of the commit:`,
-      default: nextCommitObject.recommendedNextVersionScope,
-    },
-    {
-      type: `input`,
-      name: `commitDescription`,
-      message: `What is the DESCRIPTION of the commit?`,
-      default: nextCommitObject.recommendedNextVersionMessage,
-    },
-    {
-      type: 'confirm',
-      name: `pushToOrigin`,
-      message: `Do you want to push this version now?`,
-      default: true,
-    },
-  ]);
-  const answerBucket = await commitInteract.runQueue();
+
+  let answerBucket: plugins.smartinteract.AnswerBucket;
+
+  // Check if -y or --yes flag is set to auto-accept recommendations
+  if (argvArg.y || argvArg.yes) {
+    // Auto-mode: create AnswerBucket programmatically
+    logger.log('info', '✓ Auto-accepting AI recommendations (--yes flag)');
+
+    answerBucket = new plugins.smartinteract.AnswerBucket();
+    answerBucket.addAnswer({
+      name: 'commitType',
+      value: nextCommitObject.recommendedNextVersionLevel,
+    });
+    answerBucket.addAnswer({
+      name: 'commitScope',
+      value: nextCommitObject.recommendedNextVersionScope,
+    });
+    answerBucket.addAnswer({
+      name: 'commitDescription',
+      value: nextCommitObject.recommendedNextVersionMessage,
+    });
+    answerBucket.addAnswer({
+      name: 'pushToOrigin',
+      value: !!(argvArg.p || argvArg.push), // Only push if -p flag also provided
+    });
+  } else {
+    // Interactive mode: prompt user for input
+    const commitInteract = new plugins.smartinteract.SmartInteract();
+    commitInteract.addQuestions([
+      {
+        type: 'list',
+        name: `commitType`,
+        message: `Choose TYPE of the commit:`,
+        choices: [`fix`, `feat`, `BREAKING CHANGE`],
+        default: nextCommitObject.recommendedNextVersionLevel,
+      },
+      {
+        type: 'input',
+        name: `commitScope`,
+        message: `What is the SCOPE of the commit:`,
+        default: nextCommitObject.recommendedNextVersionScope,
+      },
+      {
+        type: `input`,
+        name: `commitDescription`,
+        message: `What is the DESCRIPTION of the commit?`,
+        default: nextCommitObject.recommendedNextVersionMessage,
+      },
+      {
+        type: 'confirm',
+        name: `pushToOrigin`,
+        message: `Do you want to push this version now?`,
+        default: true,
+      },
+    ]);
+    answerBucket = await commitInteract.runQueue();
+  }
   const commitString = createCommitStringFromAnswerBucket(answerBucket);
   const commitVersionType = (() => {
     switch (answerBucket.getAnswerFor('commitType')) {
@@ -114,10 +142,10 @@ export const run = async (argvArg: any) => {
     changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
   }

-  await plugins.smartfile.memory.toFs(
-    changelog,
-    plugins.path.join(paths.cwd, `changelog.md`),
-  );
+  await plugins.smartfs
+    .file(plugins.path.join(paths.cwd, `changelog.md`))
+    .encoding('utf8')
+    .write(changelog);
   ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'done');

   // Step 3: Staging files
@@ -40,8 +40,8 @@ export async function detectProjectType(): Promise<ProjectType> {
   const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
   const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');

-  const hasPackageJson = await plugins.smartfile.fs.fileExists(packageJsonPath);
-  const hasDenoJson = await plugins.smartfile.fs.fileExists(denoJsonPath);
+  const hasPackageJson = await plugins.smartfs.file(packageJsonPath).exists();
+  const hasDenoJson = await plugins.smartfs.file(denoJsonPath).exists();

   if (hasPackageJson && hasDenoJson) {
     logger.log('info', 'Detected dual project (npm + deno)');
@@ -91,118 +91,59 @@ function calculateNewVersion(currentVersion: string, versionType: VersionType):
 }

-/**
- * Bumps the version in deno.json, commits the change, and creates a tag
- * @param versionType Type of version bump
- * @returns The new version string
- */
-export async function bumpDenoVersion(versionType: VersionType): Promise<string> {
-  const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
-  const smartshellInstance = new plugins.smartshell.Smartshell({
-    executor: 'bash',
-    sourceFilePaths: [],
-  });
-
-  try {
-    // Read deno.json
-    const denoConfig = plugins.smartfile.fs.toObjectSync(
-      denoJsonPath
-    ) as { version?: string };
-
-    if (!denoConfig.version) {
-      throw new Error('deno.json does not contain a version field');
-    }
-
-    const currentVersion = denoConfig.version;
-    const newVersion = calculateNewVersion(currentVersion, versionType);
-
-    logger.log('info', `Bumping deno.json version: ${currentVersion} → ${newVersion}`);
-
-    // Update version
-    denoConfig.version = newVersion;
-
-    // Write back to disk
-    await plugins.smartfile.memory.toFs(
-      JSON.stringify(denoConfig, null, 2) + '\n',
-      denoJsonPath
-    );
-
-    // Stage the deno.json file
-    await smartshellInstance.exec('git add deno.json');
-
-    // Commit the version bump
-    await smartshellInstance.exec(`git commit -m "v${newVersion}"`);
-
-    // Create the version tag
-    await smartshellInstance.exec(`git tag v${newVersion} -m "v${newVersion}"`);
-
-    logger.log('info', `Created commit and tag v${newVersion}`);
-
-    return newVersion;
-  } catch (error) {
-    throw new Error(`Failed to bump deno.json version: ${error.message}`);
-  }
-}
-
-/**
- * Bumps the version in package.json using npm version command
- * @param versionType Type of version bump
- * @returns The new version string
- */
-async function bumpNpmVersion(versionType: VersionType): Promise<string> {
-  const smartshellInstance = new plugins.smartshell.Smartshell({
-    executor: 'bash',
-    sourceFilePaths: [],
-  });
-
-  logger.log('info', `Bumping package.json version using npm version ${versionType}`);
-  const result = await smartshellInstance.exec(`npm version ${versionType}`);
-
-  // npm version returns the new version with a 'v' prefix, e.g., "v1.2.3"
-  const newVersion = result.stdout.trim().replace(/^v/, '');
-  return newVersion;
-}
-
-/**
- * Syncs the version from package.json to deno.json and amends the npm commit
- * @param version The version to sync
- */
-async function syncVersionToDenoJson(version: string): Promise<void> {
-  const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
-  const smartshellInstance = new plugins.smartshell.Smartshell({
-    executor: 'bash',
-    sourceFilePaths: [],
-  });
-
-  try {
-    const denoConfig = plugins.smartfile.fs.toObjectSync(
-      denoJsonPath
-    ) as { version?: string };
-
-    logger.log('info', `Syncing version to deno.json: ${version}`);
-    denoConfig.version = version;
-
-    await plugins.smartfile.memory.toFs(
-      JSON.stringify(denoConfig, null, 2) + '\n',
-      denoJsonPath
-    );
-
-    // Stage the deno.json file
-    await smartshellInstance.exec('git add deno.json');
-
-    // Amend the npm version commit to include deno.json
-    await smartshellInstance.exec('git commit --amend --no-edit');
-
-    // Re-create the tag with force to update it
-    await smartshellInstance.exec(`git tag -fa v${version} -m "v${version}"`);
-
-    logger.log('info', `Amended commit to include deno.json and updated tag v${version}`);
-  } catch (error) {
-    throw new Error(`Failed to sync version to deno.json: ${error.message}`);
-  }
-}
+/**
+ * Reads the current version from package.json or deno.json
+ * @param projectType The project type to determine which file to read
+ * @returns The current version string
+ */
+async function readCurrentVersion(projectType: ProjectType): Promise<string> {
+  if (projectType === 'npm' || projectType === 'both') {
+    const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
+    const content = (await plugins.smartfs
+      .file(packageJsonPath)
+      .encoding('utf8')
+      .read()) as string;
+    const packageJson = JSON.parse(content) as { version?: string };
+
+    if (!packageJson.version) {
+      throw new Error('package.json does not contain a version field');
+    }
+    return packageJson.version;
+  } else {
+    const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
+    const content = (await plugins.smartfs
+      .file(denoJsonPath)
+      .encoding('utf8')
+      .read()) as string;
+    const denoConfig = JSON.parse(content) as { version?: string };
+
+    if (!denoConfig.version) {
+      throw new Error('deno.json does not contain a version field');
+    }
+    return denoConfig.version;
+  }
+}
+
+/**
+ * Updates the version field in a JSON file (package.json or deno.json)
+ * @param filePath Path to the JSON file
+ * @param newVersion The new version to write
+ */
+async function updateVersionFile(filePath: string, newVersion: string): Promise<void> {
+  const content = (await plugins.smartfs
+    .file(filePath)
+    .encoding('utf8')
+    .read()) as string;
+  const config = JSON.parse(content) as { version?: string };
+  config.version = newVersion;
+  await plugins.smartfs
+    .file(filePath)
+    .encoding('utf8')
+    .write(JSON.stringify(config, null, 2) + '\n');
+}

 /**
  * Bumps the project version based on project type
+ * Handles npm-only, deno-only, and dual projects with unified logic
  * @param projectType The detected project type
  * @param versionType The type of version bump
  * @param currentStep The current step number for progress display
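Inside the same module, the two new helpers compose into a read-modify-write cycle with no npm subprocess. A hypothetical call sequence (the version strings are invented for illustration):

```typescript
const currentVersion = await readCurrentVersion('npm');          // e.g. '2.1.0'
const newVersion = calculateNewVersion(currentVersion, 'minor'); // e.g. '2.2.0'
// Write the bumped version straight into the JSON file:
await updateVersionFile(
  plugins.path.join(paths.cwd, 'package.json'),
  newVersion,
);
```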
@@ -215,6 +156,10 @@ export async function bumpProjectVersion(
   currentStep?: number,
   totalSteps?: number
 ): Promise<string> {
+  if (projectType === 'none') {
+    throw new Error('Cannot bump version: no package.json or deno.json found');
+  }
+
   const projectEmoji = projectType === 'npm' ? '📦' : projectType === 'deno' ? '🦕' : '🔀';
   const description = `🏷️ Bumping version (${projectEmoji} ${projectType})`;

@@ -222,35 +167,52 @@ export async function bumpProjectVersion(
     ui.printStep(currentStep, totalSteps, description, 'in-progress');
   }

-  let newVersion: string;
-
-  switch (projectType) {
-    case 'npm':
-      newVersion = await bumpNpmVersion(versionType);
-      break;
-
-    case 'deno':
-      newVersion = await bumpDenoVersion(versionType);
-      break;
-
-    case 'both': {
-      // Bump npm version first (it handles git tags)
-      newVersion = await bumpNpmVersion(versionType);
-      // Then sync to deno.json
-      await syncVersionToDenoJson(newVersion);
-      break;
-    }
-
-    case 'none':
-      throw new Error('Cannot bump version: no package.json or deno.json found');
-
-    default:
-      throw new Error(`Unknown project type: ${projectType}`);
-  }
-
-  if (currentStep && totalSteps) {
-    ui.printStep(currentStep, totalSteps, description, 'done');
-  }
-
-  return newVersion;
+  const smartshellInstance = new plugins.smartshell.Smartshell({
+    executor: 'bash',
+    sourceFilePaths: [],
+  });
+
+  try {
+    // 1. Read current version
+    const currentVersion = await readCurrentVersion(projectType);
+
+    // 2. Calculate new version (reuse existing function!)
+    const newVersion = calculateNewVersion(currentVersion, versionType);
+
+    logger.log('info', `Bumping version: ${currentVersion} → ${newVersion}`);
+
+    // 3. Determine which files to update
+    const filesToUpdate: string[] = [];
+    const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
+    const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
+
+    if (projectType === 'npm' || projectType === 'both') {
+      await updateVersionFile(packageJsonPath, newVersion);
+      filesToUpdate.push('package.json');
+    }
+
+    if (projectType === 'deno' || projectType === 'both') {
+      await updateVersionFile(denoJsonPath, newVersion);
+      filesToUpdate.push('deno.json');
+    }
+
+    // 4. Stage all updated files
+    await smartshellInstance.exec(`git add ${filesToUpdate.join(' ')}`);
+
+    // 5. Create version commit
+    await smartshellInstance.exec(`git commit -m "v${newVersion}"`);
+
+    // 6. Create version tag
+    await smartshellInstance.exec(`git tag v${newVersion} -m "v${newVersion}"`);
+
+    logger.log('info', `Created commit and tag v${newVersion}`);
+
+    if (currentStep && totalSteps) {
+      ui.printStep(currentStep, totalSteps, description, 'done');
+    }
+
+    return newVersion;
+  } catch (error) {
+    throw new Error(`Failed to bump project version: ${error.message}`);
+  }
 }
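The unified entry point is then a single awaited call. A sketch, assuming the caller already ran `detectProjectType()`, that `'minor'` is a valid `VersionType` value, and that the step numbers 4 and 8 match the caller's progress display:

```typescript
const projectType = await detectProjectType(); // 'npm' | 'deno' | 'both' | 'none'
// Updates the version file(s), creates the "vX.Y.Z" commit and the matching tag:
const newVersion = await bumpProjectVersion(projectType, 'minor', 4, 8);
logger.log('info', `Released v${newVersion}`);
```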
@@ -65,15 +65,15 @@ export abstract class BaseFormatter {
       normalizedPath = './' + filepath;
     }

-    await plugins.smartfile.memory.toFs(content, normalizedPath);
+    await plugins.smartfs.file(normalizedPath).encoding('utf8').write(content);
   }

   protected async createFile(filepath: string, content: string): Promise<void> {
-    await plugins.smartfile.memory.toFs(content, filepath);
+    await plugins.smartfs.file(filepath).encoding('utf8').write(content);
   }

   protected async deleteFile(filepath: string): Promise<void> {
-    await plugins.smartfile.fs.remove(filepath);
+    await plugins.smartfs.file(filepath).delete();
   }

   protected async shouldProcessFile(filepath: string): Promise<boolean> {
@@ -25,7 +25,7 @@ export class ChangeCache {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async initialize(): Promise<void> {
|
async initialize(): Promise<void> {
|
||||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
await plugins.smartfs.directory(this.cacheDir).recursive().create();
|
||||||
}
|
}
|
||||||
|
|
||||||
async getManifest(): Promise<ICacheManifest> {
|
async getManifest(): Promise<ICacheManifest> {
|
||||||
@@ -35,13 +35,16 @@ export class ChangeCache {
|
|||||||
files: [],
|
files: [],
|
||||||
};
|
};
|
||||||
|
|
||||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
const exists = await plugins.smartfs.file(this.manifestPath).exists();
|
||||||
if (!exists) {
|
if (!exists) {
|
||||||
return defaultManifest;
|
return defaultManifest;
|
||||||
}
|
}
|
||||||
|
|
||||||
try {
|
try {
|
||||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
const content = (await plugins.smartfs
|
||||||
|
.file(this.manifestPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
       const manifest = JSON.parse(content);

       // Validate the manifest structure

@@ -57,7 +60,7 @@ export class ChangeCache {
       );
       // Try to delete the corrupted file
       try {
-        await plugins.smartfile.fs.remove(this.manifestPath);
+        await plugins.smartfs.file(this.manifestPath).delete();
       } catch (removeError) {
         // Ignore removal errors
       }
@@ -72,11 +75,14 @@ export class ChangeCache {
     }

     // Ensure directory exists
-    await plugins.smartfile.fs.ensureDir(this.cacheDir);
+    await plugins.smartfs.directory(this.cacheDir).recursive().create();

     // Write directly with proper JSON stringification
     const jsonContent = JSON.stringify(manifest, null, 2);
-    await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
+    await plugins.smartfs
+      .file(this.manifestPath)
+      .encoding('utf8')
+      .write(jsonContent);
   }

   async hasFileChanged(filePath: string): Promise<boolean> {
@@ -85,20 +91,23 @@ export class ChangeCache {
       : plugins.path.join(paths.cwd, filePath);

     // Check if file exists
-    const exists = await plugins.smartfile.fs.fileExists(absolutePath);
+    const exists = await plugins.smartfs.file(absolutePath).exists();
     if (!exists) {
       return true; // File doesn't exist, so it's "changed" (will be created)
     }

     // Get current file stats
-    const stats = await plugins.smartfile.fs.stat(absolutePath);
+    const stats = await plugins.smartfs.file(absolutePath).stat();

     // Skip directories
-    if (stats.isDirectory()) {
+    if (stats.isDirectory) {
       return false; // Directories are not processed
     }

-    const content = plugins.smartfile.fs.toStringSync(absolutePath);
+    const content = (await plugins.smartfs
+      .file(absolutePath)
+      .encoding('utf8')
+      .read()) as string;
     const currentChecksum = this.calculateChecksum(content);

     // Get cached info
@@ -113,7 +122,7 @@ export class ChangeCache {
     return (
       cachedFile.checksum !== currentChecksum ||
       cachedFile.size !== stats.size ||
-      cachedFile.modified !== stats.mtimeMs
+      cachedFile.modified !== stats.mtime.getTime()
     );
   }
@@ -123,14 +132,17 @@ export class ChangeCache {
       : plugins.path.join(paths.cwd, filePath);

     // Get current file stats
-    const stats = await plugins.smartfile.fs.stat(absolutePath);
+    const stats = await plugins.smartfs.file(absolutePath).stat();

     // Skip directories
-    if (stats.isDirectory()) {
+    if (stats.isDirectory) {
       return; // Don't cache directories
     }

-    const content = plugins.smartfile.fs.toStringSync(absolutePath);
+    const content = (await plugins.smartfs
+      .file(absolutePath)
+      .encoding('utf8')
+      .read()) as string;
     const checksum = this.calculateChecksum(content);

     // Update manifest
@@ -140,7 +152,7 @@ export class ChangeCache {
     const cacheEntry: IFileCache = {
       path: filePath,
       checksum,
-      modified: stats.mtimeMs,
+      modified: stats.mtime.getTime(),
       size: stats.size,
     };
@@ -176,7 +188,7 @@ export class ChangeCache {
         ? file.path
         : plugins.path.join(paths.cwd, file.path);

-      if (await plugins.smartfile.fs.fileExists(absolutePath)) {
+      if (await plugins.smartfs.file(absolutePath).exists()) {
         validFiles.push(file);
       }
     }
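The pattern in these ChangeCache hunks repeats across the rest of this compare: @push.rocks/smartfile's static helpers give way to @push.rocks/smartfs's fluent builders, and the stat object changes shape (isDirectory becomes a property instead of a method, and mtimeMs becomes an mtime Date). A minimal sketch of the new read path, assuming only the API surface visible in the hunks themselves:

```typescript
// Sketch only: the import path and top-level file()/directory() exports
// are assumed from the plugins.smartfs usage in the hunks above.
import * as smartfs from '@push.rocks/smartfs';

const snapshotFile = async (absolutePath: string) => {
  // exists() replaces smartfile.fs.fileExists()
  if (!(await smartfs.file(absolutePath).exists())) {
    return null;
  }
  const stats = await smartfs.file(absolutePath).stat();
  // isDirectory is a property here, not a method as on Node's fs.Stats
  if (stats.isDirectory) {
    return null;
  }
  // read() resolves to a string once an encoding is set, hence the cast
  const content = (await smartfs
    .file(absolutePath)
    .encoding('utf8')
    .read()) as string;
  // mtime is a Date, so the cached timestamp becomes mtime.getTime()
  return { content, modified: stats.mtime.getTime(), size: stats.size };
};
```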
@@ -21,14 +21,15 @@ export class DiffReporter {
     }

     try {
-      const exists = await plugins.smartfile.fs.fileExists(change.path);
+      const exists = await plugins.smartfs.file(change.path).exists();
       if (!exists) {
         return null;
       }

-      const currentContent = await plugins.smartfile.fs.toStringSync(
-        change.path,
-      );
+      const currentContent = (await plugins.smartfs
+        .file(change.path)
+        .encoding('utf8')
+        .read()) as string;

       // For planned changes, we need the new content
       if (!change.content) {
@@ -107,10 +108,10 @@ export class DiffReporter {
       })),
     };

-    await plugins.smartfile.memory.toFs(
-      JSON.stringify(report, null, 2),
-      outputPath,
-    );
+    await plugins.smartfs
+      .file(outputPath)
+      .encoding('utf8')
+      .write(JSON.stringify(report, null, 2));
     logger.log('info', `Diff report saved to ${outputPath}`);
   }

@@ -192,10 +192,10 @@ export class FormatStats {
       moduleStats: Array.from(this.stats.moduleStats.values()),
     };

-    await plugins.smartfile.memory.toFs(
-      JSON.stringify(report, null, 2),
-      outputPath,
-    );
+    await plugins.smartfs
+      .file(outputPath)
+      .encoding('utf8')
+      .write(JSON.stringify(report, null, 2));
     logger.log('info', `Statistics report saved to ${outputPath}`);
   }
@@ -36,21 +36,27 @@ export class RollbackManager {
       : plugins.path.join(paths.cwd, filepath);

     // Check if file exists
-    const exists = await plugins.smartfile.fs.fileExists(absolutePath);
+    const exists = await plugins.smartfs.file(absolutePath).exists();
     if (!exists) {
       // File doesn't exist yet (will be created), so we skip backup
       return;
     }

     // Read file content and metadata
-    const content = plugins.smartfile.fs.toStringSync(absolutePath);
-    const stats = await plugins.smartfile.fs.stat(absolutePath);
+    const content = (await plugins.smartfs
+      .file(absolutePath)
+      .encoding('utf8')
+      .read()) as string;
+    const stats = await plugins.smartfs.file(absolutePath).stat();
     const checksum = this.calculateChecksum(content);

     // Create backup
     const backupPath = this.getBackupPath(operationId, filepath);
-    await plugins.smartfile.fs.ensureDir(plugins.path.dirname(backupPath));
-    await plugins.smartfile.memory.toFs(content, backupPath);
+    await plugins.smartfs
+      .directory(plugins.path.dirname(backupPath))
+      .recursive()
+      .create();
+    await plugins.smartfs.file(backupPath).encoding('utf8').write(content);

     // Update operation
     operation.files.push({
@@ -84,7 +90,10 @@ export class RollbackManager {

       // Verify backup integrity
       const backupPath = this.getBackupPath(operationId, file.path);
-      const backupContent = plugins.smartfile.fs.toStringSync(backupPath);
+      const backupContent = await plugins.smartfs
+        .file(backupPath)
+        .encoding('utf8')
+        .read();
       const backupChecksum = this.calculateChecksum(backupContent);

       if (backupChecksum !== file.checksum) {
@@ -92,7 +101,10 @@ export class RollbackManager {
       }

       // Restore file
-      await plugins.smartfile.memory.toFs(file.originalContent, absolutePath);
+      await plugins.smartfs
+        .file(absolutePath)
+        .encoding('utf8')
+        .write(file.originalContent);

       // Restore permissions
       const mode = parseInt(file.permissions, 8);
@@ -129,7 +141,7 @@ export class RollbackManager {
       'operations',
       operation.id,
     );
-    await plugins.smartfile.fs.remove(operationDir);
+    await plugins.smartfs.directory(operationDir).recursive().delete();

     // Remove from manifest
     manifest.operations = manifest.operations.filter(
@@ -148,13 +160,16 @@ export class RollbackManager {

     for (const file of operation.files) {
       const backupPath = this.getBackupPath(operationId, file.path);
-      const exists = await plugins.smartfile.fs.fileExists(backupPath);
+      const exists = await plugins.smartfs.file(backupPath).exists();

       if (!exists) {
         return false;
       }

-      const content = plugins.smartfile.fs.toStringSync(backupPath);
+      const content = await plugins.smartfs
+        .file(backupPath)
+        .encoding('utf8')
+        .read();
       const checksum = this.calculateChecksum(content);

       if (checksum !== file.checksum) {
@@ -171,10 +186,11 @@ export class RollbackManager {
   }

   private async ensureBackupDir(): Promise<void> {
-    await plugins.smartfile.fs.ensureDir(this.backupDir);
-    await plugins.smartfile.fs.ensureDir(
-      plugins.path.join(this.backupDir, 'operations'),
-    );
+    await plugins.smartfs.directory(this.backupDir).recursive().create();
+    await plugins.smartfs
+      .directory(plugins.path.join(this.backupDir, 'operations'))
+      .recursive()
+      .create();
   }

   private generateOperationId(): string {
@@ -204,13 +220,16 @@ export class RollbackManager {
   private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
     const defaultManifest = { operations: [] };

-    const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
+    const exists = await plugins.smartfs.file(this.manifestPath).exists();
     if (!exists) {
       return defaultManifest;
     }

     try {
-      const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
+      const content = (await plugins.smartfs
+        .file(this.manifestPath)
+        .encoding('utf8')
+        .read()) as string;
       const manifest = JSON.parse(content);

       // Validate the manifest structure
@@ -228,7 +247,7 @@ export class RollbackManager {
       );
       // Try to delete the corrupted file
       try {
-        await plugins.smartfile.fs.remove(this.manifestPath);
+        await plugins.smartfs.file(this.manifestPath).delete();
       } catch (removeError) {
         // Ignore removal errors
       }
@@ -249,7 +268,10 @@ export class RollbackManager {

     // Write directly with proper JSON stringification
     const jsonContent = JSON.stringify(manifest, null, 2);
-    await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
+    await plugins.smartfs
+      .file(this.manifestPath)
+      .encoding('utf8')
+      .write(jsonContent);
   }

   private async getOperation(
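Taken together, the RollbackManager hunks amount to a backup-and-restore round trip on the new API. A condensed sketch of that flow, using only the calls that appear in the diff; the checksum helper and paths are illustrative:

```typescript
// Sketch of the backup/restore round trip; checksum() is illustrative,
// the real class uses its own calculateChecksum implementation.
import * as crypto from 'crypto';
import * as path from 'path';
import * as smartfs from '@push.rocks/smartfs';

const checksum = (content: string) =>
  crypto.createHash('sha256').update(content).digest('hex');

const backupFile = async (absolutePath: string, backupPath: string) => {
  const content = (await smartfs
    .file(absolutePath)
    .encoding('utf8')
    .read()) as string;
  // recursive().create() replaces smartfile.fs.ensureDir()
  await smartfs.directory(path.dirname(backupPath)).recursive().create();
  await smartfs.file(backupPath).encoding('utf8').write(content);
  return checksum(content);
};

const restoreFile = async (
  backupPath: string,
  absolutePath: string,
  expected: string,
) => {
  const backupContent = (await smartfs
    .file(backupPath)
    .encoding('utf8')
    .read()) as string;
  // Verify backup integrity before overwriting the live file
  if (checksum(backupContent) !== expected) {
    throw new Error('backup integrity check failed');
  }
  await smartfs.file(absolutePath).encoding('utf8').write(backupContent);
};
```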
@@ -13,12 +13,12 @@ const filesToDelete = [

 export const run = async (projectArg: Project) => {
   for (const relativeFilePath of filesToDelete) {
-    const fileExists = plugins.smartfile.fs.fileExistsSync(relativeFilePath);
+    const fileExists = await plugins.smartfs.file(relativeFilePath).exists();
     if (fileExists) {
       logger.log('info', `Found ${relativeFilePath}! Removing it!`);
-      plugins.smartfile.fs.removeSync(
-        plugins.path.join(paths.cwd, relativeFilePath),
-      );
+      await plugins.smartfs
+        .file(plugins.path.join(paths.cwd, relativeFilePath))
+        .delete();
     } else {
       logger.log('info', `Project is free of ${relativeFilePath}`);
     }
@@ -24,7 +24,12 @@ export const run = async (projectArg: Project) => {

   try {
     // Handle glob patterns
-    const files = await plugins.smartfile.fs.listFileTree('.', pattern.from);
+    const entries = await plugins.smartfs
+      .directory('.')
+      .recursive()
+      .filter(pattern.from)
+      .list();
+    const files = entries.map((entry) => entry.path);

     for (const file of files) {
       const sourcePath = file;
@@ -46,10 +51,13 @@ export const run = async (projectArg: Project) => {
       }

       // Ensure destination directory exists
-      await plugins.smartfile.fs.ensureDir(plugins.path.dirname(destPath));
+      await plugins.smartfs
+        .directory(plugins.path.dirname(destPath))
+        .recursive()
+        .create();

       // Copy file
-      await plugins.smartfile.fs.copy(sourcePath, destPath);
+      await plugins.smartfs.file(sourcePath).copy(destPath);
       logger.log('info', `Copied ${sourcePath} to ${destPath}`);
     }
   } catch (error) {
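The listFileTree replacement above is the one structural change in this file: listing now yields entry objects rather than bare path strings, so call sites map out entry.path. A small sketch of the pattern, assuming the builder chain shown in the hunk:

```typescript
// Sketch; assumes the .directory().recursive().filter(glob).list() chain used above.
import * as smartfs from '@push.rocks/smartfs';

const listMatchingFiles = async (glob: string): Promise<string[]> => {
  const entries = await smartfs
    .directory('.')
    .recursive()
    .filter(glob)
    .list();
  // entries carry metadata; the old API returned bare path strings
  return entries.map((entry) => entry.path);
};
```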
@@ -7,13 +7,15 @@ import { logger } from '../gitzone.logging.js';
 const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');

 export const run = async (projectArg: Project) => {
-  const gitignoreExists = await plugins.smartfile.fs.fileExists(gitignorePath);
+  const gitignoreExists = await plugins.smartfs.file(gitignorePath).exists();
   let customContent = '';

   if (gitignoreExists) {
     // lets get the existing gitignore file
-    const existingGitIgnoreString =
-      plugins.smartfile.fs.toStringSync(gitignorePath);
+    const existingGitIgnoreString = (await plugins.smartfs
+      .file(gitignorePath)
+      .encoding('utf8')
+      .read()) as string;

     // Check for different custom section markers
     const customMarkers = ['#------# custom', '# custom'];
@@ -34,12 +36,17 @@ export const run = async (projectArg: Project) => {

   // Append the custom content if it exists
   if (customContent) {
-    const newGitignoreContent =
-      plugins.smartfile.fs.toStringSync(gitignorePath);
+    const newGitignoreContent = (await plugins.smartfs
+      .file(gitignorePath)
+      .encoding('utf8')
+      .read()) as string;
     // The template already ends with "#------# custom", so just append the content
     const finalContent =
       newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
-    await plugins.smartfile.fs.toFs(finalContent, gitignorePath);
+    await plugins.smartfs
+      .file(gitignorePath)
+      .encoding('utf8')
+      .write(finalContent);
     logger.log('info', 'Updated .gitignore while preserving custom section!');
   } else {
     logger.log('info', 'Added a .gitignore!');
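The .gitignore update is a plain read-modify-write cycle on the new API. A sketch of the custom-section handling, assuming the same fluent calls; the helper name is illustrative:

```typescript
// Sketch of the read-modify-write cycle; appendCustomSection is illustrative.
import * as smartfs from '@push.rocks/smartfs';

const appendCustomSection = async (
  gitignorePath: string,
  customContent: string,
) => {
  const current = (await smartfs
    .file(gitignorePath)
    .encoding('utf8')
    .read()) as string;
  // The template already ends with "#------# custom", so only the body is appended
  const finalContent = current.trimEnd() + '\n' + customContent + '\n';
  await smartfs.file(gitignorePath).encoding('utf8').write(finalContent);
};
```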
@@ -7,9 +7,9 @@ import { logger } from '../gitzone.logging.js';
 const incompatibleLicenses: string[] = ['AGPL', 'GPL', 'SSPL'];

 export const run = async (projectArg: Project) => {
-  const nodeModulesInstalled = await plugins.smartfile.fs.isDirectory(
-    plugins.path.join(paths.cwd, 'node_modules'),
-  );
+  const nodeModulesInstalled = await plugins.smartfs
+    .directory(plugins.path.join(paths.cwd, 'node_modules'))
+    .exists();
   if (!nodeModulesInstalled) {
     logger.log('warn', 'No node_modules found. Skipping license check');
     return;
@@ -154,10 +154,11 @@ export const run = async (projectArg: Project) => {
   ];

   // check for dependencies
+  // Note: @push.rocks/tapbundle is deprecated - use @git.zone/tstest/tapbundle instead
   await ensureDependency(
     packageJson,
     'devDep',
-    'latest',
+    'exclude',
     '@push.rocks/tapbundle',
   );
   await ensureDependency(
@@ -174,9 +175,11 @@ export const run = async (projectArg: Project) => {
   );

   // set overrides
-  const overrides = plugins.smartfile.fs.toObjectSync(
-    plugins.path.join(paths.assetsDir, 'overrides.json'),
-  );
+  const overridesContent = (await plugins.smartfs
+    .file(plugins.path.join(paths.assetsDir, 'overrides.json'))
+    .encoding('utf8')
+    .read()) as string;
+  const overrides = JSON.parse(overridesContent);
   packageJson.pnpm = packageJson.pnpm || {};
   packageJson.pnpm.overrides = overrides;
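Because smartfs has no direct counterpart to smartfile's toObjectSync, the overrides file is now read as a string and parsed explicitly. The same two-step pattern recurs in the Meta class further down; a generic sketch (readJson is an illustrative helper, not part of either library):

```typescript
// Sketch; readJson is an illustrative helper, not part of either library.
import * as smartfs from '@push.rocks/smartfs';

const readJson = async <T = unknown>(filePath: string): Promise<T> => {
  const content = (await smartfs
    .file(filePath)
    .encoding('utf8')
    .read()) as string;
  return JSON.parse(content) as T;
};
```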
@@ -6,25 +6,22 @@ export const run = async () => {
   const readmeHintsPath = plugins.path.join(paths.cwd, 'readme.hints.md');

   // Check and initialize readme.md if it doesn't exist
-  const readmeExists = await plugins.smartfile.fs.fileExists(readmePath);
+  const readmeExists = await plugins.smartfs.file(readmePath).exists();
   if (!readmeExists) {
-    await plugins.smartfile.fs.toFs(
-      '# Project Readme\n\nThis is the initial readme file.',
-      readmePath,
-    );
+    await plugins.smartfs.file(readmePath)
+      .encoding('utf8')
+      .write('# Project Readme\n\nThis is the initial readme file.');
     console.log('Initialized readme.md');
   } else {
     console.log('readme.md already exists');
   }

   // Check and initialize readme.hints.md if it doesn't exist
-  const readmeHintsExists =
-    await plugins.smartfile.fs.fileExists(readmeHintsPath);
+  const readmeHintsExists = await plugins.smartfs.file(readmeHintsPath).exists();
   if (!readmeHintsExists) {
-    await plugins.smartfile.fs.toFs(
-      '# Project Readme Hints\n\nThis is the initial readme hints file.',
-      readmeHintsPath,
-    );
+    await plugins.smartfs.file(readmeHintsPath)
+      .encoding('utf8')
+      .write('# Project Readme Hints\n\nThis is the initial readme hints file.');
     console.log('Initialized readme.hints.md');
   } else {
     console.log('readme.hints.md already exists');
@@ -10,12 +10,6 @@ import { Project } from '../classes.project.js';
 export const run = async (project: Project) => {
   const templateModule = await import('../mod_template/index.js');

-  // update tslint
-  // getting template
-  const tslintTemplate = await templateModule.getTemplate('tslint');
-  await tslintTemplate.writeToDisk(paths.cwd);
-  logger.log('info', 'Updated tslint.json!');
-
   // update vscode
   const vscodeTemplate = await templateModule.getTemplate('vscode');
   await vscodeTemplate.writeToDisk(paths.cwd);
@@ -7,10 +7,11 @@ import { Project } from '../classes.project.js';
 export const run = async (projectArg: Project) => {
   // lets care about tsconfig.json
   logger.log('info', 'Formatting tsconfig.json...');
-  const tsconfigSmartfile = await plugins.smartfile.SmartFile.fromFilePath(
+  const factory = plugins.smartfile.SmartFileFactory.nodeFs();
+  const tsconfigSmartfile = await factory.fromFilePath(
     plugins.path.join(paths.cwd, 'tsconfig.json'),
   );
-  const tsconfigObject = JSON.parse(tsconfigSmartfile.contentBuffer.toString());
+  const tsconfigObject = JSON.parse(tsconfigSmartfile.parseContentAsString());
   tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {};
   tsconfigObject.compilerOptions.baseUrl = '.';
   tsconfigObject.compilerOptions.paths = {};
@@ -23,8 +24,8 @@ export const run = async (projectArg: Project) => {
       `./${publishModule}/index.js`,
     ];
   }
-  tsconfigSmartfile.setContentsFromString(
-    JSON.stringify(tsconfigObject, null, 2),
-  );
+  await tsconfigSmartfile.editContentAsString(async () => {
+    return JSON.stringify(tsconfigObject, null, 2);
+  });
   await tsconfigSmartfile.write();
 };
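This formatter stays on smartfile rather than smartfs, but moves to its factory API: SmartFile instances come from a backend-specific factory, and edits go through an async callback before write(). A sketch under the assumption that the factory is exported from the same smartfile module the plugins namespace wraps:

```typescript
// Sketch of the factory-based SmartFile flow shown in the hunk above.
// The import path is assumed; the diff only shows plugins.smartfile.
import * as smartfile from '@push.rocks/smartfile';

const reformatTsconfig = async (tsconfigPath: string) => {
  const factory = smartfile.SmartFileFactory.nodeFs();
  const file = await factory.fromFilePath(tsconfigPath);
  const tsconfig = JSON.parse(file.parseContentAsString());
  tsconfig.compilerOptions = tsconfig.compilerOptions || {};
  tsconfig.compilerOptions.baseUrl = '.';
  // editContentAsString replaces the old setContentsFromString setter
  await file.editContentAsString(async () => JSON.stringify(tsconfig, null, 2));
  await file.write();
};
```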
@@ -20,7 +20,7 @@ export class CleanupFormatter extends BaseFormatter {
     ];

     for (const file of filesToRemove) {
-      const exists = await plugins.smartfile.fs.fileExists(file);
+      const exists = await plugins.smartfs.file(file).exists();
       if (exists) {
         changes.push({
           type: 'delete',
@@ -41,16 +41,23 @@ export class PrettierFormatter extends BaseFormatter {
     // Add files from TypeScript directories
     for (const dir of includeDirs) {
       const globPattern = `${dir}/**/*.${extensions}`;
-      const dirFiles = await plugins.smartfile.fs.listFileTree(
-        '.',
-        globPattern,
-      );
+      const dirEntries = await plugins.smartfs
+        .directory('.')
+        .recursive()
+        .filter(globPattern)
+        .list();
+      const dirFiles = dirEntries.map((entry) => entry.path);
       allFiles.push(...dirFiles);
     }

     // Add root config files
     for (const pattern of rootConfigFiles) {
-      const rootFiles = await plugins.smartfile.fs.listFileTree('.', pattern);
+      const rootEntries = await plugins.smartfs
+        .directory('.')
+        .recursive()
+        .filter(pattern)
+        .list();
+      const rootFiles = rootEntries.map((entry) => entry.path);
       // Only include files at root level (no slashes in path)
       const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
       allFiles.push(...rootLevelFiles);
@@ -66,8 +73,8 @@ export class PrettierFormatter extends BaseFormatter {
     const validFiles: string[] = [];
     for (const file of files) {
       try {
-        const stats = await plugins.smartfile.fs.stat(file);
-        if (!stats.isDirectory()) {
+        const stats = await plugins.smartfs.file(file).stat();
+        if (!stats.isDirectory) {
           validFiles.push(file);
         }
       } catch (error) {
@@ -148,7 +155,10 @@ export class PrettierFormatter extends BaseFormatter {
     }

     // Read current content
-    const content = plugins.smartfile.fs.toStringSync(change.path);
+    const content = (await plugins.smartfs
+      .file(change.path)
+      .encoding('utf8')
+      .read()) as string;

     // Format with prettier
     const prettier = await import('prettier');
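Note the stat subtlety these hunks carry: with smartfs the stat result exposes isDirectory as a boolean property, so keeping the old method-call form would throw at runtime, and every call site in this compare drops the parentheses. A sketch of the corrected filter:

```typescript
// Sketch; mirrors the directory-filtering loop in the hunk above.
import * as smartfs from '@push.rocks/smartfs';

const keepOnlyFiles = async (paths: string[]): Promise<string[]> => {
  const files: string[] = [];
  for (const p of paths) {
    try {
      const stats = await smartfs.file(p).stat();
      // property access, not a method call as on Node's fs.Stats
      if (!stats.isDirectory) {
        files.push(p);
      }
    } catch {
      // unreadable entries are skipped, matching the formatter's behavior
    }
  }
  return files;
};
```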
@@ -101,7 +101,12 @@ export let run = async (
   // Plan phase
   logger.log('info', 'Analyzing project for format operations...');
   let plan = options.fromPlan
-    ? JSON.parse(await plugins.smartfile.fs.toStringSync(options.fromPlan))
+    ? JSON.parse(
+        (await plugins.smartfs
+          .file(options.fromPlan)
+          .encoding('utf8')
+          .read()) as string,
+      )
     : await planner.planFormat(activeFormatters);

   // Display plan
@@ -109,10 +114,10 @@ export let run = async (

   // Save plan if requested
   if (options.savePlan) {
-    await plugins.smartfile.memory.toFs(
-      JSON.stringify(plan, null, 2),
-      options.savePlan,
-    );
+    await plugins.smartfs
+      .file(options.savePlan)
+      .encoding('utf8')
+      .write(JSON.stringify(plan, null, 2));
     logger.log('info', `Plan saved to ${options.savePlan}`);
   }
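Plan persistence in the format command reduces to the same read/write chain: options.savePlan serializes the plan, options.fromPlan parses it back. A sketch of the round trip; the IPlan shape is illustrative:

```typescript
// Sketch of the plan persistence round trip shown above; IPlan is illustrative.
import * as smartfs from '@push.rocks/smartfs';

interface IPlan {
  changes: unknown[];
}

const savePlan = async (plan: IPlan, savePath: string) =>
  smartfs.file(savePath).encoding('utf8').write(JSON.stringify(plan, null, 2));

const loadPlan = async (fromPath: string): Promise<IPlan> =>
  JSON.parse(
    (await smartfs.file(fromPath).encoding('utf8').read()) as string,
  );
```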
@@ -48,15 +48,17 @@ export class Meta {
   public async readDirectory() {
     await this.syncToRemote(true);
     logger.log('info', `reading directory`);
-    const metaFileExists = plugins.smartfile.fs.fileExistsSync(
-      this.filePaths.metaJson,
-    );
+    const metaFileExists = await plugins.smartfs
+      .file(this.filePaths.metaJson)
+      .exists();
     if (!metaFileExists) {
       throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
     }
-    this.metaRepoData = plugins.smartfile.fs.toObjectSync(
-      this.filePaths.metaJson,
-    );
+    const content = (await plugins.smartfs
+      .file(this.filePaths.metaJson)
+      .encoding('utf8')
+      .read()) as string;
+    this.metaRepoData = JSON.parse(content);
   }

   /**
@@ -78,15 +80,15 @@ export class Meta {
    */
   public async writeToDisk() {
     // write .meta.json to disk
-    plugins.smartfile.memory.toFsSync(
-      JSON.stringify(this.metaRepoData, null, 2),
-      this.filePaths.metaJson,
-    );
+    await plugins.smartfs
+      .file(this.filePaths.metaJson)
+      .encoding('utf8')
+      .write(JSON.stringify(this.metaRepoData, null, 2));
     // write .gitignore to disk
-    plugins.smartfile.memory.toFsSync(
-      await this.generateGitignore(),
-      this.filePaths.gitIgnore,
-    );
+    await plugins.smartfs
+      .file(this.filePaths.gitIgnore)
+      .encoding('utf8')
+      .write(await this.generateGitignore());
   }

   /**
@@ -112,10 +114,25 @@ export class Meta {
    */
   public async updateLocalRepos() {
     await this.syncToRemote();
-    const projects = plugins.smartfile.fs.toObjectSync(
-      this.filePaths.metaJson,
-    ).projects;
-    const preExistingFolders = plugins.smartfile.fs.listFoldersSync(this.cwd);
+    const metaContent = (await plugins.smartfs
+      .file(this.filePaths.metaJson)
+      .encoding('utf8')
+      .read()) as string;
+    const projects = JSON.parse(metaContent).projects;
+    const entries = await plugins.smartfs.directory(this.cwd).list();
+    const preExistingFolders: string[] = [];
+    for (const entry of entries) {
+      try {
+        const stats = await plugins.smartfs
+          .file(plugins.path.join(this.cwd, entry.path))
+          .stat();
+        if (stats.isDirectory) {
+          preExistingFolders.push(entry.name);
+        }
+      } catch {
+        // Skip entries that can't be accessed
+      }
+    }
     for (const preExistingFolderArg of preExistingFolders) {
       if (
         preExistingFolderArg !== '.git' &&
@@ -143,9 +160,17 @@ export class Meta {
     await this.sortMetaRepoData();
     const missingRepos: string[] = [];
     for (const key of Object.keys(this.metaRepoData.projects)) {
-      plugins.smartfile.fs.isDirectory(key)
-        ? logger.log('ok', `${key} -> is already cloned`)
-        : missingRepos.push(key);
+      const fullPath = plugins.path.join(this.cwd, key);
+      try {
+        const stats = await plugins.smartfs.file(fullPath).stat();
+        if (stats.isDirectory) {
+          logger.log('ok', `${key} -> is already cloned`);
+        } else {
+          missingRepos.push(key);
+        }
+      } catch {
+        missingRepos.push(key);
+      }
     }

     logger.log('info', `found ${missingRepos.length} missing repos`);
@@ -165,7 +190,20 @@ export class Meta {
     await this.syncToRemote();

     // go recursive
-    const folders = await plugins.smartfile.fs.listFolders(this.cwd);
+    const listEntries = await plugins.smartfs.directory(this.cwd).list();
+    const folders: string[] = [];
+    for (const entry of listEntries) {
+      try {
+        const stats = await plugins.smartfs
+          .file(plugins.path.join(this.cwd, entry.path))
+          .stat();
+        if (stats.isDirectory) {
+          folders.push(entry.name);
+        }
+      } catch {
+        // Skip entries that can't be accessed
+      }
+    }
     const childMetaRepositories: string[] = [];
     for (const folder of folders) {
       logger.log('info', folder);
@@ -180,27 +218,31 @@ export class Meta {
    */
   public async initProject() {
     await this.syncToRemote(true);
-    const fileExists = await plugins.smartfile.fs.fileExists(
-      this.filePaths.metaJson,
-    );
+    const fileExists = await plugins.smartfs
+      .file(this.filePaths.metaJson)
+      .exists();
     if (!fileExists) {
-      await plugins.smartfile.memory.toFs(
-        JSON.stringify({
-          projects: {},
-        }),
-        this.filePaths.metaJson,
-      );
+      await plugins.smartfs
+        .file(this.filePaths.metaJson)
+        .encoding('utf8')
+        .write(
+          JSON.stringify({
+            projects: {},
+          }),
+        );
       logger.log(
         `success`,
         `created a new .meta.json in directory ${this.cwd}`,
       );
-      await plugins.smartfile.memory.toFs(
-        JSON.stringify({
-          name: this.dirName,
-          version: '1.0.0',
-        }),
-        this.filePaths.packageJson,
-      );
+      await plugins.smartfs
+        .file(this.filePaths.packageJson)
+        .encoding('utf8')
+        .write(
+          JSON.stringify({
+            name: this.dirName,
+            version: '1.0.0',
+          }),
+        );
       logger.log(
         `success`,
         `created a new package.json in directory ${this.cwd}`,
@@ -264,9 +306,10 @@ export class Meta {
     await this.writeToDisk();

     logger.log('info', 'removing directory from cwd');
-    await plugins.smartfile.fs.remove(
-      plugins.path.join(paths.cwd, projectNameArg),
-    );
+    await plugins.smartfs
+      .directory(plugins.path.join(paths.cwd, projectNameArg))
+      .recursive()
+      .delete();
     await this.updateLocalRepos();
   }
 }
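smartfs also appears to lack a listFolders equivalent, so the Meta class now lists entries and stats each one, skipping anything unreadable. The repeated loop, extracted as a sketch (helper name illustrative):

```typescript
// Sketch of the folder-enumeration pattern repeated in the Meta hunks above.
import * as path from 'path';
import * as smartfs from '@push.rocks/smartfs';

const listFolders = async (cwd: string): Promise<string[]> => {
  const entries = await smartfs.directory(cwd).list();
  const folders: string[] = [];
  for (const entry of entries) {
    try {
      const stats = await smartfs.file(path.join(cwd, entry.path)).stat();
      if (stats.isDirectory) {
        folders.push(entry.name);
      }
    } catch {
      // Skip entries that can't be accessed
    }
  }
  return folders;
};
```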
190
ts/mod_services/classes.globalregistry.ts
Normal file
@@ -0,0 +1,190 @@
+import * as plugins from '../plugins.js';
+import { DockerContainer } from './classes.dockercontainer.js';
+import { logger } from '../gitzone.logging.js';
+
+export interface IRegisteredProject {
+  projectPath: string;
+  projectName: string;
+  containers: {
+    mongo?: string;
+    minio?: string;
+    elasticsearch?: string;
+  };
+  ports: {
+    mongo?: number;
+    s3?: number;
+    s3Console?: number;
+    elasticsearch?: number;
+  };
+  enabledServices: string[];
+  lastActive: number;
+}
+
+export interface IGlobalRegistryData {
+  projects: { [projectPath: string]: IRegisteredProject };
+}
+
+export class GlobalRegistry {
+  private static instance: GlobalRegistry | null = null;
+  private kvStore: plugins.npmextra.KeyValueStore<IGlobalRegistryData>;
+  private docker: DockerContainer;
+
+  private constructor() {
+    this.kvStore = new plugins.npmextra.KeyValueStore({
+      typeArg: 'userHomeDir',
+      identityArg: 'gitzone-services',
+    });
+    this.docker = new DockerContainer();
+  }
+
+  /**
+   * Get the singleton instance
+   */
+  public static getInstance(): GlobalRegistry {
+    if (!GlobalRegistry.instance) {
+      GlobalRegistry.instance = new GlobalRegistry();
+    }
+    return GlobalRegistry.instance;
+  }
+
+  /**
+   * Register or update a project in the global registry
+   */
+  public async registerProject(data: Omit<IRegisteredProject, 'lastActive'>): Promise<void> {
+    const allData = await this.kvStore.readAll();
+    const projects = allData.projects || {};
+
+    projects[data.projectPath] = {
+      ...data,
+      lastActive: Date.now(),
+    };
+
+    await this.kvStore.writeKey('projects', projects);
+  }
+
+  /**
+   * Remove a project from the registry
+   */
+  public async unregisterProject(projectPath: string): Promise<void> {
+    const allData = await this.kvStore.readAll();
+    const projects = allData.projects || {};
+
+    if (projects[projectPath]) {
+      delete projects[projectPath];
+      await this.kvStore.writeKey('projects', projects);
+    }
+  }
+
+  /**
+   * Update the lastActive timestamp for a project
+   */
+  public async touchProject(projectPath: string): Promise<void> {
+    const allData = await this.kvStore.readAll();
+    const projects = allData.projects || {};
+
+    if (projects[projectPath]) {
+      projects[projectPath].lastActive = Date.now();
+      await this.kvStore.writeKey('projects', projects);
+    }
+  }
+
+  /**
+   * Get all registered projects
+   */
+  public async getAllProjects(): Promise<{ [path: string]: IRegisteredProject }> {
+    const allData = await this.kvStore.readAll();
+    return allData.projects || {};
+  }
+
+  /**
+   * Check if a project is registered
+   */
+  public async isRegistered(projectPath: string): Promise<boolean> {
+    const projects = await this.getAllProjects();
+    return !!projects[projectPath];
+  }
+
+  /**
+   * Get status of all containers across all registered projects
+   */
+  public async getGlobalStatus(): Promise<
+    Array<{
+      projectPath: string;
+      projectName: string;
+      containers: Array<{ name: string; status: string }>;
+      lastActive: number;
+    }>
+  > {
+    const projects = await this.getAllProjects();
+    const result: Array<{
+      projectPath: string;
+      projectName: string;
+      containers: Array<{ name: string; status: string }>;
+      lastActive: number;
+    }> = [];
+
+    for (const [path, project] of Object.entries(projects)) {
+      const containerStatuses: Array<{ name: string; status: string }> = [];
+
+      for (const containerName of Object.values(project.containers)) {
+        if (containerName) {
+          const status = await this.docker.getStatus(containerName);
+          containerStatuses.push({ name: containerName, status });
+        }
+      }
+
+      result.push({
+        projectPath: path,
+        projectName: project.projectName,
+        containers: containerStatuses,
+        lastActive: project.lastActive,
+      });
+    }
+
+    return result;
+  }
+
+  /**
+   * Stop all containers across all registered projects
+   */
+  public async stopAll(): Promise<{ stopped: string[]; failed: string[] }> {
+    const projects = await this.getAllProjects();
+    const stopped: string[] = [];
+    const failed: string[] = [];
+
+    for (const project of Object.values(projects)) {
+      for (const containerName of Object.values(project.containers)) {
+        if (containerName) {
+          const status = await this.docker.getStatus(containerName);
+          if (status === 'running') {
+            if (await this.docker.stop(containerName)) {
+              stopped.push(containerName);
+            } else {
+              failed.push(containerName);
+            }
+          }
+        }
+      }
+    }
+
+    return { stopped, failed };
+  }
+
+  /**
+   * Remove stale registry entries (projects that no longer exist on disk)
+   */
+  public async cleanup(): Promise<string[]> {
+    const projects = await this.getAllProjects();
+    const removed: string[] = [];
+
+    for (const projectPath of Object.keys(projects)) {
+      const exists = await plugins.smartfs.directory(projectPath).exists();
+      if (!exists) {
+        await this.unregisterProject(projectPath);
+        removed.push(projectPath);
+      }
+    }
+
+    return removed;
+  }
+}
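The new GlobalRegistry keeps one machine-wide record of every gitzone services project in an npmextra KeyValueStore scoped to the user's home directory. A hypothetical consumer, built only from the methods defined above; the project values are made up:

```typescript
// Hypothetical consumer of the GlobalRegistry defined above.
import { GlobalRegistry } from './classes.globalregistry.js';

const registry = GlobalRegistry.getInstance();

await registry.registerProject({
  projectPath: '/home/user/projects/myapp', // illustrative path
  projectName: 'myapp',
  containers: { mongo: 'myapp-mongodb', minio: 'myapp-minio' },
  ports: { mongo: 27017, s3: 9000, s3Console: 9001 },
  enabledServices: ['mongo', 'minio'],
});

// Drop registry entries whose project directories no longer exist on disk
const removed = await registry.cleanup();
console.log(`pruned ${removed.length} stale projects`);
```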
@@ -19,6 +19,11 @@ export interface IServiceConfig {
|
|||||||
S3_BUCKET: string;
|
S3_BUCKET: string;
|
||||||
S3_ENDPOINT: string;
|
S3_ENDPOINT: string;
|
||||||
S3_USESSL: boolean;
|
S3_USESSL: boolean;
|
||||||
|
ELASTICSEARCH_HOST: string;
|
||||||
|
ELASTICSEARCH_PORT: string;
|
||||||
|
ELASTICSEARCH_USER: string;
|
||||||
|
ELASTICSEARCH_PASS: string;
|
||||||
|
ELASTICSEARCH_URL: string;
|
||||||
}
|
}
|
||||||
|
|
||||||
export class ServiceConfiguration {
|
export class ServiceConfiguration {
|
||||||
@@ -61,10 +66,10 @@ export class ServiceConfiguration {
|
|||||||
* Save the configuration to file
|
* Save the configuration to file
|
||||||
*/
|
*/
|
||||||
public async saveConfig(): Promise<void> {
|
public async saveConfig(): Promise<void> {
|
||||||
await plugins.smartfile.memory.toFs(
|
await plugins.smartfs
|
||||||
JSON.stringify(this.config, null, 2),
|
.file(this.configPath)
|
||||||
this.configPath
|
.encoding('utf8')
|
||||||
);
|
.write(JSON.stringify(this.config, null, 2));
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
@@ -72,21 +77,24 @@ export class ServiceConfiguration {
|
|||||||
*/
|
*/
|
||||||
private async ensureNogitDirectory(): Promise<void> {
|
private async ensureNogitDirectory(): Promise<void> {
|
||||||
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
|
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
|
||||||
await plugins.smartfile.fs.ensureDir(nogitPath);
|
await plugins.smartfs.directory(nogitPath).recursive().create();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Check if configuration file exists
|
* Check if configuration file exists
|
||||||
*/
|
*/
|
||||||
private async configExists(): Promise<boolean> {
|
private async configExists(): Promise<boolean> {
|
||||||
return plugins.smartfile.fs.fileExists(this.configPath);
|
return plugins.smartfs.file(this.configPath).exists();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Load configuration from file
|
* Load configuration from file
|
||||||
*/
|
*/
|
||||||
private async loadConfig(): Promise<void> {
|
private async loadConfig(): Promise<void> {
|
||||||
const configContent = plugins.smartfile.fs.toStringSync(this.configPath);
|
const configContent = (await plugins.smartfs
|
||||||
|
.file(this.configPath)
|
||||||
|
.encoding('utf8')
|
||||||
|
.read()) as string;
|
||||||
this.config = JSON.parse(configContent);
|
this.config = JSON.parse(configContent);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -94,16 +102,16 @@ export class ServiceConfiguration {
|
|||||||
* Create default configuration
|
* Create default configuration
|
||||||
*/
|
*/
|
||||||
private async createDefaultConfig(): Promise<void> {
|
private async createDefaultConfig(): Promise<void> {
|
||||||
const projectName = helpers.getProjectName();
|
const projectName = await helpers.getProjectName();
|
||||||
const mongoPort = await helpers.getRandomAvailablePort();
|
const mongoPort = await helpers.getRandomAvailablePort();
|
||||||
const s3Port = await helpers.getRandomAvailablePort();
|
const s3Port = await helpers.getRandomAvailablePort();
|
||||||
let s3ConsolePort = s3Port + 1;
|
let s3ConsolePort = s3Port + 1;
|
||||||
|
|
||||||
// Ensure console port is also available
|
// Ensure console port is also available
|
||||||
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||||
s3ConsolePort++;
|
s3ConsolePort++;
|
||||||
}
|
}
|
||||||
|
|
||||||
const mongoUser = 'defaultadmin';
|
const mongoUser = 'defaultadmin';
|
||||||
const mongoPass = 'defaultpass';
|
const mongoPass = 'defaultpass';
|
||||||
const mongoHost = 'localhost';
|
const mongoHost = 'localhost';
|
||||||
@@ -111,7 +119,11 @@ export class ServiceConfiguration {
|
|||||||
const mongoPortStr = mongoPort.toString();
|
const mongoPortStr = mongoPort.toString();
|
||||||
const s3Host = 'localhost';
|
const s3Host = 'localhost';
|
||||||
const s3PortStr = s3Port.toString();
|
const s3PortStr = s3Port.toString();
|
||||||
|
const esHost = 'localhost';
|
||||||
|
const esPort = '9200';
|
||||||
|
const esUser = 'elastic';
|
||||||
|
const esPass = 'elastic';
|
||||||
|
|
||||||
this.config = {
|
this.config = {
|
||||||
PROJECT_NAME: projectName,
|
PROJECT_NAME: projectName,
|
||||||
MONGODB_HOST: mongoHost,
|
MONGODB_HOST: mongoHost,
|
||||||
@@ -127,22 +139,28 @@ export class ServiceConfiguration {
|
|||||||
S3_SECRETKEY: 'defaultpass',
|
S3_SECRETKEY: 'defaultpass',
|
||||||
S3_BUCKET: `${projectName}-documents`,
|
S3_BUCKET: `${projectName}-documents`,
|
||||||
S3_ENDPOINT: s3Host,
|
S3_ENDPOINT: s3Host,
|
||||||
S3_USESSL: false
|
S3_USESSL: false,
|
||||||
|
ELASTICSEARCH_HOST: esHost,
|
||||||
|
ELASTICSEARCH_PORT: esPort,
|
||||||
|
ELASTICSEARCH_USER: esUser,
|
||||||
|
ELASTICSEARCH_PASS: esPass,
|
||||||
|
ELASTICSEARCH_URL: `http://${esUser}:${esPass}@${esHost}:${esPort}`
|
||||||
};
|
};
|
||||||
|
|
||||||
await this.saveConfig();
|
await this.saveConfig();
|
||||||
|
|
||||||
logger.log('ok', '✅ Created .nogit/env.json with project defaults');
|
logger.log('ok', '✅ Created .nogit/env.json with project defaults');
|
||||||
logger.log('info', `📍 MongoDB port: ${mongoPort}`);
|
logger.log('info', `📍 MongoDB port: ${mongoPort}`);
|
||||||
logger.log('info', `📍 S3 API port: ${s3Port}`);
|
logger.log('info', `📍 S3 API port: ${s3Port}`);
|
||||||
logger.log('info', `📍 S3 Console port: ${s3ConsolePort}`);
|
logger.log('info', `📍 S3 Console port: ${s3ConsolePort}`);
|
||||||
|
logger.log('info', `📍 Elasticsearch port: ${esPort}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Update missing fields in existing configuration
|
* Update missing fields in existing configuration
|
||||||
*/
|
*/
|
||||||
private async updateMissingFields(): Promise<void> {
|
private async updateMissingFields(): Promise<void> {
|
||||||
const projectName = helpers.getProjectName();
|
const projectName = await helpers.getProjectName();
|
||||||
let updated = false;
|
let updated = false;
|
||||||
const fieldsAdded: string[] = [];
|
const fieldsAdded: string[] = [];
|
||||||
|
|
||||||
@@ -249,7 +267,39 @@ export class ServiceConfiguration {
|
|||||||
fieldsAdded.push('S3_ENDPOINT');
|
fieldsAdded.push('S3_ENDPOINT');
|
||||||
updated = true;
|
updated = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_HOST) {
|
||||||
|
this.config.ELASTICSEARCH_HOST = 'localhost';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_HOST');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_PORT) {
|
||||||
|
this.config.ELASTICSEARCH_PORT = '9200';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_PORT');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_USER) {
|
||||||
|
this.config.ELASTICSEARCH_USER = 'elastic';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_USER');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!this.config.ELASTICSEARCH_PASS) {
|
||||||
|
this.config.ELASTICSEARCH_PASS = 'elastic';
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_PASS');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Always update ELASTICSEARCH_URL based on current settings
|
||||||
|
const oldEsUrl = this.config.ELASTICSEARCH_URL;
|
||||||
|
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||||
|
if (oldEsUrl !== this.config.ELASTICSEARCH_URL) {
|
||||||
|
fieldsAdded.push('ELASTICSEARCH_URL');
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
|
||||||
if (updated) {
|
if (updated) {
|
||||||
await this.saveConfig();
|
await this.saveConfig();
|
||||||
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
||||||
@@ -272,17 +322,19 @@ export class ServiceConfiguration {
|
|||||||
public getContainerNames() {
|
public getContainerNames() {
|
||||||
return {
|
return {
|
||||||
mongo: `${this.config.PROJECT_NAME}-mongodb`,
|
mongo: `${this.config.PROJECT_NAME}-mongodb`,
|
||||||
minio: `${this.config.PROJECT_NAME}-minio`
|
minio: `${this.config.PROJECT_NAME}-minio`,
|
||||||
|
elasticsearch: `${this.config.PROJECT_NAME}-elasticsearch`
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get data directories
|
* Get data directories
|
||||||
*/
|
*/
|
||||||
public getDataDirectories() {
|
public getDataDirectories() {
|
||||||
return {
|
return {
|
||||||
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
|
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
|
||||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata')
|
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata'),
|
||||||
|
elasticsearch: plugins.path.join(process.cwd(), '.nogit', 'esdata')
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -330,12 +382,27 @@ export class ServiceConfiguration {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Check Elasticsearch container
|
||||||
|
const esStatus = await this.docker.getStatus(containers.elasticsearch);
|
||||||
|
if (esStatus !== 'not_exists') {
|
||||||
|
const portMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||||
|
if (portMappings && portMappings['9200']) {
|
||||||
|
const dockerPort = portMappings['9200'];
|
||||||
|
if (this.config.ELASTICSEARCH_PORT !== dockerPort) {
|
||||||
|
logger.log('note', `📍 Syncing Elasticsearch port from Docker: ${dockerPort}`);
|
||||||
|
this.config.ELASTICSEARCH_PORT = dockerPort;
|
||||||
|
updated = true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
if (updated) {
|
if (updated) {
|
||||||
// Update derived fields
|
// Update derived fields
|
||||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||||
|
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||||
|
|
||||||
await this.saveConfig();
|
await this.saveConfig();
|
||||||
logger.log('ok', '✅ Configuration synced with Docker containers');
|
logger.log('ok', '✅ Configuration synced with Docker containers');
|
||||||
}
|
}
|
||||||
@@ -347,11 +414,12 @@ export class ServiceConfiguration {
|
|||||||
public async validateAndUpdatePorts(): Promise<boolean> {
|
public async validateAndUpdatePorts(): Promise<boolean> {
|
||||||
let updated = false;
|
let updated = false;
|
||||||
const containers = this.getContainerNames();
|
const containers = this.getContainerNames();
|
||||||
|
|
||||||
// Check if containers exist - if they do, ports are fine
|
// Check if containers exist - if they do, ports are fine
|
||||||
const mongoExists = await this.docker.exists(containers.mongo);
|
const mongoExists = await this.docker.exists(containers.mongo);
|
||||||
const minioExists = await this.docker.exists(containers.minio);
|
const minioExists = await this.docker.exists(containers.minio);
|
||||||
|
const esExists = await this.docker.exists(containers.elasticsearch);
|
||||||
|
|
||||||
// Only check port availability if containers don't exist
|
// Only check port availability if containers don't exist
|
||||||
if (!mongoExists) {
|
if (!mongoExists) {
|
||||||
const mongoPort = parseInt(this.config.MONGODB_PORT);
|
const mongoPort = parseInt(this.config.MONGODB_PORT);
|
||||||
@@ -363,11 +431,11 @@ export class ServiceConfiguration {
|
|||||||
updated = true;
|
updated = true;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!minioExists) {
|
if (!minioExists) {
|
||||||
const s3Port = parseInt(this.config.S3_PORT);
|
const s3Port = parseInt(this.config.S3_PORT);
|
||||||
const s3ConsolePort = parseInt(this.config.S3_CONSOLE_PORT);
|
const s3ConsolePort = parseInt(this.config.S3_CONSOLE_PORT);
|
||||||
|
|
||||||
if (!(await helpers.isPortAvailable(s3Port))) {
|
if (!(await helpers.isPortAvailable(s3Port))) {
|
||||||
logger.log('note', `⚠️ S3 API port ${s3Port} is in use, finding new port...`);
|
         logger.log('note', `⚠️ S3 API port ${s3Port} is in use, finding new port...`);
         const newPort = await helpers.getRandomAvailablePort();
@@ -375,7 +443,7 @@ export class ServiceConfiguration {
         logger.log('ok', `✅ New S3 API port: ${newPort}`);
         updated = true;
       }
 
       if (!(await helpers.isPortAvailable(s3ConsolePort))) {
         logger.log('note', `⚠️ S3 Console port ${s3ConsolePort} is in use, finding new port...`);
         let newPort = parseInt(this.config.S3_PORT) + 1;
@@ -387,15 +455,27 @@ export class ServiceConfiguration {
         updated = true;
       }
     }
 
+    if (!esExists) {
+      const esPort = parseInt(this.config.ELASTICSEARCH_PORT);
+      if (!(await helpers.isPortAvailable(esPort))) {
+        logger.log('note', `⚠️ Elasticsearch port ${esPort} is in use, finding new port...`);
+        const newPort = await helpers.getRandomAvailablePort();
+        this.config.ELASTICSEARCH_PORT = newPort.toString();
+        logger.log('ok', `✅ New Elasticsearch port: ${newPort}`);
+        updated = true;
+      }
+    }
 
     if (updated) {
       // Update derived fields
       this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
       this.config.S3_ENDPOINT = this.config.S3_HOST;
+      this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
 
       await this.saveConfig();
     }
 
     return updated;
   }
 
@@ -404,29 +484,35 @@ export class ServiceConfiguration {
    */
   public async reconfigurePorts(): Promise<void> {
     logger.log('note', '🔄 Finding new available ports...');
 
     const mongoPort = await helpers.getRandomAvailablePort();
     const s3Port = await helpers.getRandomAvailablePort();
     let s3ConsolePort = s3Port + 1;
 
     // Ensure console port is also available
     while (!(await helpers.isPortAvailable(s3ConsolePort))) {
       s3ConsolePort++;
     }
 
+    // Elasticsearch uses standard port 9200
+    const esPort = '9200';
 
     this.config.MONGODB_PORT = mongoPort.toString();
     this.config.S3_PORT = s3Port.toString();
     this.config.S3_CONSOLE_PORT = s3ConsolePort.toString();
+    this.config.ELASTICSEARCH_PORT = esPort;
 
     // Update derived fields
     this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
     this.config.S3_ENDPOINT = this.config.S3_HOST;
+    this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
 
     await this.saveConfig();
 
     logger.log('ok', '✅ New port configuration:');
     logger.log('info', `   📍 MongoDB: ${mongoPort}`);
     logger.log('info', `   📍 S3 API: ${s3Port}`);
     logger.log('info', `   📍 S3 Console: ${s3ConsolePort}`);
+    logger.log('info', `   📍 Elasticsearch: ${esPort}`);
   }
 }
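
Reviewer note: helpers.isPortAvailable and helpers.getRandomAvailablePort are used throughout these hunks but their implementations are not part of this diff. For orientation, here is a minimal sketch of how such helpers are typically written against Node's built-in net module — the 20000-30000 range is taken from the help text further down; everything else is an assumption, not the repository's actual code:

import * as net from 'net';

// Try to bind a throwaway server; if binding succeeds, the port is free.
export const isPortAvailable = (port: number): Promise<boolean> =>
  new Promise<boolean>((resolve) => {
    const server = net.createServer();
    server.once('error', () => resolve(false)); // EADDRINUSE etc. → taken
    server.once('listening', () => server.close(() => resolve(true)));
    server.listen(port, '0.0.0.0');
  });

// Pick random candidates in the 20000-30000 range until one is free.
export const getRandomAvailablePort = async (): Promise<number> => {
  while (true) {
    const candidate = 20000 + Math.floor(Math.random() * 10000);
    if (await isPortAvailable(candidate)) {
      return candidate;
    }
  }
};
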
@@ -2,17 +2,21 @@ import * as plugins from './mod.plugins.js';
 import * as helpers from './helpers.js';
 import { ServiceConfiguration } from './classes.serviceconfiguration.js';
 import { DockerContainer } from './classes.dockercontainer.js';
+import { GlobalRegistry } from './classes.globalregistry.js';
 import { logger } from '../gitzone.logging.js';
 
 export class ServiceManager {
   private config: ServiceConfiguration;
   private docker: DockerContainer;
+  private enabledServices: string[] | null = null;
+  private globalRegistry: GlobalRegistry;
 
   constructor() {
     this.config = new ServiceConfiguration();
     this.docker = new DockerContainer();
+    this.globalRegistry = GlobalRegistry.getInstance();
   }
 
   /**
    * Initialize the service manager
    */
@@ -22,15 +26,162 @@ export class ServiceManager {
       logger.log('error', 'Error: Docker is not installed. Please install Docker first.');
       process.exit(1);
     }
 
     // Load or create configuration
     await this.config.loadOrCreate();
     logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);
 
+    // Load service selection from npmextra.json
+    await this.loadServiceConfiguration();
+
     // Validate and update ports if needed
     await this.config.validateAndUpdatePorts();
   }
 
+  /**
+   * Load service configuration from npmextra.json
+   */
+  private async loadServiceConfiguration(): Promise<void> {
+    const npmextraConfig = new plugins.npmextra.Npmextra(process.cwd());
+    const gitzoneConfig = npmextraConfig.dataFor<any>('gitzone', {});
+
+    // Check if services array exists
+    if (!gitzoneConfig.services || !Array.isArray(gitzoneConfig.services) || gitzoneConfig.services.length === 0) {
+      // Prompt user to select services
+      const smartinteract = new plugins.smartinteract.SmartInteract();
+      const response = await smartinteract.askQuestion({
+        name: 'services',
+        type: 'checkbox',
+        message: 'Which services do you want to enable for this project?',
+        choices: [
+          { name: 'MongoDB', value: 'mongodb' },
+          { name: 'MinIO (S3)', value: 'minio' },
+          { name: 'Elasticsearch', value: 'elasticsearch' }
+        ],
+        default: ['mongodb', 'minio', 'elasticsearch']
+      });
+
+      this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
+
+      // Save to npmextra.json
+      await this.saveServiceConfiguration(this.enabledServices);
+    } else {
+      this.enabledServices = gitzoneConfig.services;
+      logger.log('info', `🔧 Enabled services: ${this.enabledServices.join(', ')}`);
+    }
+  }
+
+  /**
+   * Save service configuration to npmextra.json
+   */
+  private async saveServiceConfiguration(services: string[]): Promise<void> {
+    const npmextraPath = plugins.path.join(process.cwd(), 'npmextra.json');
+    let npmextraData: any = {};
+
+    // Read existing npmextra.json if it exists
+    if (await plugins.smartfs.file(npmextraPath).exists()) {
+      const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read();
+      npmextraData = JSON.parse(content as string);
+    }
+
+    // Update gitzone.services
+    if (!npmextraData.gitzone) {
+      npmextraData.gitzone = {};
+    }
+    npmextraData.gitzone.services = services;
+
+    // Write back to npmextra.json
+    await plugins.smartfs
+      .file(npmextraPath)
+      .encoding('utf8')
+      .write(JSON.stringify(npmextraData, null, 2));
+
+    logger.log('ok', `✅ Saved service configuration to npmextra.json`);
+    logger.log('info', `🔧 Enabled services: ${services.join(', ')}`);
+  }
+
+  /**
+   * Check if a service is enabled
+   */
+  private isServiceEnabled(service: string): boolean {
+    if (!this.enabledServices) {
+      return true; // If no configuration, enable all
+    }
+    return this.enabledServices.includes(service);
+  }
+
+  /**
+   * Register this project with the global registry
+   */
+  private async registerWithGlobalRegistry(): Promise<void> {
+    const config = this.config.getConfig();
+    const containers = this.config.getContainerNames();
+
+    await this.globalRegistry.registerProject({
+      projectPath: process.cwd(),
+      projectName: config.PROJECT_NAME,
+      containers: {
+        mongo: containers.mongo,
+        minio: containers.minio,
+        elasticsearch: containers.elasticsearch,
+      },
+      ports: {
+        mongo: parseInt(config.MONGODB_PORT),
+        s3: parseInt(config.S3_PORT),
+        s3Console: parseInt(config.S3_CONSOLE_PORT),
+        elasticsearch: parseInt(config.ELASTICSEARCH_PORT),
+      },
+      enabledServices: this.enabledServices || ['mongodb', 'minio', 'elasticsearch'],
+    });
+  }
+
+  /**
+   * Start all enabled services
+   */
+  public async startAll(): Promise<void> {
+    let first = true;
+    if (this.isServiceEnabled('mongodb')) {
+      if (!first) console.log();
+      await this.startMongoDB();
+      first = false;
+    }
+    if (this.isServiceEnabled('minio')) {
+      if (!first) console.log();
+      await this.startMinIO();
+      first = false;
+    }
+    if (this.isServiceEnabled('elasticsearch')) {
+      if (!first) console.log();
+      await this.startElasticsearch();
+      first = false;
+    }
+
+    // Register with global registry
+    await this.registerWithGlobalRegistry();
+  }
+
+  /**
+   * Stop all enabled services
+   */
+  public async stopAll(): Promise<void> {
+    let first = true;
+    if (this.isServiceEnabled('mongodb')) {
+      if (!first) console.log();
+      await this.stopMongoDB();
+      first = false;
+    }
+    if (this.isServiceEnabled('minio')) {
+      if (!first) console.log();
+      await this.stopMinIO();
+      first = false;
+    }
+    if (this.isServiceEnabled('elasticsearch')) {
+      if (!first) console.log();
+      await this.stopElasticsearch();
+      first = false;
+    }
+  }
+
   /**
    * Start MongoDB service
    */
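
For reference, the service selection that loadServiceConfiguration reads and saveServiceConfiguration writes lives under the gitzone scope of the project's npmextra.json. Given the assignment above (npmextraData.gitzone.services = services), the stored shape is simply:

{
  "gitzone": {
    "services": ["mongodb", "minio", "elasticsearch"]
  }
}

Any other keys already present in npmextra.json are preserved, since the file is read, patched, and written back whole.
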
@@ -42,7 +193,7 @@ export class ServiceManager {
     const directories = this.config.getDataDirectories();
 
     // Ensure data directory exists
-    await plugins.smartfile.fs.ensureDir(directories.mongo);
+    await plugins.smartfs.directory(directories.mongo).recursive().create();
 
     const status = await this.docker.getStatus(containers.mongo);
 
@@ -141,7 +292,7 @@ export class ServiceManager {
     const directories = this.config.getDataDirectories();
 
     // Ensure data directory exists
-    await plugins.smartfile.fs.ensureDir(directories.minio);
+    await plugins.smartfs.directory(directories.minio).recursive().create();
 
     const status = await this.docker.getStatus(containers.minio);
 
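
The two one-line changes above are representative of the smartfile → smartfs migration running through this whole changeset: static smartfile.fs helpers are swapped for smartfs's fluent builder API (the shared smartfs instance is created in the plugins module at the end of this diff). Both forms, side by side:

// before: static helper from @push.rocks/smartfile
await plugins.smartfile.fs.ensureDir(directories.mongo);

// after: fluent builder on the shared @push.rocks/smartfs instance
await plugins.smartfs.directory(directories.mongo).recursive().create();
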
@@ -259,7 +410,103 @@ export class ServiceManager {
     logger.log('info', `   API: http://${config.S3_HOST}:${config.S3_PORT}`);
     logger.log('info', `   Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_ACCESSKEY}/***)`);
   }
 
+  /**
+   * Start Elasticsearch service
+   */
+  public async startElasticsearch(): Promise<void> {
+    logger.log('note', '📦 Elasticsearch:');
+
+    const config = this.config.getConfig();
+    const containers = this.config.getContainerNames();
+    const directories = this.config.getDataDirectories();
+
+    // Ensure data directory exists
+    await plugins.smartfs.directory(directories.elasticsearch).recursive().create();
+
+    const status = await this.docker.getStatus(containers.elasticsearch);
+
+    switch (status) {
+      case 'running':
+        logger.log('ok', '   Already running ✓');
+        break;
+
+      case 'stopped':
+        // Check if port mapping matches config
+        const esPortMappings = await this.docker.getPortMappings(containers.elasticsearch);
+        if (esPortMappings && esPortMappings['9200'] !== config.ELASTICSEARCH_PORT) {
+          logger.log('note', '   Port configuration changed, recreating container...');
+          await this.docker.remove(containers.elasticsearch, true);
+          // Fall through to create new container
+          const success = await this.docker.run({
+            name: containers.elasticsearch,
+            image: 'elasticsearch:8.11.0',
+            ports: {
+              [`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
+            },
+            volumes: {
+              [directories.elasticsearch]: '/usr/share/elasticsearch/data'
+            },
+            environment: {
+              'discovery.type': 'single-node',
+              'xpack.security.enabled': 'true',
+              'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
+              'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
+            },
+            restart: 'unless-stopped'
+          });
+
+          if (success) {
+            logger.log('ok', '   Recreated with new port ✓');
+          } else {
+            logger.log('error', '   Failed to recreate container');
+          }
+        } else {
+          // Ports match, just start the container
+          if (await this.docker.start(containers.elasticsearch)) {
+            logger.log('ok', '   Started ✓');
+          } else {
+            logger.log('error', '   Failed to start');
+          }
+        }
+        break;
+
+      case 'not_exists':
+        logger.log('note', '   Creating container...');
+
+        const success = await this.docker.run({
+          name: containers.elasticsearch,
+          image: 'elasticsearch:8.11.0',
+          ports: {
+            [`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
+          },
+          volumes: {
+            [directories.elasticsearch]: '/usr/share/elasticsearch/data'
+          },
+          environment: {
+            'discovery.type': 'single-node',
+            'xpack.security.enabled': 'true',
+            'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
+            'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
+          },
+          restart: 'unless-stopped'
+        });
+
+        if (success) {
+          logger.log('ok', '   Created and started ✓');
+        } else {
+          logger.log('error', '   Failed to create container');
+        }
+        break;
+    }
+
+    logger.log('info', `   Container: ${containers.elasticsearch}`);
+    logger.log('info', `   Port: ${config.ELASTICSEARCH_PORT}`);
+    logger.log('info', `   Connection: ${config.ELASTICSEARCH_URL}`);
+    logger.log('info', `   Username: ${config.ELASTICSEARCH_USER}`);
+    logger.log('info', `   Password: ${config.ELASTICSEARCH_PASS}`);
+  }
+
   /**
    * Stop MongoDB service
    */
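
Reviewer note: DockerContainer is not touched by this diff, so the exact contract of docker.run(...) above is inferred from its call sites. A plausible sketch of how such an options object could map onto docker CLI flags — the option names mirror the call above, but the builder itself is hypothetical:

interface RunOptions {
  name: string;
  image: string;
  ports: Record<string, string>;   // host binding → container port
  volumes: Record<string, string>; // host dir → container dir
  environment: Record<string, string>;
  restart: string;
}

const buildDockerRunArgs = (opts: RunOptions): string[] => {
  const args = ['run', '-d', '--name', opts.name, '--restart', opts.restart];
  for (const [hostBinding, containerPort] of Object.entries(opts.ports)) {
    args.push('-p', `${hostBinding}:${containerPort}`); // e.g. -p 0.0.0.0:24567:9200
  }
  for (const [hostDir, containerDir] of Object.entries(opts.volumes)) {
    args.push('-v', `${hostDir}:${containerDir}`);
  }
  for (const [key, value] of Object.entries(opts.environment)) {
    args.push('-e', `${key}=${value}`);
  }
  args.push(opts.image);
  return args; // e.g. handed to a shell runner such as smartshell
};
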
@@ -285,10 +532,10 @@ export class ServiceManager {
    */
   public async stopMinIO(): Promise<void> {
     logger.log('note', '📦 S3/MinIO:');
 
     const containers = this.config.getContainerNames();
     const status = await this.docker.getStatus(containers.minio);
 
     if (status === 'running') {
       if (await this.docker.stop(containers.minio)) {
         logger.log('ok', '   Stopped ✓');
@@ -299,7 +546,27 @@ export class ServiceManager {
       logger.log('note', '   Not running');
     }
   }
 
+  /**
+   * Stop Elasticsearch service
+   */
+  public async stopElasticsearch(): Promise<void> {
+    logger.log('note', '📦 Elasticsearch:');
+
+    const containers = this.config.getContainerNames();
+    const status = await this.docker.getStatus(containers.elasticsearch);
+
+    if (status === 'running') {
+      if (await this.docker.stop(containers.elasticsearch)) {
+        logger.log('ok', '   Stopped ✓');
+      } else {
+        logger.log('error', '   Failed to stop');
+      }
+    } else {
+      logger.log('note', '   Not running');
+    }
+  }
+
   /**
    * Show service status
    */
@@ -385,8 +652,36 @@ export class ServiceManager {
         }
         break;
     }
 
+    // Elasticsearch status
+    const esStatus = await this.docker.getStatus(containers.elasticsearch);
+    switch (esStatus) {
+      case 'running':
+        logger.log('ok', '📦 Elasticsearch: 🟢 Running');
+        logger.log('info', `   ├─ Container: ${containers.elasticsearch}`);
+        logger.log('info', `   ├─ Port: ${config.ELASTICSEARCH_PORT}`);
+        logger.log('info', `   ├─ Connection: ${config.ELASTICSEARCH_URL}`);
+        logger.log('info', `   └─ Credentials: ${config.ELASTICSEARCH_USER}/${config.ELASTICSEARCH_PASS}`);
+        break;
+      case 'stopped':
+        logger.log('note', '📦 Elasticsearch: 🟡 Stopped');
+        logger.log('info', `   ├─ Container: ${containers.elasticsearch}`);
+        logger.log('info', `   └─ Port: ${config.ELASTICSEARCH_PORT}`);
+        break;
+      case 'not_exists':
+        logger.log('info', '📦 Elasticsearch: ⚪ Not installed');
+        // Check port availability
+        const esPort = parseInt(config.ELASTICSEARCH_PORT);
+        const esAvailable = await helpers.isPortAvailable(esPort);
+        if (!esAvailable) {
+          logger.log('error', `   └─ ⚠️ Port ${esPort} is in use by another process`);
+        } else {
+          logger.log('info', `   └─ Port ${esPort} is available`);
+        }
+        break;
+    }
   }
 
   /**
    * Show configuration
    */
@@ -420,6 +715,15 @@ export class ServiceManager {
     logger.log('info', `  Data: ${this.config.getDataDirectories().minio}`);
     logger.log('info', `  Endpoint: ${config.S3_ENDPOINT}`);
     logger.log('info', `  Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
 
+    console.log();
+    logger.log('note', 'Elasticsearch:');
+    logger.log('info', `  Host: ${config.ELASTICSEARCH_HOST}:${config.ELASTICSEARCH_PORT}`);
+    logger.log('info', `  User: ${config.ELASTICSEARCH_USER}`);
+    logger.log('info', '  Password: ***');
+    logger.log('info', `  Container: ${this.config.getContainerNames().elasticsearch}`);
+    logger.log('info', `  Data: ${this.config.getDataDirectories().elasticsearch}`);
+    logger.log('info', `  Connection: ${config.ELASTICSEARCH_URL}`);
   }
 
   /**
@@ -477,16 +781,29 @@ export class ServiceManager {
           logger.log('note', 'S3/MinIO container is not running');
         }
         break;
 
+      case 'elasticsearch':
+      case 'es':
+        if (await this.docker.isRunning(containers.elasticsearch)) {
+          helpers.printHeader(`Elasticsearch Logs (last ${lines} lines)`);
+          const logs = await this.docker.logs(containers.elasticsearch, lines);
+          console.log(logs);
+        } else {
+          logger.log('note', 'Elasticsearch container is not running');
+        }
+        break;
+
       case 'all':
       case '':
         await this.showLogs('mongo', lines);
         console.log();
         await this.showLogs('minio', lines);
+        console.log();
+        await this.showLogs('elasticsearch', lines);
         break;
 
       default:
-        logger.log('note', 'Usage: gitzone services logs [mongo|s3|all] [lines]');
+        logger.log('note', 'Usage: gitzone services logs [mongo|s3|elasticsearch|all] [lines]');
         break;
     }
   }
@@ -497,24 +814,40 @@ export class ServiceManager {
   public async removeContainers(): Promise<void> {
     const containers = this.config.getContainerNames();
     let removed = false;
 
     if (await this.docker.exists(containers.mongo)) {
       if (await this.docker.remove(containers.mongo, true)) {
         logger.log('ok', '   MongoDB container removed ✓');
         removed = true;
       }
     }
 
     if (await this.docker.exists(containers.minio)) {
       if (await this.docker.remove(containers.minio, true)) {
         logger.log('ok', '   S3/MinIO container removed ✓');
         removed = true;
       }
     }
 
+    if (await this.docker.exists(containers.elasticsearch)) {
+      if (await this.docker.remove(containers.elasticsearch, true)) {
+        logger.log('ok', '   Elasticsearch container removed ✓');
+        removed = true;
+      }
+    }
+
     if (!removed) {
       logger.log('note', '   No containers to remove');
     }
+
+    // Check if all containers are gone, then unregister from global registry
+    const mongoExists = await this.docker.exists(containers.mongo);
+    const minioExists = await this.docker.exists(containers.minio);
+    const esExists = await this.docker.exists(containers.elasticsearch);
+
+    if (!mongoExists && !minioExists && !esExists) {
+      await this.globalRegistry.unregisterProject(process.cwd());
+    }
   }
 
   /**
@@ -523,24 +856,60 @@ export class ServiceManager {
   public async cleanData(): Promise<void> {
     const directories = this.config.getDataDirectories();
     let cleaned = false;
 
-    if (await plugins.smartfile.fs.fileExists(directories.mongo)) {
-      await plugins.smartfile.fs.remove(directories.mongo);
+    if (await plugins.smartfs.directory(directories.mongo).exists()) {
+      await plugins.smartfs.directory(directories.mongo).recursive().delete();
       logger.log('ok', '   MongoDB data removed ✓');
       cleaned = true;
     }
 
-    if (await plugins.smartfile.fs.fileExists(directories.minio)) {
-      await plugins.smartfile.fs.remove(directories.minio);
+    if (await plugins.smartfs.directory(directories.minio).exists()) {
+      await plugins.smartfs.directory(directories.minio).recursive().delete();
       logger.log('ok', '   S3/MinIO data removed ✓');
       cleaned = true;
     }
 
+    if (await plugins.smartfs.directory(directories.elasticsearch).exists()) {
+      await plugins.smartfs.directory(directories.elasticsearch).recursive().delete();
+      logger.log('ok', '   Elasticsearch data removed ✓');
+      cleaned = true;
+    }
+
     if (!cleaned) {
       logger.log('note', '   No data to clean');
     }
   }
 
+  /**
+   * Configure which services are enabled
+   */
+  public async configureServices(): Promise<void> {
+    logger.log('note', 'Select which services to enable for this project:');
+    console.log();
+
+    const currentServices = this.enabledServices || ['mongodb', 'minio', 'elasticsearch'];
+
+    const smartinteract = new plugins.smartinteract.SmartInteract();
+    const response = await smartinteract.askQuestion({
+      name: 'services',
+      type: 'checkbox',
+      message: 'Which services do you want to enable?',
+      choices: [
+        { name: 'MongoDB', value: 'mongodb' },
+        { name: 'MinIO (S3)', value: 'minio' },
+        { name: 'Elasticsearch', value: 'elasticsearch' }
+      ],
+      default: currentServices
+    });
+
+    this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
+
+    // Save to npmextra.json
+    await this.saveServiceConfiguration(this.enabledServices);
+
+    logger.log('ok', '✅ Service configuration updated');
+  }
+
   /**
    * Reconfigure services with new ports
    */
@@ -551,20 +920,25 @@ export class ServiceManager {
 
     // Stop existing containers
     logger.log('note', '🛑 Stopping existing containers...');
 
     if (await this.docker.exists(containers.mongo)) {
       await this.docker.stop(containers.mongo);
       logger.log('ok', '   MongoDB stopped ✓');
     }
 
     if (await this.docker.exists(containers.minio)) {
       await this.docker.stop(containers.minio);
       logger.log('ok', '   S3/MinIO stopped ✓');
     }
 
+    if (await this.docker.exists(containers.elasticsearch)) {
+      await this.docker.stop(containers.elasticsearch);
+      logger.log('ok', '   Elasticsearch stopped ✓');
+    }
+
     // Reconfigure ports
     await this.config.reconfigurePorts();
 
     // Ask if user wants to restart services
     const smartinteract = new plugins.smartinteract.SmartInteract();
     const response = await smartinteract.askQuestion({
@@ -573,11 +947,10 @@ export class ServiceManager {
       message: 'Do you want to start services with new ports?',
       default: true
     });
 
     if (response.value) {
       console.log();
-      await this.startMongoDB();
-      await this.startMinIO();
+      await this.startAll();
     }
   }
 }
 
@@ -42,11 +42,15 @@ export const getRandomAvailablePort = async (): Promise<number> => {
 /**
  * Get the project name from package.json or directory
  */
-export const getProjectName = (): string => {
+export const getProjectName = async (): Promise<string> => {
   try {
     const packageJsonPath = plugins.path.join(process.cwd(), 'package.json');
-    if (plugins.smartfile.fs.fileExistsSync(packageJsonPath)) {
-      const packageJson = plugins.smartfile.fs.toObjectSync(packageJsonPath);
+    if (await plugins.smartfs.file(packageJsonPath).exists()) {
+      const content = (await plugins.smartfs
+        .file(packageJsonPath)
+        .encoding('utf8')
+        .read()) as string;
+      const packageJson = JSON.parse(content);
       if (packageJson.name) {
         // Sanitize: @fin.cx/skr → fin-cx-skr
         return packageJson.name.replace(/@/g, '').replace(/[\/\.]/g, '-');
@@ -55,7 +59,7 @@ export const getProjectName = (): string => {
   } catch (error) {
     // Ignore errors and fall back to directory name
   }
 
   return plugins.path.basename(process.cwd());
 };
 
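
The sanitization comment above is easy to verify; it is just two regex replaces applied in sequence:

// same expression as in getProjectName above
const sanitize = (name: string) =>
  name.replace(/@/g, '').replace(/[\/\.]/g, '-');

sanitize('@fin.cx/skr');         // 'fin-cx-skr' — the example from the comment
sanitize('@push.rocks/smartfs'); // 'push-rocks-smartfs'
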
@@ -1,15 +1,25 @@
 import * as plugins from './mod.plugins.js';
 import * as helpers from './helpers.js';
 import { ServiceManager } from './classes.servicemanager.js';
+import { GlobalRegistry } from './classes.globalregistry.js';
 import { logger } from '../gitzone.logging.js';
 
 export const run = async (argvArg: any) => {
+  const isGlobal = argvArg.g || argvArg.global;
+  const command = argvArg._[1] || 'help';
+
+  // Handle global commands first
+  if (isGlobal) {
+    await handleGlobalCommand(command);
+    return;
+  }
+
+  // Local project commands
   const serviceManager = new ServiceManager();
   await serviceManager.init();
 
-  const command = argvArg._[1] || 'help';
   const service = argvArg._[2] || 'all';
 
   switch (command) {
     case 'start':
       await handleStart(serviceManager, service);
@@ -28,9 +38,13 @@ export const run = async (argvArg: any) => {
       break;
 
     case 'config':
-      await serviceManager.showConfig();
+      if (service === 'services' || argvArg._[2] === 'services') {
+        await handleConfigureServices(serviceManager);
+      } else {
+        await serviceManager.showConfig();
+      }
       break;
 
     case 'compass':
       await serviceManager.showCompassConnection();
       break;
@@ -61,63 +75,69 @@ export const run = async (argvArg: any) => {
 
 async function handleStart(serviceManager: ServiceManager, service: string) {
   helpers.printHeader('Starting Services');
 
   switch (service) {
     case 'mongo':
     case 'mongodb':
       await serviceManager.startMongoDB();
       break;
 
     case 'minio':
     case 's3':
       await serviceManager.startMinIO();
       break;
 
+    case 'elasticsearch':
+    case 'es':
+      await serviceManager.startElasticsearch();
+      break;
+
     case 'all':
     case '':
-      await serviceManager.startMongoDB();
-      console.log();
-      await serviceManager.startMinIO();
+      await serviceManager.startAll();
       break;
 
     default:
       logger.log('error', `Unknown service: ${service}`);
-      logger.log('note', 'Use: mongo, s3, or all');
+      logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
       break;
   }
 }
 
 async function handleStop(serviceManager: ServiceManager, service: string) {
   helpers.printHeader('Stopping Services');
 
   switch (service) {
     case 'mongo':
     case 'mongodb':
       await serviceManager.stopMongoDB();
       break;
 
     case 'minio':
     case 's3':
       await serviceManager.stopMinIO();
       break;
 
+    case 'elasticsearch':
+    case 'es':
+      await serviceManager.stopElasticsearch();
+      break;
+
     case 'all':
     case '':
-      await serviceManager.stopMongoDB();
-      console.log();
-      await serviceManager.stopMinIO();
+      await serviceManager.stopAll();
       break;
 
     default:
       logger.log('error', `Unknown service: ${service}`);
-      logger.log('note', 'Use: mongo, s3, or all');
+      logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
       break;
   }
 }
 
 async function handleRestart(serviceManager: ServiceManager, service: string) {
   helpers.printHeader('Restarting Services');
 
   switch (service) {
     case 'mongo':
     case 'mongodb':
@@ -125,24 +145,28 @@ async function handleRestart(serviceManager: ServiceManager, service: string) {
       await plugins.smartdelay.delayFor(2000);
       await serviceManager.startMongoDB();
       break;
 
     case 'minio':
     case 's3':
       await serviceManager.stopMinIO();
       await plugins.smartdelay.delayFor(2000);
       await serviceManager.startMinIO();
       break;
 
+    case 'elasticsearch':
+    case 'es':
+      await serviceManager.stopElasticsearch();
+      await plugins.smartdelay.delayFor(2000);
+      await serviceManager.startElasticsearch();
+      break;
+
     case 'all':
     case '':
-      await serviceManager.stopMongoDB();
-      await serviceManager.stopMinIO();
+      await serviceManager.stopAll();
       await plugins.smartdelay.delayFor(2000);
-      await serviceManager.startMongoDB();
-      console.log();
-      await serviceManager.startMinIO();
+      await serviceManager.startAll();
       break;
 
     default:
       logger.log('error', `Unknown service: ${service}`);
       break;
@@ -166,7 +190,7 @@ async function handleClean(serviceManager: ServiceManager) {
   helpers.printHeader('Clean All');
   logger.log('error', '⚠️ WARNING: This will remove all containers and data!');
   logger.log('error', 'This action cannot be undone!');
 
   const smartinteraction = new plugins.smartinteract.SmartInteract();
   const confirmAnswer = await smartinteraction.askQuestion({
     name: 'confirm',
@@ -174,7 +198,7 @@ async function handleClean(serviceManager: ServiceManager) {
     message: 'Type "yes" to confirm:',
     default: 'no'
   });
 
   if (confirmAnswer.value === 'yes') {
     await serviceManager.removeContainers();
     console.log();
@@ -185,40 +209,225 @@ async function handleClean(serviceManager: ServiceManager) {
   }
 }
 
+async function handleConfigureServices(serviceManager: ServiceManager) {
+  helpers.printHeader('Configure Services');
+  await serviceManager.configureServices();
+}
+
 function showHelp() {
   helpers.printHeader('GitZone Services Manager');
 
   logger.log('ok', 'Usage: gitzone services [command] [options]');
   console.log();
 
   logger.log('note', 'Commands:');
-  logger.log('info', '  start [service]     Start services (mongo|s3|all)');
-  logger.log('info', '  stop [service]      Stop services (mongo|s3|all)');
-  logger.log('info', '  restart [service]   Restart services (mongo|s3|all)');
+  logger.log('info', '  start [service]     Start services (mongo|s3|elasticsearch|all)');
+  logger.log('info', '  stop [service]      Stop services (mongo|s3|elasticsearch|all)');
+  logger.log('info', '  restart [service]   Restart services (mongo|s3|elasticsearch|all)');
   logger.log('info', '  status              Show service status');
   logger.log('info', '  config              Show current configuration');
+  logger.log('info', '  config services     Configure which services are enabled');
   logger.log('info', '  compass             Show MongoDB Compass connection string');
-  logger.log('info', '  logs [service]      Show logs (mongo|s3|all) [lines]');
+  logger.log('info', '  logs [service]      Show logs (mongo|s3|elasticsearch|all) [lines]');
   logger.log('info', '  reconfigure         Reassign ports and restart services');
   logger.log('info', '  remove              Remove all containers');
   logger.log('info', '  clean               Remove all containers and data ⚠️');
   logger.log('info', '  help                Show this help message');
   console.log();
 
+  logger.log('note', 'Available Services:');
+  logger.log('info', '  • MongoDB (mongo) - Document database');
+  logger.log('info', '  • MinIO (s3) - S3-compatible object storage');
+  logger.log('info', '  • Elasticsearch (elasticsearch) - Search and analytics engine');
+  console.log();
+
   logger.log('note', 'Features:');
   logger.log('info', '  • Auto-creates .nogit/env.json with smart defaults');
-  logger.log('info', '  • Random ports (20000-30000) to avoid conflicts');
+  logger.log('info', '  • Random ports (20000-30000) for MongoDB/MinIO to avoid conflicts');
+  logger.log('info', '  • Elasticsearch uses standard port 9200');
   logger.log('info', '  • Project-specific containers for multi-project support');
   logger.log('info', '  • Preserves custom configuration values');
   logger.log('info', '  • MongoDB Compass connection support');
   console.log();
 
   logger.log('note', 'Examples:');
   logger.log('info', '  gitzone services start                # Start all services');
   logger.log('info', '  gitzone services start mongo          # Start only MongoDB');
+  logger.log('info', '  gitzone services start elasticsearch  # Start only Elasticsearch');
   logger.log('info', '  gitzone services stop                 # Stop all services');
   logger.log('info', '  gitzone services status               # Check service status');
   logger.log('info', '  gitzone services config               # Show configuration');
   logger.log('info', '  gitzone services compass              # Get MongoDB Compass connection');
-  logger.log('info', '  gitzone services logs mongo 50        # Show last 50 lines of MongoDB logs');
+  logger.log('info', '  gitzone services logs elasticsearch   # Show Elasticsearch logs');
+  console.log();
+
+  logger.log('note', 'Global Commands (-g/--global):');
+  logger.log('info', '  list -g             List all registered projects');
+  logger.log('info', '  status -g           Show status across all projects');
+  logger.log('info', '  stop -g             Stop all containers across all projects');
+  logger.log('info', '  cleanup -g          Remove stale registry entries');
+  console.log();
+
+  logger.log('note', 'Global Examples:');
+  logger.log('info', '  gitzone services list -g              # List all registered projects');
+  logger.log('info', '  gitzone services status -g            # Show global container status');
+  logger.log('info', '  gitzone services stop -g              # Stop all (prompts for confirmation)');
+}
+
+// ==================== Global Command Handlers ====================
+
+async function handleGlobalCommand(command: string) {
+  const globalRegistry = GlobalRegistry.getInstance();
+
+  switch (command) {
+    case 'list':
+      await handleGlobalList(globalRegistry);
+      break;
+
+    case 'status':
+      await handleGlobalStatus(globalRegistry);
+      break;
+
+    case 'stop':
+      await handleGlobalStop(globalRegistry);
+      break;
+
+    case 'cleanup':
+      await handleGlobalCleanup(globalRegistry);
+      break;
+
+    case 'help':
+    default:
+      showHelp();
+      break;
+  }
+}
+
+async function handleGlobalList(globalRegistry: GlobalRegistry) {
+  helpers.printHeader('Registered Projects (Global)');
+
+  const projects = await globalRegistry.getAllProjects();
+  const projectPaths = Object.keys(projects);
+
+  if (projectPaths.length === 0) {
+    logger.log('note', 'No projects registered');
+    return;
+  }
+
+  for (const path of projectPaths) {
+    const project = projects[path];
+    const lastActive = new Date(project.lastActive).toLocaleString();
+
+    console.log();
+    logger.log('ok', `📁 ${project.projectName}`);
+    logger.log('info', `   Path: ${project.projectPath}`);
+    logger.log('info', `   Services: ${project.enabledServices.join(', ')}`);
+    logger.log('info', `   Last Active: ${lastActive}`);
+  }
+}
+
+async function handleGlobalStatus(globalRegistry: GlobalRegistry) {
+  helpers.printHeader('Global Service Status');
+
+  const statuses = await globalRegistry.getGlobalStatus();
+
+  if (statuses.length === 0) {
+    logger.log('note', 'No projects registered');
+    return;
+  }
+
+  let runningCount = 0;
+  let totalContainers = 0;
+
+  for (const project of statuses) {
+    console.log();
+    logger.log('ok', `📁 ${project.projectName}`);
+    logger.log('info', `   Path: ${project.projectPath}`);
+
+    if (project.containers.length === 0) {
+      logger.log('note', '   No containers configured');
+      continue;
+    }
+
+    for (const container of project.containers) {
+      totalContainers++;
+      const statusIcon = container.status === 'running' ? '🟢' : container.status === 'exited' ? '🟡' : '⚪';
+      if (container.status === 'running') runningCount++;
+      logger.log('info', `   ${statusIcon} ${container.name}: ${container.status}`);
+    }
+  }
+
+  console.log();
+  logger.log('note', `Summary: ${runningCount}/${totalContainers} containers running across ${statuses.length} project(s)`);
+}
+
+async function handleGlobalStop(globalRegistry: GlobalRegistry) {
+  helpers.printHeader('Stop All Containers (Global)');
+
+  const statuses = await globalRegistry.getGlobalStatus();
+
+  // Count running containers
+  let runningCount = 0;
+  for (const project of statuses) {
+    for (const container of project.containers) {
+      if (container.status === 'running') runningCount++;
+    }
+  }
+
+  if (runningCount === 0) {
+    logger.log('note', 'No running containers found');
+    return;
+  }
+
+  logger.log('note', `Found ${runningCount} running container(s) across ${statuses.length} project(s)`);
+  console.log();
+
+  // Show what will be stopped
+  for (const project of statuses) {
+    const runningContainers = project.containers.filter(c => c.status === 'running');
+    if (runningContainers.length > 0) {
+      logger.log('info', `${project.projectName}:`);
+      for (const container of runningContainers) {
+        logger.log('info', `  • ${container.name}`);
+      }
+    }
+  }
+
+  console.log();
+  const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation(
+    'Stop all containers?',
+    false
+  );
+
+  if (!shouldContinue) {
+    logger.log('note', 'Cancelled');
+    return;
+  }
+
+  logger.log('note', 'Stopping all containers...');
+  const result = await globalRegistry.stopAll();
+
+  if (result.stopped.length > 0) {
+    logger.log('ok', `Stopped: ${result.stopped.join(', ')}`);
+  }
+  if (result.failed.length > 0) {
+    logger.log('error', `Failed to stop: ${result.failed.join(', ')}`);
+  }
+}
+
+async function handleGlobalCleanup(globalRegistry: GlobalRegistry) {
+  helpers.printHeader('Cleanup Registry (Global)');
+
+  logger.log('note', 'Checking for stale registry entries...');
+  const removed = await globalRegistry.cleanup();
+
+  if (removed.length === 0) {
+    logger.log('ok', 'No stale entries found');
+    return;
+  }
+
+  logger.log('ok', `Removed ${removed.length} stale entr${removed.length === 1 ? 'y' : 'ies'}:`);
+  for (const path of removed) {
+    logger.log('info', `  • ${path}`);
+  }
 }
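
Reviewer note: classes.globalregistry.ts itself is not shown in this diff. The shape of a registry entry below is inferred from the registerProject(...) call in the ServiceManager and from the fields read by handleGlobalList/handleGlobalStatus above — treat it as an assumption about that file, not its actual contents:

interface RegisteredProject {
  projectPath: string;
  projectName: string;
  containers: { mongo: string; minio: string; elasticsearch: string };
  ports: { mongo: number; s3: number; s3Console: number; elasticsearch: number };
  enabledServices: string[];
  lastActive: string | number; // handleGlobalList feeds this to new Date(...)
}
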
@@ -6,23 +6,36 @@ import * as paths from '../paths.js';
 
 import { logger } from '../gitzone.logging.js';
 
-export let run = () => {
+export let run = async () => {
   const done = plugins.smartpromise.defer();
   logger.log('warn', 'no action specified');
 
+  const dirEntries = await plugins.smartfs.directory(paths.templatesDir).list();
+  const templates: string[] = [];
+  for (const entry of dirEntries) {
+    try {
+      const stats = await plugins.smartfs
+        .file(plugins.path.join(paths.templatesDir, entry.path))
+        .stat();
+      if (stats.isDirectory) {
+        templates.push(entry.name);
+      }
+    } catch {
+      // Skip entries that can't be accessed
+    }
+  }
+
+  let projects = `\n`;
+  for (const template of templates) {
+    projects += `  - ${template}\n`;
+  }
+
   logger.log(
     'info',
     `
 You can do one of the following things:
 * create a new project with 'gitzone template [template]'
-  the following templates exist: ${(() => {
-    let projects = `\n`;
-    for (const template of plugins.smartfile.fs.listFoldersSync(
-      paths.templatesDir,
-    )) {
-      projects += `  - ${template}\n`;
-    }
-    return projects;
-  })()}
+  the following templates exist: ${projects}
 * format a project with 'gitzone format'
 `,
   );
 
@@ -11,7 +11,7 @@ export const getTemplatePath = (templateNameArg: string) => {
  * receives a template name and returns wether there is a corresponding template
  */
 export const isTemplate = async (templateNameArg: string) => {
-  return plugins.smartfile.fs.isDirectory(getTemplatePath(templateNameArg));
+  return plugins.smartfs.directory(getTemplatePath(templateNameArg)).exists();
 };
 
 export const getTemplate = async (templateNameArg: string) => {
 
@@ -10,9 +10,13 @@ import * as smartupdate from '@push.rocks/smartupdate';
 import * as smartshell from '@push.rocks/smartshell';
 import * as smartnetwork from '@push.rocks/smartnetwork';
 import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
 import * as smartinteract from '@push.rocks/smartinteract';
 import * as smartdelay from '@push.rocks/smartdelay';
 
+// Create smartfs instance for filesystem operations
+export const smartfs = new SmartFs(new SmartFsProviderNode());
+
 export {
   smartlog,
   smartlogDestinationLocal,
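
Taken together, the changeset exercises a small, consistent slice of the smartfs surface through this shared instance. Every call shape below appears somewhere in the diffs above (the paths are placeholders):

const p = 'npmextra.json';
const d = '.nogit/mongodata';

// files
await plugins.smartfs.file(p).exists();
const text = (await plugins.smartfs.file(p).encoding('utf8').read()) as string;
await plugins.smartfs.file(p).encoding('utf8').write(text);
const stats = await plugins.smartfs.file(p).stat(); // stats.isDirectory

// directories
await plugins.smartfs.directory(d).exists();
await plugins.smartfs.directory(d).recursive().create();
const entries = await plugins.smartfs.directory(d).list(); // entry.path, entry.name
await plugins.smartfs.directory(d).recursive().delete();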