Compare commits

...

59 Commits

Author SHA1 Message Date
jkunz 06f2de3230 v2.16.1
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-05-10 11:11:03 +00:00
jkunz cc3128f07b fix(cli): guard startup update check 2026-05-10 11:10:30 +00:00
jkunz 358d677e72 v2.16.0
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-05-10 11:05:17 +00:00
jkunz f421c5851d feat(cli): add toolchain management command 2026-05-10 11:04:57 +00:00
jkunz a420157287 v2.15.0
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-05-10 10:01:18 +00:00
jkunz 0e27d54ad2 feat(cli): split commit and release into target-based workflows 2026-05-10 10:01:09 +00:00
jkunz 738fbaa64f v2.14.3
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-04-30 13:03:04 +00:00
jkunz fe7a9d93d1 fix(test): move test workspace into .nogit and add bundled fixture project files 2026-04-30 13:03:04 +00:00
jkunz 9a4c8795d4 v2.14.2
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-04-30 12:59:00 +00:00
jkunz faee6a1698 fix(package): correct package entry point extension and align test scripts with pnpm 2026-04-30 12:59:00 +00:00
jkunz 9a1044783d v2.14.1
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-04-16 19:44:17 +00:00
jkunz b16eb75d81 fix(repo): no changes to commit 2026-04-16 19:44:17 +00:00
jkunz 261f7ee6b2 v2.14.0
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-04-16 18:54:07 +00:00
jkunz fd7a73398c feat(cli): add machine-readable CLI help, recommendation, and configuration flows 2026-04-16 18:54:07 +00:00
jkunz f43f88a3cb v2.13.16
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-04-16 13:05:47 +00:00
jkunz 4c86ad62fb fix(mod_format): stop package.json formatter from modifying buildDocs and dependency entries 2026-04-16 13:05:47 +00:00
jkunz 4214a1fdf1 v2.13.15
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-03-24 19:59:26 +00:00
jkunz 1c33735799 fix(repo): no changes to commit 2026-03-24 19:59:26 +00:00
jkunz 274405e364 v2.13.14
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-03-24 19:59:13 +00:00
jkunz bf858c8650 fix(mod_format): move smartconfig file renaming into the formatter orchestrator 2026-03-24 19:59:13 +00:00
jkunz b257c82bd6 v2.13.13
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-03-24 19:42:12 +00:00
jkunz 5a1f6d8c76 fix(vscode-template): update VS Code schema matching to use .smartconfig.json 2026-03-24 19:42:12 +00:00
jkunz d44ad6e4e4 v2.13.12
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-03-24 16:56:34 +00:00
jkunz 142adfd396 fix(mod_format): render format templates through smartscaf before comparing generated files 2026-03-24 16:56:34 +00:00
jkunz b55e75d169 2.13.11
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-03-24 16:10:56 +00:00
jkunz d0d922e53b update to smartconfig 2026-03-24 16:10:51 +00:00
jkunz eda67395fe v2.13.10
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-03-24 15:05:07 +00:00
jkunz 470e87eb79 fix(config): migrate configuration handling from npmextra to smartconfig 2026-03-24 15:05:07 +00:00
jkunz 3358a0eacc v2.13.9
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 10m42s
Default (tags) / release (push) Has been cancelled
Default (tags) / metadata (push) Has been cancelled
2026-03-11 19:10:18 +00:00
jkunz b65fac6257 fix(deps,readme): bump dependencies and update README to prefer pnpm and document semantic commit flags 2026-03-11 19:10:18 +00:00
jkunz 4ab59609e6 v2.13.8
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 12m24s
Default (tags) / release (push) Has been cancelled
Default (tags) / metadata (push) Has been cancelled
2026-03-05 11:30:34 +00:00
jkunz 32f106291f fix(dependencies): move runtime tooling packages from devDependencies to dependencies 2026-03-05 11:30:34 +00:00
jkunz b8aa5d61f6 v2.13.7
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 12m15s
Default (tags) / release (push) Has been cancelled
Default (tags) / metadata (push) Has been cancelled
2026-03-05 10:25:44 +00:00
jkunz 71759c276e fix(deps): bump devDependencies: @git.zone/tsbuild to ^4.1.4 and @push.rocks/smartshell to ^3.3.7 2026-03-05 10:25:44 +00:00
jkunz 7938f12d43 v2.13.6
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 14m42s
Default (tags) / release (push) Has been cancelled
Default (tags) / metadata (push) Has been cancelled
2026-02-01 16:19:37 +00:00
jkunz 3722258d69 fix(templates/npm): use tsbuild tsfolders instead of --web flag in npm template build script 2026-02-01 16:19:37 +00:00
jkunz 68859d0e97 v2.13.5
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 10m44s
Default (tags) / release (push) Has been cancelled
Default (tags) / metadata (push) Has been cancelled
2026-02-01 16:18:37 +00:00
jkunz ecadbc7a86 fix(templates/npm): update npm template: tweak test script, bump devDependencies, add smartpath dependency, and fix ts import path 2026-02-01 16:18:37 +00:00
jkunz 0243bc5ec7 v2.13.4
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 12m1s
Default (tags) / release (push) Has been cancelled
Default (tags) / metadata (push) Has been cancelled
2026-01-12 17:57:00 +00:00
jkunz 92e618104f fix(core): update tsbuild to 4.1.2 with cross-module import path fix 2026-01-12 17:57:00 +00:00
jkunz c089c1f80d v2.13.3
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-18 13:57:13 +00:00
jkunz 10a394c7d8 fix(tsconfig): remove experimentalDecorators and useDefineForClassFields from TypeScript configuration files 2025-12-18 13:57:13 +00:00
jkunz 5980308bb8 v2.13.2
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-16 13:00:30 +00:00
jkunz 398e36bdf7 fix(deps): bump @git.zone/tspublish to ^1.11.0 2025-12-16 13:00:30 +00:00
jkunz 1e78517547 v2.13.1
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-16 12:58:58 +00:00
jkunz 55700ad87e fix(npmextra): merge old npmextra keys into new keys during migration, preserving existing new values 2025-12-16 12:58:58 +00:00
jkunz 773df5268b v2.13.0
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-16 10:49:41 +00:00
jkunz b51fa88283 feat(tests): feat(tests): add sandbox test fixture, CI and editor configs; bump deps 2025-12-16 10:49:41 +00:00
jkunz cb9f717d54 v2.12.2
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-15 17:46:17 +00:00
jkunz 70be11894c fix(cli): noop: no changes 2025-12-15 17:46:17 +00:00
jkunz 89ab63b153 update 2025-12-15 17:45:28 +00:00
jkunz 44c193d4a8 v2.12.1
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-15 17:44:09 +00:00
jkunz 44d259a0ae fix(cli): No changes detected — no version bump required 2025-12-15 17:44:09 +00:00
jkunz f0adff8784 v2.12.0
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-15 17:34:36 +00:00
jkunz fb453e62c3 feat(ci,test): feat(ci/test): add test scaffold, GitLab CI, update gitea workflows and .gitignore 2025-12-15 17:34:36 +00:00
jkunz 001721a8e9 v2.11.1
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-15 17:30:51 +00:00
jkunz b191464ff9 fix(mod_format/formatters): fix(packagejson.formatter): correctly parse scoped package dependency arguments and default to latest 2025-12-15 17:30:51 +00:00
jkunz 4d7eaa238f v2.11.0
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-12-15 17:24:17 +00:00
jkunz 601e0d1063 feat(mod_format): feat(mod_format): use unified diff formatter with filenames and context in BaseFormatter.displayDiff 2025-12-15 17:24:17 +00:00
74 changed files with 6174 additions and 5176 deletions
+9 -9
View File
@@ -6,19 +6,19 @@ on:
- '**' - '**'
env: env:
IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci IMAGE: code.foss.global/host.today/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git NPMCI_COMPUTED_REPOURL: https://${-{gitea.repository_owner}-}:${-{secrets.GITEA_TOKEN}-}@{{module.githost}}/${-{gitea.repository}-}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}} NPMCI_TOKEN_NPM: ${-{secrets.NPMCI_TOKEN_NPM}-}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}} NPMCI_TOKEN_NPM2: ${-{secrets.NPMCI_TOKEN_NPM2}-}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}} NPMCI_GIT_GITHUBTOKEN: ${-{secrets.NPMCI_GIT_GITHUBTOKEN}-}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}} NPMCI_URL_CLOUDLY: ${-{secrets.NPMCI_URL_CLOUDLY}-}
jobs: jobs:
security: security:
runs-on: ubuntu-latest runs-on: ubuntu-latest
continue-on-error: true continue-on-error: true
container: container:
image: ${{ env.IMAGE }} image: ${-{ env.IMAGE }-}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
@@ -44,11 +44,11 @@ jobs:
continue-on-error: true continue-on-error: true
test: test:
if: ${{ always() }} if: ${-{ always() }-}
needs: security needs: security
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
image: ${{ env.IMAGE }} image: ${-{ env.IMAGE }-}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
+11 -11
View File
@@ -6,19 +6,19 @@ on:
- '*' - '*'
env: env:
IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci IMAGE: code.foss.global/host.today/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git NPMCI_COMPUTED_REPOURL: https://${-{gitea.repository_owner}-}:${-{secrets.GITEA_TOKEN}-}@{{module.githost}}/${-{gitea.repository}-}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}} NPMCI_TOKEN_NPM: ${-{secrets.NPMCI_TOKEN_NPM}-}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}} NPMCI_TOKEN_NPM2: ${-{secrets.NPMCI_TOKEN_NPM2}-}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}} NPMCI_GIT_GITHUBTOKEN: ${-{secrets.NPMCI_GIT_GITHUBTOKEN}-}
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}} NPMCI_URL_CLOUDLY: ${-{secrets.NPMCI_URL_CLOUDLY}-}
jobs: jobs:
security: security:
runs-on: ubuntu-latest runs-on: ubuntu-latest
continue-on-error: true continue-on-error: true
container: container:
image: ${{ env.IMAGE }} image: ${-{ env.IMAGE }-}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
@@ -42,11 +42,11 @@ jobs:
continue-on-error: true continue-on-error: true
test: test:
if: ${{ always() }} if: ${-{ always() }-}
needs: security needs: security
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
image: ${{ env.IMAGE }} image: ${-{ env.IMAGE }-}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
@@ -74,7 +74,7 @@ jobs:
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
image: ${{ env.IMAGE }} image: ${-{ env.IMAGE }-}
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
@@ -95,7 +95,7 @@ jobs:
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/') if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
runs-on: ubuntu-latest runs-on: ubuntu-latest
container: container:
image: ${{ env.IMAGE }} image: ${-{ env.IMAGE }-}
continue-on-error: true continue-on-error: true
steps: steps:
+4 -2
View File
@@ -3,8 +3,6 @@
# artifacts # artifacts
coverage/ coverage/
public/ public/
test/
test2/
# installs # installs
node_modules/ node_modules/
@@ -18,6 +16,10 @@ node_modules/
dist/ dist/
dist_*/ dist_*/
# AI
.claude/
.serena/
#------# custom #------# custom
.serena .serena
test-output.json test-output.json
+35 -10
View File
@@ -1,13 +1,12 @@
{ {
"szci": { "@ship.zone/szci": {
"npmGlobalTools": [], "npmGlobalTools": []
"npmAccessLevel": "private",
"npmRegistryUrl": "verdaccio.lossless.one"
}, },
"tsdoc": { "@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n" "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. 
\n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
}, },
"@git.zone/cli": { "@git.zone/cli": {
"schemaVersion": 2,
"projectType": "npm", "projectType": "npm",
"module": { "module": {
"githost": "gitlab.com", "githost": "gitlab.com",
@@ -35,12 +34,38 @@
"CI/CD" "CI/CD"
] ]
}, },
"commit": {
"confirmation": "prompt",
"steps": ["analyze", "changelog", "commit"]
},
"release": { "release": {
"registries": [ "confirmation": "prompt",
"https://verdaccio.lossless.digital", "preflight": {
"https://registry.npmjs.org" "requireCleanTree": true,
], "test": false,
"accessLevel": "public" "build": true
},
"targets": {
"git": {
"enabled": true,
"remote": "origin",
"pushBranch": true,
"pushTags": true
},
"npm": {
"enabled": true,
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public",
"alreadyPublished": "success"
},
"docker": {
"enabled": false,
"images": []
}
}
} }
} }
} }
+4
View File
@@ -19,6 +19,10 @@ node_modules/
dist/ dist/
dist_*/ dist_*/
# rust
rust/target/
dist_rust/
# AI # AI
.claude/ .claude/
.serena/ .serena/
-1
View File
@@ -1,6 +1,5 @@
{ {
"compilerOptions": { "compilerOptions": {
"experimentalDecorators": true,
"lib": ["ES2022", "DOM"], "lib": ["ES2022", "DOM"],
"target": "ES2022", "target": "ES2022",
"checkJs": true "checkJs": true
+9 -7
View File
@@ -12,15 +12,17 @@ fileName: package.json
"author": "{{module.author}}", "author": "{{module.author}}",
"license": "{{module.license}}", "license": "{{module.license}}",
"scripts": { "scripts": {
"test": "(tstest test/ --web)", "test": "(tstest test/ --verbose --logfile --timeout 60)",
"build": "(tsbuild --web --allowimplicitany)", "build": "(tsbuild tsfolders --allowimplicitany)",
"buildDocs": "(tsdoc)" "buildDocs": "(tsdoc)"
}, },
"devDependencies": { "devDependencies": {
"@git.zone/tsbuild": "^3.1.2", "@git.zone/tsbuild": "^4.1.2",
"@git.zone/tsrun": "^2.0.0", "@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.3", "@git.zone/tstest": "^3.1.8",
"@types/node": "^24.10.1" "@types/node": "^25.2.0"
}, },
"dependencies": {} "dependencies": {
"@push.rocks/smartpath": "^6.0.0"
}
} }
+1 -1
View File
@@ -1,3 +1,3 @@
import * as plugins from './{{module.name}}.plugins.js'; import * as plugins from './plugins.js';
export let demoExport = 'Hi there! :) This is an exported string'; export let demoExport = 'Hi there! :) This is an exported string';
+8 -2
View File
@@ -9,6 +9,13 @@
"npmPackagename": "{{module.npmPackagename}}", "npmPackagename": "{{module.npmPackagename}}",
"license": "{{module.license}}", "license": "{{module.license}}",
"projectDomain": "{{module.projectDomain}}" "projectDomain": "{{module.projectDomain}}"
},
"release": {
"targets": {
"npm": {
"registries": ["{{npmPrivateRegistry}}"]
}
}
} }
}, },
"@ship.zone/szci": { "@ship.zone/szci": {
@@ -18,7 +25,6 @@
}, },
"dockerBuildargEnvMap": { "dockerBuildargEnvMap": {
"NPMCI_TOKEN_NPM2": "NPMCI_TOKEN_NPM2" "NPMCI_TOKEN_NPM2": "NPMCI_TOKEN_NPM2"
}, }
"npmRegistryUrl": "{{npmPrivateRegistry}}"
} }
} }
@@ -1,3 +1,6 @@
---
fileName: .smartconfig.json
---
{ {
"@git.zone/cli": { "@git.zone/cli": {
"projectType": "{{projectType}}", "projectType": "{{projectType}}",
@@ -11,7 +14,11 @@
"projectDomain": "{{module.projectDomain}}" "projectDomain": "{{module.projectDomain}}"
}, },
"release": { "release": {
"accessLevel": "{{module.npmAccessLevel}}" "targets": {
"npm": {
"accessLevel": "{{module.npmAccessLevel}}"
}
}
} }
}, },
"@ship.zone/szci": { "@ship.zone/szci": {
+1 -1
View File
@@ -1,7 +1,7 @@
{ {
"json.schemas": [ "json.schemas": [
{ {
"fileMatch": ["/npmextra.json"], "fileMatch": ["/.smartconfig.json"],
"schema": { "schema": {
"type": "object", "type": "object",
"properties": { "properties": {
+8 -2
View File
@@ -9,6 +9,13 @@
"npmPackagename": "{{module.npmPackagename}}", "npmPackagename": "{{module.npmPackagename}}",
"license": "{{module.license}}", "license": "{{module.license}}",
"projectDomain": "{{module.projectDomain}}" "projectDomain": "{{module.projectDomain}}"
},
"release": {
"targets": {
"npm": {
"registries": ["{{private.npmRegistryUrl}}"]
}
}
} }
}, },
"@ship.zone/szci": { "@ship.zone/szci": {
@@ -18,7 +25,6 @@
}, },
"dockerBuildargEnvMap": { "dockerBuildargEnvMap": {
"NPMCI_TOKEN_NPM2": "NPMCI_TOKEN_NPM2" "NPMCI_TOKEN_NPM2": "NPMCI_TOKEN_NPM2"
}, }
"npmRegistryUrl": "{{private.npmRegistryUrl}}"
} }
} }
+231 -2
View File
@@ -1,6 +1,191 @@
# Changelog # Changelog
## Pending
## 2026-05-10 - 2.16.1
### Fixes
- Prevent startup update checks from crashing when installed package metadata is incomplete.
## 2026-05-10 - 2.16.0
### Features
- Add `gitzone tools` for managing the global `@git.zone` toolchain from the main CLI.
## 2026-05-10 - 2.15.0
### Features
- Split `gitzone commit` and `gitzone release` into commit workflow and target-based release configuration.
- Add pending changelog handling so commits accumulate unreleased notes and releases move them into version sections.
- Add first-class release targets for git, npm, and Docker publishing.
## 2026-04-30 - 2.14.3 - fix(test)
move test workspace into .nogit and add bundled fixture project files
- updates package scripts to clone and run the sandbox test repository from .nogit/test
- adds a complete test fixture project under test/ for CLI, template, and documentation-related test scenarios
## 2026-04-30 - 2.14.2 - fix(package)
correct package entry point extension and align test scripts with pnpm
- Change the package main field from dist_ts/index.ts to dist_ts/index.js for the built CLI entry point.
- Update test and testBuild scripts to use pnpm run instead of npm run for consistent package manager usage.
## 2026-04-16 - 2.14.1 - fix(repo)
no changes to commit
## 2026-04-16 - 2.14.0 - feat(cli)
add machine-readable CLI help, recommendation, and configuration flows
- introduces shared CLI mode handling for human, plain, and JSON output with configurable interactivity and update checks
- adds read-only JSON support for `commit recommend`, `format plan`, and command help output
- expands `config` and `services` commands with non-interactive config inspection and service enablement flows
- updates format and smartconfig handling to respect non-interactive execution and fail clearly when required metadata is missing
## 2026-04-16 - 2.13.16 - fix(mod_format)
stop package.json formatter from modifying buildDocs and dependency entries
- removes automatic buildDocs script injection from the package.json formatter
- removes dependency include/exclude and latest-version update logic from package.json formatting
- drops the unused smartnpm plugin import after removing registry lookups
## 2026-03-24 - 2.13.15 - fix(repo)
no changes to commit
## 2026-03-24 - 2.13.14 - fix(mod_format)
move smartconfig file renaming into the formatter orchestrator
- Renames smartconfig.json or npmextra.json to .smartconfig.json before formatters run
- Simplifies the smartconfig formatter to only read and modify .smartconfig.json
- Removes create/delete change planning for config renames and applies only content updates within the formatter
## 2026-03-24 - 2.13.13 - fix(vscode-template)
update VS Code schema matching to use .smartconfig.json
- Changes the VS Code template settings so the JSON schema applies to /.smartconfig.json instead of /npmextra.json.
## 2026-03-24 - 2.13.12 - fix(mod_format)
render format templates through smartscaf before comparing generated files
- adds smartscaf-based in-memory template rendering so supplied variables are applied before detecting changes
- supports release.accessLevel as a fallback when selecting public vs private CI templates
- matches rendered output by template or destination path to handle renamed files from template frontmatter
## 2026-03-24 - 2.13.10 - fix(config)
migrate configuration handling from npmextra to smartconfig
- replace @push.rocks/npmextra with @push.rocks/smartconfig across config, commit, format, and service modules
- switch managed project config file references from npmextra.json to smartconfig.json
- update formatting and package metadata checks to include smartconfig.json
- extend the gitignore template with Rust build output directories
## 2026-03-11 - 2.13.9 - fix(deps,readme)
bump dependencies and update README to prefer pnpm and document semantic commit flags
- Dev dependency updates: @git.zone/tsbuild -> ^4.3.0, @git.zone/tstest -> ^3.3.2, @types/node -> ^25.4.0
- Dependency upgrades: @git.zone/tsdoc -> ^2.0.0, @git.zone/tspublish -> ^1.11.2, @push.rocks/lik -> ^6.3.1, @push.rocks/smartfs -> ^1.5.0, @push.rocks/smartlog -> ^3.2.1, @push.rocks/smartstream -> ^3.4.0, prettier -> ^3.8.1 (and other minor/patch bumps)
- README changes: prefer pnpm for global install, clarify format command dry-run behavior and --write flag, add and document gitzone commit flags (-y/--yes, -p/--push, -t/--test, -b/--build, -r/--release) and AI-powered commit workflow
- No source code changes; this is a documentation and dependency refresh, recommend a patch release
## 2026-03-05 - 2.13.8 - fix(dependencies)
move runtime tooling packages from devDependencies to dependencies
- Removed @push.rocks/smartdelay, @push.rocks/smartinteract, @push.rocks/smartnetwork, and @push.rocks/smartshell from devDependencies and added them to dependencies
- No package version numbers were changed; this ensures the moved packages are installed for consumers at runtime
## 2026-03-05 - 2.13.7 - fix(deps)
bump devDependencies: @git.zone/tsbuild to ^4.1.4 and @push.rocks/smartshell to ^3.3.7
- Updated @git.zone/tsbuild from ^4.1.2 to ^4.1.4 (patch)
- Updated @push.rocks/smartshell from ^3.3.0 to ^3.3.7 (patch)
## 2026-02-01 - 2.13.6 - fix(templates/npm)
use tsbuild tsfolders instead of --web flag in npm template build script
- Changed build script in assets/templates/npm/.package.json from "(tsbuild --web --allowimplicitany)" to "(tsbuild tsfolders --allowimplicitany)"
- Replaces --web flag with explicit tsfolders argument to correctly target project folders during build
## 2026-02-01 - 2.13.5 - fix(templates/npm)
update npm template: tweak test script, bump devDependencies, add smartpath dependency, and fix ts import path
- test script updated: '(tstest test/ --web)' -> '(tstest test/ --verbose --logfile --timeout 60)'
- devDependencies bumped: @git.zone/tsbuild ^3.1.2 -> ^4.1.2, @git.zone/tsrun ^2.0.0 -> ^2.0.1, @git.zone/tstest ^3.1.3 -> ^3.1.8, @types/node ^24.10.1 -> ^25.2.0
- dependencies: added @push.rocks/smartpath ^6.0.0
- TypeScript template import fixed: './{{module.name}}.plugins.js' -> './plugins.js'
## 2025-12-18 - 2.13.3 - fix(tsconfig)
remove experimentalDecorators and useDefineForClassFields from TypeScript configuration files
- Removed "experimentalDecorators": true from assets/templates/multienv/deno.json and tsconfig.json
- Removed "useDefineForClassFields": false from tsconfig.json
- This change alters TypeScript/Deno compiler behavior: decorator support and legacy class-field initialization semantics may be affected; code relying on those may need updates
## 2025-12-16 - 2.13.2 - fix(deps)
bump @git.zone/tspublish to ^1.11.0
- Updated dependency @git.zone/tspublish from ^1.10.3 to ^1.11.0 in package.json
## 2025-12-16 - 2.13.1 - fix(npmextra)
merge old npmextra keys into new keys during migration, preserving existing new values
- Changed migration logic to merge data when both old and new keys exist instead of skipping the merge.
- Merge preserves existing new-key values (old values do not overwrite new ones) and still deletes the old key after migration.
- Applied the fix in both ts/mod_format/format.npmextra.ts and ts/mod_format/formatters/npmextra.formatter.ts.
- Adds a console log for successful migrations; behavior for single-key rename remains unchanged.
## 2025-12-16 - 2.13.0 - feat(tests)
feat(tests): add sandbox test fixture, CI and editor configs; bump deps
- Added comprehensive test/ fixture (sandbox-npmts) including package.json, npmextra.json, readme, GitLab CI (.gitlab-ci.yml), .npmrc, VSCode launch/settings and qenv.yml
- Added test sources and helper files under test/test and test/ts (browser and node tests, commitinfo data, simple library code)
- Updated dependencies in package.json: @git.zone/tsdoc -> ^1.11.4, @push.rocks/smartfs -> ^1.3.1
## 2025-12-15 - 2.12.2 - fix(cli)
noop: no changes
- No source or documentation changes detected in the diff; nothing to release.
- Package version remains unchanged at 2.12.1.
## 2025-12-15 - 2.12.1 - fix(cli)
No changes detected — no version bump required
- Current package version: 2.12.0
- No files changed in this commit
- No release or version bump necessary
## 2025-12-15 - 2.12.0 - feat(ci,test)
feat(ci/test): add test scaffold, GitLab CI, update gitea workflows and .gitignore
- Add comprehensive test/ scaffold including sample tests, test package.json, npmextra.json, test fixtures and TypeScript test sources
- Add GitLab CI pipeline (test/.gitlab-ci.yml), test-specific .npmrc, VSCode launch/settings and supporting qenv/readme files for CI/local test runs
- Update .gitea workflow YAML files to use templated placeholders and corrected container image path for CI execution
- Update .gitignore to exclude AI assistant and tooling dirs (.claude/, .serena/) and add test/.gitignore to ignore test artifacts
- Update changelog and documentation files to reflect recent formatter/commit/service changes and the new test/CI additions
## 2025-12-15 - 2.11.1 - fix(mod_format/formatters)
fix(packagejson.formatter): correctly parse scoped package dependency arguments and default to latest
- Handle scoped packages (e.g. @scope/name@version) by detecting the last '@' after the scope slash so package name and version are split correctly.
- Fallback to 'latest' when no version is provided.
- Fixes earlier incorrect splitting on every '@' which broke scoped package names.
## 2025-12-15 - 2.11.0 - feat(mod_format)
feat(mod_format): use unified diff formatter with filenames and context in BaseFormatter.displayDiff
- Replaced plugins.smartdiff.formatLineDiffForConsole(...) with plugins.smartdiff.formatUnifiedDiffForConsole(...) when both before and after are present.
- Passes originalFileName and revisedFileName as diff.path and sets context to 3 to show a unified diff with surrounding lines.
- Improves console output for multi-line diffs by using unified diff format and including file names.
## 2025-12-15 - 2.10.0 - feat(mod_format)
Refactor formatting modules to new BaseFormatter and implement concrete analyze/apply logic
- Replace generic LegacyFormatter with explicit BaseFormatter implementations for formatters: copy, gitignore, license, npmextra, packagejson, prettier, readme, templates, tsconfig (legacy.formatter.ts removed).
- General: extensive use of plugins (smartfs, path, smartnpm, smartinteract, smartobject, smartlegal), improved logging and verbose messages.
## 2025-12-15 - 2.9.0 - feat(format)
Add --diff option to format command to display file diffs; pass flag through CLI and show formatter diffs. Bump @git.zone/tsdoc to ^1.11.0.
- Add a diff boolean option to mod_format to enable showing file diffs during format operations.
- Update dependency @git.zone/tsdoc from ^1.10.2 to ^1.11.0.
## 2025-12-15 - 2.8.0 - feat(commit)
Add commit configuration and automatic pre-commit tests
- Add CommitConfig class to manage @git.zone/cli.commit settings in npmextra.json (alwaysTest, alwaysBuild).
- Add 'gitzone config services' entry to configure services via ServiceManager.
## 2025-12-14 - 2.7.0 - feat(mod_format)
Add check-only formatting with interactive diff preview; make formatting default to dry-run and extend formatting API
- Add BaseFormatter.check(), displayDiff() and displayAllDiffs() to compute and render diffs without applying changes.
- Bump dependency @push.rocks/smartdiff to ^1.1.0.
## 2025-12-14 - 2.6.1 - fix(npmextra)
Normalize npmextra.json: move tsdoc legal entry and reposition @git.zone/cli configuration
- Move TSDoc legal text into a top-level "tsdoc.legal" property in npmextra.json
- Pure configuration change (JSON structure) — no functional code changes
## 2025-12-14 - 2.6.0 - feat(mod_commit)
Add execution plan output to commit command
- Print an execution plan at the start of the commit flow (shows active options and planned steps)
- Execution plan reflects flags: auto-accept (-y), push (-p), build (-b), release (-r), --format, and target registries
## 2025-12-14 - 2.5.0 - feat(mod_standard)
Add interactive main menu and help to standard CLI module; route commands via dynamic imports
- Introduce interactive CLI menu using @push.rocks/smartinteract to prompt user for actions.
- Remove previous static template listing and logger.warn placeholder.
## 2025-12-14 - 2.4.0 - feat(cli)
Add optional build step to release flow and auto-format npmextra config when registries change
- Introduce a --build/-b flag in the commit/release flow to run 'pnpm build' before pushing/releases
- Add npmextra registry config entry (https://verdaccio.lossless.digital) to npmextra.json
## 2025-12-14 - 2.3.0 - feat(config)
Add interactive menu and help to config command, handle unknown commands, and bump dependencies
- When running the 'config' command with no arguments, show an interactive menu (via SmartInteract) to choose actions (show, add, remove, clear, access, help) instead of defaulting to 'show'.
- Update dependency: @push.rocks/smartjson -> ^6.0.0.
## 2025-12-04 - 2.2.1 - fix(commit)
Prevent auto-accept for BREAKING CHANGE commits; require manual confirmation and warn when --yes is used
- Do not auto-accept AI commit recommendations when the suggested change is a BREAKING CHANGE (major bump).
- Introduced isBreakingChange and canAutoAccept flags to centralize the auto-accept logic.
## 2025-12-02 - 2.2.0 - feat(services)
Improve services manager and configuration; switch test templates to @git.zone/tstest; bump dev dependencies and update docs
- services: Add robust ServiceConfiguration (creates .nogit/env.json with sane defaults, syncs ports from existing Docker containers, validates and can reconfigure ports)
- docs: README updates — add issue reporting/security section, AI-powered commit recommendation notes, and clarify trademark/legal wording
## 2025-11-29 - 2.1.0 - feat(mod_services)
Add global service registry and global commands for managing project containers
- Introduce GlobalRegistry class to track registered projects, their containers, ports and last activity (ts/mod_services/classes.globalregistry.ts)
- Bump dependency @push.rocks/smartfile to ^13.1.0 in package.json
## 2025-11-27 - 2.0.0 - BREAKING CHANGE(core)
Migrate filesystem to smartfs (async) and add Elasticsearch service support; refactor format/commit/meta modules
- Replace @push.rocks/smartfile usage with @push.rocks/smartfs across the codebase; all filesystem operations are now async (SmartFs.file(...).read()/write(), SmartFs.directory(...).list()/create()/delete(), etc.)
- Convert formerly synchronous helpers and APIs to async (notable: detectProjectType, getProjectName, readCurrentVersion and related version bumping logic). Callers updated accordingly.
- Add Elasticsearch support to services: new config fields (ELASTICSEARCH_*), Docker run/start/stop/logs/status handling, and ELASTICSEARCH_URL in service configuration.
- Refactor formatting subsystem: cache and rollback/backup systems removed/disabled for stability, format planner execution simplified (sequential), diff/stats reporting updated to use smartfs.
- Update package.json dependencies: bump @git.zone/tsbuild, tsrun, tstest; upgrade @push.rocks/smartfile to v13 and add @push.rocks/smartfs dependency; update @types/node.
- Update commit flow and changelog generation to use smartfs for reading/writing files and to await version/branch detection where necessary.
- Expose a SmartFs instance via plugins and adjust all mod.* plugin files to import/use smartfs where required.
- Breaking change: Public and internal APIs that previously used synchronous smartfile APIs are now asynchronous. Consumers and scripts must await these functions and use the new smartfs API.
## 2025-11-17 - 1.21.5 - fix(tsconfig)
Remove emitDecoratorMetadata from tsconfig template
- Removed the "emitDecoratorMetadata" compiler option from assets/templates/tsconfig_update/tsconfig.json
- This updates the tsconfig template to avoid emitting decorator metadata when targeting ES2022
## 2025-11-17 - 1.21.4 - fix(tsconfig template)
Remove experimentalDecorators and useDefineForClassFields from tsconfig template
- Removed experimentalDecorators option from assets/templates/tsconfig_update/tsconfig.json
- Removed useDefineForClassFields option from assets/templates/tsconfig_update/tsconfig.json
## 2025-11-17 - 1.21.3 - fix(assets/templates/multienv)
Remove unused Bun configuration template (assets/templates/multienv/bunfig.toml)
- Deleted assets/templates/multienv/bunfig.toml which previously provided Bun TypeScript decorator configuration
- No functional code changes; removes an unused asset file
## 2025-11-17 - 1.21.2 - fix(templates/multienv)
Disable useDefineForClassFields in multienv TypeScript configs to ensure decorator compatibility
- Set useDefineForClassFields = false in assets/templates/multienv/bunfig.toml to keep Bun's transpiler compatible with decorator usage
- Set "useDefineForClassFields": false in assets/templates/multienv/deno.json to ensure Deno/TypeScript compiler emits class fields compatible with decorators
## 2025-11-17 - 1.21.1 - fix(templates.multienv)
Enable checkJs in multienv Deno template to enable JS type checking
- Added "checkJs": true to compilerOptions in assets/templates/multienv/deno.json to enable JavaScript type checking for the Deno multienv template
## 2025-11-17 - 1.21.0 - feat(multienv)
Add multi-env templates enabling TypeScript decorators for Bun and Deno; rename npmextra config key to szci
- Added assets/templates/multienv/bunfig.toml to enable Bun TypeScript transpiler experimentalDecorators
- Updated npmextra.json: renamed top-level config key from "npmci" to "szci" (keeps npmGlobalTools, npmAccessLevel and npmRegistryUrl unchanged)
## 2025-11-06 - 1.20.0 - feat(commit)
Add non-interactive --yes (-y) flag to commit command to auto-accept AI recommendations and optionally push with -p
- Add -y / --yes flag to gitzone commit to auto-accept AI-generated commit recommendations without interactive prompts
- Updated CLI usage and documentation (readme.hints.md) to document the new flags
## 2025-11-05 - 1.19.9 - fix(mod_commit)
Refactor version bumping to a unified implementation for npm and Deno; remove npm-exec based helpers and add file-based version readers/updaters to avoid npm warning pollution
- Removed legacy npm/deno-specific helpers (bumpNpmVersion, syncVersionToDenoJson, bumpDenoVersion) that relied on executing npm and caused warning pollution
- Benefits: no npm warning pollution in deno.json, simpler git history, consistent behavior across project types
## 2025-11-04 - 1.19.8 - fix(package.json)
Bump @git.zone/tsdoc dependency to ^1.9.2
- Updated dependency @git.zone/tsdoc from ^1.9.1 to ^1.9.2 in package.json
## 2025-11-04 - 1.19.7 - fix(dependencies)
Bump @git.zone/tsdoc to ^1.9.1
- Updated package.json dependency @git.zone/tsdoc from ^1.9.0 to ^1.9.1
## 2025-11-04 - 1.19.6 - fix(cli)
Bump @git.zone/tsdoc dependency to ^1.9.0
- Updated dependency @git.zone/tsdoc from ^1.8.3 to ^1.9.0 in package.json
## 2025-11-04 - 1.19.5 - fix(cli)
Bump @git.zone/tsdoc to ^1.8.3 and add local .claude settings for allowed permissions
- Updated dependency @git.zone/tsdoc from ^1.8.2 to ^1.8.3
- Added .claude/settings.local.json to declare allowed permissions for local tooling (Bash commands, Docker, npm, WebFetch and MCP actions)
## 2025-11-03 - 1.19.3 - fix(tsdoc)
Bump @git.zone/tsdoc to ^1.8.0 and add .claude local settings
- Upgrade dependency @git.zone/tsdoc from ^1.6.1 to ^1.8.0 in package.json
- Add .claude/settings.local.json for local assistant permissions/configuration
## 2025-11-03 - 1.19.2 - fix(tsdoc)
Bump @git.zone/tsdoc to ^1.6.1 and add .claude/settings.local.json
- Update dependency @git.zone/tsdoc from ^1.6.0 to ^1.6.1
- Add .claude/settings.local.json to include local Claude settings/permissions
## 2025-11-02 - 1.19.1 - fix(dependencies)
Bump dependencies and add local Claude settings
- Bump devDependencies: @git.zone/tsbuild -> ^2.7.1, @git.zone/tsrun -> ^1.6.2, @git.zone/tstest -> ^2.7.0
- Add .claude/settings.local.json (local project permissions/settings file)
## 2025-10-23 - 1.19.0 - feat(mod_commit)
Add CLI UI helpers and improve commit workflow with progress, recommendations and summary
- Introduce ts/mod_commit/mod.ui.ts: reusable CLI UI helpers (pretty headers, sections, AI recommendation box, step printer, commit summary and helpers for consistent messaging).
- Add .claude/settings.local.json: local permissions configuration for development tooling.
## 2025-10-23 - 1.18.9 - fix(mod_commit)
Stage and commit deno.json when bumping/syncing versions and create/update git tags
- bumpDenoVersion now creates a Smartshell instance and runs git add deno.json, git commit -m "v<newVersion>", and git tag v<newVersion> to persist the version bump
- Added informative logger messages after creating commits and tags
## 2025-10-23 - 1.18.8 - fix(mod_commit)
Improve commit workflow: detect project type and current branch; add robust version bump helpers for npm/deno
- Add mod_commit/mod.helpers.ts with utilities: detectCurrentBranch(), detectProjectType(), bumpProjectVersion(), bumpDenoVersion(), bumpNpmVersion(), syncVersionToDenoJson(), and calculateNewVersion()
- Add local Claude settings file (.claude/settings.local.json) (editor/CI config) — no code behavior change but included in diff
## 2025-09-07 - 1.18.7 - fix(claude)
Add .claude local settings to whitelist dev tool permissions
- Add .claude/settings.local.json to configure allowed permissions for local AI/tooling helpers (Bash commands, WebFetch, and mcp_serena actions).
- Disable enableAllProjectMcpServers (set to false) to limit automatic project MCP server usage.
## 2025-09-07 - 1.18.6 - fix(deps)
Bump dependency versions and add local Claude settings
- Updated devDependencies: @git.zone/tsbuild ^2.6.4 → ^2.6.8, @git.zone/tstest ^2.3.4 → ^2.3.6, @push.rocks/smartfile ^11.2.5 → ^11.2.7
- Added .claude/settings.local.json to configure local Claude permissions/settings
## 2025-08-17 - 1.18.5 - fix(dependencies)
Bump smartshell and smartscaf versions; add .claude local settings
- Update @push.rocks/smartshell from ^3.2.4 to ^3.3.0 in package.json
- Add .claude/settings.local.json for local assistant permissions/configuration
## 2025-08-17 - 1.18.4 - fix(cli)
Update dependencies, add local Claude settings, and update gitignore template
- Bump several dependencies: @git.zone/tsbuild -> ^2.6.4, @git.zone/tspublish -> ^1.10.1, @git.zone/tstest -> ^2.3.4, @push.rocks/smartfile -> ^11.2.5, @push.rocks/npmextra -> ^5.3.3, @push.rocks/smartchok -> ^1.1.1, @push.rocks/smartlog -> ^3.1.8, @push.rocks/smartpath -> ^6.0.0, prettier -> ^3.6.2
- Add pnpm onlyBuiltDependencies entries: esbuild and mongodb-memory-server
## 2025-08-16 - 1.18.3 - fix(services)
Simplify S3 endpoint handling in ServiceConfiguration to store host only
- S3_ENDPOINT now stores the raw host (e.g. 'localhost') instead of a full URL with protocol and port.
- Consumers that previously relied on S3_ENDPOINT containing protocol and port should now construct the full endpoint URL using S3_USESSL, S3_HOST and S3_PORT.
## 2025-08-16 - 1.18.1 - fix(services)
Improve services and commit flow: stop AiDoc, use silent docker inspect, sync ports with logging, fix config loading, and bump deps
- Ensure AiDoc is stopped after building commit recommendation to avoid resource leaks
- Add local Claude settings file (.claude/settings.local.json) with development permissions
## 2025-08-16 - 1.18.0 - feat(services)
Add Docker port mapping sync and reconfigure workflow for local services
- Add getPortMappings to DockerContainer to extract port bindings from docker inspect output
- Add .claude/settings.local.json (local permissions config) to repository
## 2025-08-15 - 1.17.5 - fix(services)
Update S3 credentials naming and add S3_ENDPOINT/S3_USESSL support for improved MinIO integration
- Replaced S3_USER/S3_PASS with S3_ACCESSKEY/S3_SECRETKEY in ServiceConfiguration
- Added .claude/settings.local.json for local permission settings
## 2025-08-15 - 1.17.4 - fix(services)
Update S3 credentials naming and add S3_ENDPOINT/S3_USESSL support for improved MinIO integration
- Replaced S3_USER/S3_PASS with S3_ACCESSKEY/S3_SECRETKEY in ServiceConfiguration
- Updated ServiceManager to use new credential names in container setup and logging
## 2025-08-15 - 1.17.3 - fix(serviceconfig)
Update service configuration to include dynamic MongoDB connection string and add local permissions settings
- Added .claude/settings.local.json for local permissions configuration
- Updated ServiceConfiguration to compute and update MONGODB_URL based on current config values
## 2025-08-15 - 1.17.2 - fix(ci-test-services)
Update CI/CD configurations, test settings, and Docker service for MongoDB.
- Add .claude/settings.local.json with updated permission settings
- Fix MongoDB Docker container command by adding '--bind_ip_all' for proper network binding
## 2025-08-15 - 1.17.1 - fix(services)
Improve services module logging and enhance MongoDB Compass integration
- Refactored services module to use centralized logger from gitzone.logging.ts
- Consistent logging across all service commands
## 2025-08-14 - 1.17.0 - feat(services)
Add comprehensive development services management for MongoDB and MinIO containers
- Implemented `gitzone services` command for managing local development services
- Interactive confirmations for destructive operations
## 2025-08-08 - 1.16.10 - fix(format)
Improve concurrency control in caching and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing.
- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
+1 -1
View File
@@ -1,4 +1,4 @@
Copyright (c) 2015 Task Venture Capital GmbH (hello@lossless.com) Copyright (c) 2015 Task Venture Capital GmbH (hello@task.vc)
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal
+32 -38
View File
@@ -1,9 +1,9 @@
{ {
"name": "@git.zone/cli", "name": "@git.zone/cli",
"private": false, "private": false,
"version": "2.10.0", "version": "2.16.1",
"description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.", "description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
"main": "dist_ts/index.ts", "main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts", "typings": "dist_ts/index.d.ts",
"type": "module", "type": "module",
"bin": { "bin": {
@@ -11,21 +11,21 @@
"gzone": "./cli.js" "gzone": "./cli.js"
}, },
"scripts": { "scripts": {
"test": "(npm run clean && npm run prepareTest && npm run testCli && npm run testFormat && npm run testCommit && npm run testDeprecate && npm run testVersion && npm run testReadme && npm run testUpdate && npm run testTemplateNpm && npm run testTemplateLit) && rm -rf test", "test": "(pnpm run clean && pnpm run prepareTest && pnpm run testCli && pnpm run testFormat && pnpm run testCommit && pnpm run testDeprecate && pnpm run testVersion && pnpm run testReadme && pnpm run testUpdate && pnpm run testTemplateNpm && pnpm run testTemplateLit) && rm -rf .nogit/test",
"build": "tsbuild tsfolders", "build": "tsbuild tsfolders",
"clean": "(rm -rf test/)", "clean": "(rm -rf .nogit/test/)",
"prepareTest": "(git clone https://gitlab.com/sandboxzone/sandbox-npmts.git test/)", "prepareTest": "(mkdir -p .nogit && git clone https://gitlab.com/sandboxzone/sandbox-npmts.git .nogit/test/)",
"testBuild": "npm run build && rm -r dist/", "testBuild": "pnpm run build && rm -r dist/",
"testCli": "(cd test && node ../cli.ts.js)", "testCli": "(cd .nogit/test && node ../../cli.ts.js)",
"testCommit": "(cd test && node ../cli.ts.js commit)", "testCommit": "(cd .nogit/test && node ../../cli.ts.js commit)",
"testDeprecate": "(cd test && node ../cli.ts.js deprecate)", "testDeprecate": "(cd .nogit/test && node ../../cli.ts.js deprecate)",
"testOpen": "(cd test && node ../cli.ts.js open ci)", "testOpen": "(cd .nogit/test && node ../../cli.ts.js open ci)",
"testReadme": "(cd test && node ../cli.ts.js readme)", "testReadme": "(cd .nogit/test && node ../../cli.ts.js readme)",
"testFormat": "(cd test && node ../cli.ts.js format)", "testFormat": "(cd .nogit/test && node ../../cli.ts.js format)",
"testTemplateNpm": "(rm -rf test/testtemplate_npm/ && mkdir test/testtemplate_npm && cd test/testtemplate_npm && node ../../cli.ts.js template npm)", "testTemplateNpm": "(rm -rf .nogit/test/testtemplate_npm/ && mkdir -p .nogit/test/testtemplate_npm && cd .nogit/test/testtemplate_npm && node ../../../cli.ts.js template npm)",
"testTemplateLit": "(rm -rf test/testtemplate_lit/ && mkdir test/testtemplate_lit && cd test/testtemplate_lit && node ../../cli.ts.js template lit)", "testTemplateLit": "(rm -rf .nogit/test/testtemplate_lit/ && mkdir -p .nogit/test/testtemplate_lit && cd .nogit/test/testtemplate_lit && node ../../../cli.ts.js template lit)",
"testUpdate": "(cd test && node ../cli.ts.js update)", "testUpdate": "(cd .nogit/test && node ../../cli.ts.js update)",
"testVersion": "(cd test && node ../cli.ts.js -v)", "testVersion": "(cd .nogit/test && node ../../cli.ts.js -v)",
"buildDocs": "tsdoc" "buildDocs": "tsdoc"
}, },
"repository": { "repository": {
@@ -57,46 +57,40 @@
}, },
"homepage": "https://gitlab.com/gitzone/private/gitzone#readme", "homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
"devDependencies": { "devDependencies": {
"@git.zone/tsbuild": "^4.0.2", "@git.zone/tsbuild": "^4.3.0",
"@git.zone/tsrun": "^2.0.1", "@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.3", "@git.zone/tstest": "^3.3.2",
"@push.rocks/smartdelay": "^3.0.5", "@types/node": "^25.4.0"
"@push.rocks/smartinteract": "^2.0.16",
"@push.rocks/smartnetwork": "^4.4.0",
"@push.rocks/smartshell": "^3.3.0",
"@types/node": "^25.0.2"
}, },
"dependencies": { "dependencies": {
"@git.zone/tsdoc": "^1.11.3", "@git.zone/tsdoc": "^2.0.0",
"@git.zone/tspublish": "^1.10.3", "@git.zone/tspublish": "^1.11.2",
"@push.rocks/commitinfo": "^1.0.12", "@push.rocks/commitinfo": "^1.0.12",
"@push.rocks/early": "^4.0.4", "@push.rocks/early": "^4.0.4",
"@push.rocks/gulp-function": "^3.0.7",
"@push.rocks/lik": "^6.2.2",
"@push.rocks/npmextra": "^5.3.3",
"@push.rocks/projectinfo": "^5.0.2", "@push.rocks/projectinfo": "^5.0.2",
"@push.rocks/smartcli": "^4.0.19", "@push.rocks/smartcli": "^4.0.20",
"@push.rocks/smartconfig": "^6.0.1",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartdiff": "^1.1.0", "@push.rocks/smartdiff": "^1.1.0",
"@push.rocks/smartfile": "^13.1.2", "@push.rocks/smartfile": "^13.1.2",
"@push.rocks/smartfs": "^1.2.0", "@push.rocks/smartfs": "^1.5.0",
"@push.rocks/smartgulp": "^3.0.4", "@push.rocks/smartinteract": "^2.0.16",
"@push.rocks/smartjson": "^6.0.0", "@push.rocks/smartjson": "^6.0.0",
"@push.rocks/smartlegal": "^1.0.27", "@push.rocks/smartlegal": "^1.0.27",
"@push.rocks/smartlog": "^3.1.10", "@push.rocks/smartlog": "^3.2.1",
"@push.rocks/smartlog-destination-local": "^9.0.2", "@push.rocks/smartlog-destination-local": "^9.0.2",
"@push.rocks/smartmustache": "^3.0.2", "@push.rocks/smartmustache": "^3.0.2",
"@push.rocks/smartnetwork": "^4.4.0",
"@push.rocks/smartnpm": "^2.0.6", "@push.rocks/smartnpm": "^2.0.6",
"@push.rocks/smartobject": "^1.0.12", "@push.rocks/smartobject": "^1.0.12",
"@push.rocks/smartopen": "^2.0.0", "@push.rocks/smartopen": "^2.0.0",
"@push.rocks/smartpath": "^6.0.0", "@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3", "@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartscaf": "^4.0.19", "@push.rocks/smartscaf": "^4.0.21",
"@push.rocks/smartstream": "^3.2.5", "@push.rocks/smartshell": "^3.3.7",
"@push.rocks/smartunique": "^3.0.9", "@push.rocks/smartunique": "^3.0.9",
"@push.rocks/smartupdate": "^2.0.6", "@push.rocks/smartupdate": "^2.0.6",
"@types/through2": "^2.0.41", "prettier": "^3.8.1"
"prettier": "^3.7.4",
"through2": "^4.0.2"
}, },
"files": [ "files": [
"ts/**/*", "ts/**/*",
@@ -107,7 +101,7 @@
"dist_ts_web/**/*", "dist_ts_web/**/*",
"assets/**/*", "assets/**/*",
"cli.js", "cli.js",
"npmextra.json", ".smartconfig.json",
"readme.md" "readme.md"
], ],
"browserslist": [ "browserslist": [
+865 -1611
View File
File diff suppressed because it is too large Load Diff
+52 -53
View File
@@ -23,10 +23,10 @@ Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local
### Configuration Management ### Configuration Management
- Uses `npmextra.json` for all tool configuration - Uses `.smartconfig.json` for tool configuration
- Configuration stored under `gitzone` key in npmextra - CLI settings live under the `@git.zone/cli` namespace
- No separate `.gitzonerc` file - everything in npmextra.json - Agent and non-interactive defaults now belong under `@git.zone/cli.cli`
- Project type and module metadata also stored in npmextra - Project type, module metadata, release settings, commit defaults, and format settings live in the same file
### Format Module (`mod_format`) - SIGNIFICANTLY ENHANCED ### Format Module (`mod_format`) - SIGNIFICANTLY ENHANCED
@@ -38,7 +38,7 @@ The format module is responsible for project standardization:
2. **copy** - File copying with glob patterns (fully implemented) 2. **copy** - File copying with glob patterns (fully implemented)
3. **gitignore** - Creates/updates .gitignore from templates 3. **gitignore** - Creates/updates .gitignore from templates
4. **license** - Checks dependency licenses for compatibility 4. **license** - Checks dependency licenses for compatibility
5. **npmextra** - Manages project metadata and configuration 5. **smartconfig** - Manages project metadata and configuration
6. **packagejson** - Formats and updates package.json 6. **packagejson** - Formats and updates package.json
7. **prettier** - Applies code formatting with batching 7. **prettier** - Applies code formatting with batching
8. **readme** - Ensures readme files exist 8. **readme** - Ensures readme files exist
@@ -84,42 +84,44 @@ The format module is responsible for project standardization:
1. **Plan → Action Workflow**: Shows changes before applying them 1. **Plan → Action Workflow**: Shows changes before applying them
2. **Rollback Mechanism**: Full backup and restore on failures 2. **Rollback Mechanism**: Full backup and restore on failures
3. **Enhanced Configuration**: Granular control via npmextra.json 3. **Enhanced Configuration**: Granular control via `.smartconfig.json`
4. **Better Error Handling**: Detailed errors with recovery options 4. **Better Error Handling**: Detailed errors with recovery options
5. **Performance Optimizations**: Parallel execution and caching 5. **Performance Optimizations**: Parallel execution and caching
6. **Reporting**: Diff views, statistics, verbose logging 6. **Reporting**: Diff views, statistics, verbose logging
7. **Architecture**: Clean separation of concerns with new classes 7. **Architecture**: Clean separation of concerns with new classes
8. **Unified Version Bumping**: Self-managed version updates eliminating npm warning pollution in deno.json 8. **Split Commit/Release Workflows**: `commit` creates source commits; `release` owns versioning, tags, and artifact publishing
### Version Bumping Refactor (Latest) ### Commit/Release Workflow Refactor (Latest)
The commit module's version bumping has been refactored to eliminate npm command dependencies: The commit module no longer bumps versions, creates tags, or publishes packages. Release work now belongs to `gitzone release`:
**Changes:** **Changes:**
- Removed `bumpNpmVersion()` - was causing npm warnings to pollute deno.json
- Removed `syncVersionToDenoJson()` - no longer needed with unified approach - `gitzone commit` analyzes changes, updates `changelog.md` `Pending`, commits, and optionally pushes.
- Removed separate `bumpDenoVersion()` - replaced by unified implementation - `gitzone release` reads `Pending`, bumps versions, moves changelog entries into a version section, tags, pushes, and publishes configured artifacts.
- Added `readCurrentVersion()` helper - reads from either package.json or deno.json - Commit workflow steps are configured in `.smartconfig.json` under `@git.zone/cli.commit.steps`.
- Added `updateVersionFile()` helper - updates JSON files directly - Smartconfig schema versioning lives at `@git.zone/cli.schemaVersion`; run `gitzone config migrate <version>` for targeted migrations.
- Unified `bumpProjectVersion()` - handles npm/deno/both with single clean code path - Release publishing is target-based under `@git.zone/cli.release.targets`.
- NPM registries only live under `@git.zone/cli.release.targets.npm.registries`.
**Benefits:** **Benefits:**
- No npm warning pollution in version fields
- Full control over version bumping process - Commit is safer and has no publishing side effects.
- Simpler git history (no amending, no force-tagging) - Multiple source commits can accumulate into one release via `Pending`.
- Same code path for all project types - Per-artifact release results can distinguish published, already-published, skipped, and failed targets.
- Reuses existing `calculateNewVersion()` function
### Auto-Accept Flag for Commits ### Auto-Accept Flag for Commits
The commit module now supports `-y/--yes` flag for non-interactive commits: The commit module now supports `-y/--yes` flag for non-interactive commits:
**Usage:** **Usage:**
- `gitzone commit -y` - Auto-accepts AI recommendations without prompts - `gitzone commit -y` - Auto-accepts AI recommendations without prompts
- `gitzone commit -yp` - Auto-accepts and pushes to origin - `gitzone commit -yp` - Auto-accepts and pushes to origin
- Separate `-p/--push` flag controls push behavior - Separate `-p/--push` flag controls push behavior
**Implementation:** **Implementation:**
- Creates AnswerBucket programmatically when `-y` flag detected - Creates AnswerBucket programmatically when `-y` flag detected
- Preserves all UI output for transparency - Preserves all UI output for transparency
- Fully backward compatible with interactive mode - Fully backward compatible with interactive mode
@@ -128,7 +130,7 @@ The commit module now supports `-y/--yes` flag for non-interactive commits:
## Development Tips ## Development Tips
- Always check readme.plan.md for ongoing improvement plans - Always check readme.plan.md for ongoing improvement plans
- Use npmextra.json for any new configuration options - Use `.smartconfig.json` for any new configuration options
- Keep modules focused and single-purpose - Keep modules focused and single-purpose
- Maintain the existing plugin pattern for dependencies - Maintain the existing plugin pattern for dependencies
- Test format operations on sample projects before deploying - Test format operations on sample projects before deploying
@@ -140,30 +142,18 @@ The commit module now supports `-y/--yes` flag for non-interactive commits:
```json ```json
{ {
"gitzone": { "@git.zone/cli": {
"cli": {
"interactive": true,
"output": "human",
"checkUpdates": true
},
"format": { "format": {
"interactive": true, "interactive": true,
"parallel": true,
"showStats": true, "showStats": true,
"cache": {
"enabled": true,
"clean": true
},
"rollback": {
"enabled": true,
"autoRollbackOnError": true,
"backupRetentionDays": 7
},
"modules": { "modules": {
"skip": ["prettier"], "skip": ["prettier"],
"only": [], "only": []
"order": []
},
"licenses": {
"allowed": ["MIT", "Apache-2.0"],
"exceptions": {
"some-package": "GPL-3.0"
}
} }
} }
} }
@@ -178,6 +168,9 @@ The commit module now supports `-y/--yes` flag for non-interactive commits:
# Interactive commit (default) # Interactive commit (default)
gitzone commit gitzone commit
# Read-only recommendation
gitzone commit recommend --json
# Auto-accept AI recommendations (no prompts) # Auto-accept AI recommendations (no prompts)
gitzone commit -y gitzone commit -y
gitzone commit --yes gitzone commit --yes
@@ -197,11 +190,14 @@ gitzone commit --format
# Basic format # Basic format
gitzone format gitzone format
# Read-only JSON plan
gitzone format plan --json
# Dry run to preview changes # Dry run to preview changes
gitzone format --dry-run gitzone format --dry-run
# Non-interactive mode # Non-interactive apply
gitzone format --yes gitzone format --write --yes
# Plan only (no execution) # Plan only (no execution)
gitzone format --plan-only gitzone format --plan-only
@@ -218,11 +214,10 @@ gitzone format --verbose
# Detailed diff views # Detailed diff views
gitzone format --detailed gitzone format --detailed
# Rollback operations # Inspect config for agents and scripts
gitzone format --rollback gitzone config show --json
gitzone format --rollback <operation-id> gitzone config set cli.output json
gitzone format --list-backups gitzone config get release.targets.npm.accessLevel
gitzone format --clean-backups
``` ```
## Common Issues (Now Resolved) ## Common Issues (Now Resolved)
@@ -248,10 +243,12 @@ gitzone format --clean-backups
The project has been fully migrated from @push.rocks/smartfile v11 to v13, which introduced a major breaking change where filesystem operations were split into two separate packages: The project has been fully migrated from @push.rocks/smartfile v11 to v13, which introduced a major breaking change where filesystem operations were split into two separate packages:
**Packages:** **Packages:**
- `@push.rocks/smartfile` v13.0.1 - File representation classes (SmartFile, StreamFile, VirtualDirectory) - `@push.rocks/smartfile` v13.0.1 - File representation classes (SmartFile, StreamFile, VirtualDirectory)
- `@push.rocks/smartfs` v1.1.0 - Filesystem operations (read, write, exists, stat, etc.) - `@push.rocks/smartfs` v1.1.0 - Filesystem operations (read, write, exists, stat, etc.)
**Key API Changes:** **Key API Changes:**
1. **File Reading**: 1. **File Reading**:
- Old: `plugins.smartfile.fs.toStringSync(path)` or `plugins.smartfile.fs.toObjectSync(path)` - Old: `plugins.smartfile.fs.toStringSync(path)` or `plugins.smartfile.fs.toObjectSync(path)`
- New: `await plugins.smartfs.file(path).encoding('utf8').read()` + JSON.parse if needed - New: `await plugins.smartfs.file(path).encoding('utf8').read()` + JSON.parse if needed
@@ -290,13 +287,15 @@ The project has been fully migrated from @push.rocks/smartfile v11 to v13, which
All sync methods must become async. Functions that were previously synchronous (like `getProjectName()`) now return `Promise<T>` and must be awaited. All sync methods must become async. Functions that were previously synchronous (like `getProjectName()`) now return `Promise<T>` and must be awaited.
**Affected Modules:** **Affected Modules:**
- ts/mod_format/* (largest area - 15+ files)
- ts/mod_commit/* (version bumping) - ts/mod_format/\* (largest area - 15+ files)
- ts/mod_services/* (configuration management) - ts/mod_commit/\* and ts/mod_release/\* (commit/release workflows)
- ts/mod_meta/* (meta repository management) - ts/mod_services/\* (configuration management)
- ts/mod_standard/* (template listing) - ts/mod_meta/\* (meta repository management)
- ts/mod_template/* (template operations) - ts/mod_standard/\* (template listing)
- ts/mod_template/\* (template operations)
**Previous API Changes:** **Previous API Changes:**
- smartnpm requires instance creation: `new NpmRegistry()` - smartnpm requires instance creation: `new NpmRegistry()`
- Type imports use `import type` for proper verbatim module syntax - Type imports use `import type` for proper verbatim module syntax
+302 -431
View File
@@ -1,539 +1,410 @@
# @git.zone/cli 🚀 # @git.zone/cli 🚀
**The ultimate CLI toolbelt for modern TypeScript development workflows** `@git.zone/cli` is the development workflow CLI behind the `gitzone` and `gzone` commands. It helps TypeScript-heavy teams keep projects tidy, create semantic source commits, manage local Docker-backed services, scaffold new modules, and release software through explicit, target-based release configuration.
[![npm version](https://img.shields.io/npm/v/@git.zone/cli.svg)](https://www.npmjs.com/package/@git.zone/cli) It is opinionated where that saves time: source commits and releases are separate, changelog entries flow through a standard `Pending` section, project config lives in `.smartconfig.json`, and release targets make side effects visible before they happen.
[![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT)
## 🎯 What is gitzone?
gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, seamless version control, and development service orchestration. Whether you're bootstrapping a new TypeScript project, maintaining code quality, managing complex multi-repository setups, or spinning up local development databases, gitzone has got you covered.
## Issue Reporting and Security ## Issue Reporting and Security
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly. For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
## 🏃‍♂️ Quick Start ## Install
### Installation
```bash ```bash
# Install globally via npm
npm install -g @git.zone/cli
# Or with pnpm (recommended)
pnpm add -g @git.zone/cli pnpm add -g @git.zone/cli
``` ```
Once installed, you can use either `gitzone` or the shorter `gzone` command from anywhere in your terminal. After installation, both binaries point to the same CLI:
### Your First Commands
```bash ```bash
# Create a new TypeScript npm package gitzone --help
gitzone template npm gzone --help
```
# Format your entire codebase ## The Big Idea
`gitzone commit` handles source history.
`gitzone release` handles release transactions.
That split is intentional. A commit should not unexpectedly publish npm packages, push Docker images, or trigger remote release pipelines. A release should clearly show which targets it will publish to.
## Quick Start
```bash
# Preview project standardization work
gitzone format gitzone format
# Start local MongoDB and MinIO services # Apply formatting changes
gitzone services start gitzone format --write
# Create a semantic commit with AI-powered suggestions # Create a semantic source commit
gitzone commit
```
## 🛠️ Core Features
### 🐳 Development Services Management
Effortlessly manage local MongoDB and MinIO (S3-compatible) services for your development environment:
```bash
gitzone services [command]
```
**Available commands:**
- **`start [service]`** - Start services (mongo|s3|all)
- **`stop [service]`** - Stop services (mongo|s3|all)
- **`restart [service]`** - Restart services
- **`status`** - Show current service status
- **`config`** - Display configuration details
- **`compass`** - Get MongoDB Compass connection string with network IP
- **`logs [service] [lines]`** - View service logs
- **`remove`** - Remove containers (preserves data)
- **`clean`** - Remove containers AND data (⚠️ destructive)
**Key features:**
- 🎲 **Smart port assignment** - Automatically assigns random ports (20000-30000) to avoid conflicts
- 📦 **Project isolation** - Each project gets its own containers with unique names
- 💾 **Data persistence** - Data stored in `.nogit/` directories survives container restarts
- 🔗 **MongoDB Compass support** - Instantly get connection strings for GUI access
- 🌐 **Network IP detection** - Automatically detects your local network IP for remote connections
- ⚙️ **Auto-configuration** - Creates `.nogit/env.json` with smart defaults
**Example workflow:**
```bash
# Start all services for your project
gitzone services start
# Check what's running
gitzone services status
# Get MongoDB Compass connection string
gitzone services compass
# Output: mongodb://defaultadmin:defaultpass@192.168.1.100:27018/myproject?authSource=admin
# View MongoDB logs
gitzone services logs mongo 50
# Stop services when done
gitzone services stop
```
The services are configured via `.nogit/env.json`, which is automatically created with secure defaults and random ports for each project.
### 📦 Project Templates
Instantly scaffold production-ready projects with best practices built-in:
```bash
gitzone template [template-name]
```
**Available templates:**
- **`npm`** - TypeScript npm package with testing, CI/CD, and full tooling
- **`service`** - Microservice architecture with Docker support
- **`website`** - Modern web application with LitElement and service workers
- **`wcc`** - Web Component Collection for reusable UI components
Each template comes pre-configured with:
- ✅ TypeScript with modern configurations
- ✅ Automated testing setup with `@git.zone/tstest`
- ✅ CI/CD pipelines (GitLab/GitHub)
- ✅ Code formatting and linting
- ✅ Documentation structure
### 🎨 Intelligent Code Formatting
The most powerful feature of gitzone - automatically format and standardize your entire codebase:
```bash
# Preview changes without applying them
gitzone format --dry-run
# Format with automatic approval
gitzone format --yes
# Save formatting plan for later execution
gitzone format --save-plan format-plan.json
# Execute a saved plan
gitzone format --from-plan format-plan.json
# Enable verbose output for debugging
gitzone format --verbose
```
**Format features:**
- 🔄 **Smart caching** - Only processes changed files
- 🛡️ **Rollback support** - Undo formatting changes if needed
- 📊 **Detailed reporting** - See exactly what changed
- ⚡ **Parallel execution** - Format multiple files simultaneously
- 🎯 **Module-specific formatting** - Target specific formatters
**Rollback capabilities:**
```bash
# List all available backups
gitzone format --list-backups
# Rollback to the last operation
gitzone format --rollback
# Rollback to a specific operation
gitzone format --rollback [operation-id]
# Clean old backups
gitzone format --clean-backups
```
**Formatters included:**
- **Prettier** - JavaScript/TypeScript code formatting
- **License** - Ensure proper licensing
- **Package.json** - Standardize package configurations
- **Tsconfig** - TypeScript configuration optimization
- **Readme** - Documentation formatting
- **Gitignore** - Repository ignore rules
- **Templates** - Project template updates
- **Npmextra** - Extended npm configurations
- **Cleanup** - Removes obsolete files (yarn.lock, package-lock.json, tslint.json, etc.)
### 🔀 Semantic Commits & Versioning
Create standardized commits with AI-powered suggestions that automatically handle versioning:
```bash
# Interactive commit with AI recommendations
gitzone commit gitzone commit
# Auto-accept AI recommendations # Preview the configured release transaction
gitzone release --plan
# Release pending changelog entries to configured targets
gitzone release
```
## Commands
| Command | Purpose |
| --- | --- |
| `commit` | Analyze changes and create one semantic source commit |
| `release` | Turn pending changelog entries into a versioned release and publish targets |
| `format` | Plan or apply project formatting and standardization |
| `config` | Inspect, update, and migrate `.smartconfig.json` |
| `services` | Manage local MongoDB, MinIO, and Elasticsearch containers |
| `tools` | Manage the global `@git.zone` toolchain |
| `template` | Scaffold projects from built-in templates |
| `meta` | Manage multi-repository workspaces |
| `open` | Open repository assets like CI pages |
| `docker` | Run Docker maintenance tasks |
| `deprecate` | Deprecate npm packages across registries |
| `start` | Prepare an existing project for local work |
| `helpers` | Run small helper utilities |
Global flags include `--help`, `--json`, `--plain`, `--agent`, `--no-interactive`, and `--no-check-updates`.
## Toolchain Management
`gitzone tools` replaces the former `gtools` command from `@git.zone/tools`. It manages globally installed `@git.zone` development tools through pnpm.
```bash
# Check installed @git.zone tools and update outdated packages
gitzone tools update
# Update without prompts
gitzone tools update -y
# Install missing managed @git.zone tools
gitzone tools install
```
`gitzone tools update` checks `@git.zone/cli` first. If the CLI itself needs an update, it updates `@git.zone/cli` and asks you to rerun the command before updating the rest of the toolchain.
## Commit Workflow
`gitzone commit` creates one semantic source commit. It does not bump versions, create tags, publish packages, or push Docker images.
```bash
# Interactive semantic commit
gitzone commit
# Read-only AI recommendation
gitzone commit recommend --json
# Auto-accept safe recommendations
gitzone commit -y gitzone commit -y
# Auto-accept and push # Auto-accept, test, build, and push
gitzone commit -y -p gitzone commit -ytbp
# Show the resolved workflow without mutating anything
gitzone commit --plan
``` ```
Features: The commit flow:
- 🤖 **AI-powered analysis** - Analyzes your changes and suggests commit type, scope, and message 1. Analyze the working tree.
- 📝 Interactive commit message builder with smart defaults 2. Suggest commit type, scope, and message.
- 🏷️ Automatic version bumping (major/minor/patch) 3. Write a human-readable entry into `changelog.md` under `## Pending`.
- 📜 Changelog generation 4. Stage and create one semantic source commit.
- 🚀 Optional auto-push to origin 5. Optionally run formatting, tests, build, and push based on flags or config.
- 🎯 Conventional commit compliance
The commit wizard guides you through: Commit flags:
1. **Type selection** (fix/feat/BREAKING CHANGE) with AI recommendation | Flag | Meaning |
2. **Scope definition** (component/module affected) | --- | --- |
3. **Description crafting** | `-y`, `--yes` | Auto-accept safe recommendations |
4. **Version bump determination** | `-t`, `--test` | Add test step |
| `-b`, `--build` | Add build step |
| `-p`, `--push` | Push after the source commit |
| `-f`, `--format` | Run `gitzone format --write` before commit |
| `--plan` | Show resolved workflow only |
### 🏗️ Meta Repository Management `-r` is intentionally not part of commit anymore. Use `gitzone release`.
Manage multiple related repositories as a cohesive unit: ## Release Workflow
`gitzone release` performs the release core once, then publishes to configured targets.
The release core is not configurable plumbing. It always follows the same professional release transaction:
1. Run configured preflight checks.
2. Read `changelog.md` `## Pending` entries.
3. Infer or accept a semver bump.
4. Update version files and baked commit info.
5. Move pending changelog entries into the new version section.
6. Create the local release commit.
7. Create the local release tag.
Targets decide what happens after that:
| Target | What it does |
| --- | --- |
| `git` | Pushes the release commit and tags, often triggering remote CI release builds |
| `npm` | Publishes the package to configured npm registries |
| `docker` | Builds and pushes configured Docker images |
```bash ```bash
# Initialize a meta repository # Preview the resolved release plan
gitzone meta init gitzone release --plan
# Add a sub-project # Release to configured targets
gitzone meta add [name] [git-url] gitzone release
# Update all sub-projects # Release only to npm
gitzone meta update gitzone release --target npm
# Remove a sub-project # Release only to git and Docker
gitzone meta remove [name] gitzone release --target git,docker
# Skip package/container publishing and keep only git target
gitzone release --no-publish
# Override inferred semver level
gitzone release --minor
``` ```
Perfect for: Release flags:
- Monorepo management | Flag | Meaning |
- Multi-package projects | --- | --- |
- Coordinated deployments | `-y`, `--yes` | Run without interactive confirmation |
- Synchronized versioning | `-t`, `--test` | Enable preflight tests |
| `-b`, `--build` | Enable preflight build |
| `-p`, `--push` | Enable the `git` target |
| `--target <csv>` | Use only selected targets, e.g. `git,npm` |
| `--npm` | Enable the `npm` target |
| `--docker` | Enable the `docker` target |
| `--no-publish` | Keep release core and `git` target only |
| `--no-build` | Disable preflight build for this run |
| `--major`, `--minor`, `--patch` | Override inferred semver level |
| `--plan` | Show resolved workflow only |
### 🐳 Docker Management ## Standard Changelog
Streamline your Docker workflow: The changelog is convention-based and intentionally not configured.
```bash `gitzone commit` appends entries to:
# Clean up all Docker resources
gitzone docker prune ```markdown
## Pending
``` ```
This command removes: `gitzone release` moves those pending entries into a dated version section:
- Stopped containers ```markdown
- Unused images ## 2026-05-10 - 2.15.0
- Dangling volumes
- Unused networks
### 🔗 Quick CI/CD Access
Jump directly to your CI/CD configurations:
```bash
# Open CI/CD settings
gitzone open ci
# Open pipelines view
gitzone open pipelines
``` ```
Works with GitLab repositories to provide instant access to your deployment configurations. The standard buckets are `Breaking Changes`, `Features`, `Fixes`, `Documentation`, and `Maintenance`.
### 📝 Package Deprecation ## Configuration
Smoothly transition users from old to new packages: All CLI config lives under `@git.zone/cli` in `.smartconfig.json`.
```bash
gitzone deprecate
```
Interactive wizard for:
- Setting deprecation notices
- Guiding users to replacements
- Updating registry metadata
- Coordinating migration paths
### 🚦 Project Initialization
Prepare existing projects for development:
```bash
gitzone start
```
Automatically:
- Checks out master branch
- Pulls latest changes
- Installs dependencies
- Sets up development environment
### 🔧 Helper Utilities
Quick utilities for common tasks:
```bash
# Generate a unique short ID
gitzone helpers shortid
```
## 📋 Configuration
### npmextra.json Configuration
Customize gitzone behavior through `npmextra.json`:
```json ```json
{ {
"gitzone": { "@git.zone/cli": {
"format": { "schemaVersion": 2,
"interactive": true, "projectType": "npm",
"showDiffs": false, "commit": {
"autoApprove": false, "confirmation": "prompt",
"parallel": true, "steps": ["analyze", "test", "build", "changelog", "commit", "push"]
"rollback": { },
"enabled": true, "release": {
"autoRollbackOnError": true, "confirmation": "prompt",
"backupRetentionDays": 7 "preflight": {
"requireCleanTree": true,
"test": false,
"build": true
}, },
"modules": { "targets": {
"skip": ["prettier"], "git": {
"only": [], "enabled": true,
"order": [] "remote": "origin",
}, "pushBranch": true,
"cache": { "pushTags": true
"enabled": true, },
"clean": true "npm": {
"enabled": true,
"registries": ["https://registry.npmjs.org"],
"accessLevel": "public",
"alreadyPublished": "success"
},
"docker": {
"enabled": false,
"images": []
}
} }
} }
} }
} }
``` ```
### Environment Variables NPM registries belong only here:
- `CI` - Detect CI environment for automated workflows ```text
- `DEBUG` - Enable debug output @git.zone/cli.release.targets.npm.registries
- `GITZONE_FORMAT_PARALLEL` - Control parallel formatting ```
## 🏆 Best Practices Useful config commands:
### For New Projects
1. Start with a template: `gitzone template npm`
2. Set up local services: `gitzone services start`
3. Customize the generated structure
4. Run initial format: `gitzone format`
5. Set up CI/CD: `gitzone open ci`
### For Existing Projects
1. Initialize: `gitzone start`
2. Format codebase: `gitzone format --dry-run` (preview first!)
3. Apply formatting: `gitzone format --yes`
4. Set up services: `gitzone services start`
5. Commit changes: `gitzone commit`
### For Teams
1. Document format preferences in `npmextra.json`
2. Share `.nogit/env.json` template for consistent service setup
3. Use `--save-plan` for reviewable format changes
4. Enable rollback for safety
5. Standardize commit conventions
## 🎯 Common Workflows
### Full-Stack Development Cycle
```bash ```bash
# 1. Start fresh # Show current @git.zone/cli config
gitzone start gitzone config show --json
# 2. Spin up databases and services # Read the npm release target registries
gitzone services start gitzone config get release.targets.npm.registries
# 3. Make changes # Add an npm release target registry
# ... your development work ... gitzone config add https://registry.npmjs.org
# 4. Check service logs if needed # Set npm target access level
gitzone services logs mongo gitzone config access public
# 5. Format code # Run schema migration to v2
gitzone config migrate 2
```
## Formatting
`gitzone format` is dry-run by default. That makes it safe to run in any repo.
```bash
# Preview changes
gitzone format gitzone format
# 6. Commit with semantic versioning # Emit a machine-readable plan
gitzone commit gitzone format plan --json
# 7. Stop services when done # Apply changes
gitzone services stop gitzone format --write
# Apply without prompt
gitzone format --write --yes
``` ```
### Multi-Repository Management Formatters include cleanup, smartconfig normalization, dependency license checks, package metadata normalization, template updates, `.gitignore`, TypeScript config, Prettier, README existence checks, and configured copy operations.
## Development Services
`gitzone services` manages local Docker-backed services for development projects.
Supported services:
| Service | Aliases |
| --- | --- |
| MongoDB | `mongo`, `mongodb` |
| MinIO | `minio`, `s3` |
| Elasticsearch | `elasticsearch`, `es` |
```bash ```bash
# 1. Set up meta repository # Start configured services
gitzone meta init
# 2. Add all related projects
gitzone meta add frontend https://github.com/org/frontend.git
gitzone meta add backend https://github.com/org/backend.git
gitzone meta add shared https://github.com/org/shared.git
# 3. Synchronize updates
gitzone meta update
```
### Safe Formatting with Rollback
```bash
# 1. Preview changes
gitzone format --dry-run
# 2. Save plan for review
gitzone format --save-plan format-changes.json
# 3. Apply formatting
gitzone format --from-plan format-changes.json
# 4. If something goes wrong, rollback
gitzone format --rollback
```
### Database-Driven Development
```bash
# 1. Start MongoDB and MinIO
gitzone services start gitzone services start
# 2. Get connection string for your app # Enable specific services non-interactively
gitzone services config gitzone services set mongodb,minio
# 3. Connect with MongoDB Compass # Check status
gitzone services compass
# 4. Monitor services
gitzone services status gitzone services status
# 5. Clean everything when done # Print MongoDB Compass connection string
gitzone services clean # ⚠️ Warning: deletes data gitzone services compass
# Show logs
gitzone services logs mongo 50
# Stop containers but keep data
gitzone services stop
# Remove containers and data
gitzone services clean
``` ```
## 🔌 Integrations Service config is stored in `.nogit/env.json`. Data is stored below `.nogit/`, so it stays out of Git.
### CI/CD Platforms ## Templates
- **GitLab CI** - Full pipeline support with templates Start new projects with built-in scaffolds:
- **GitHub Actions** - Automated workflows
- **Docker** - Container-based deployments
### Development Tools ```bash
gitzone template npm
gitzone template service
gitzone template website
gitzone template wcc
```
- **TypeScript** - First-class support Templates are rendered through SmartScaf and then can be normalized with `gitzone format`.
- **Prettier** - Code formatting
- **npm/pnpm** - Package management
- **MongoDB** - Local database service
- **MinIO** - S3-compatible object storage
- **MongoDB Compass** - Database GUI integration
### Version Control ## Meta Repositories
- **Git** - Deep integration Use `gitzone meta` when one workspace coordinates multiple repositories.
- **Semantic Versioning** - Automatic version bumping
- **Conventional Commits** - Standardized commit messages
## 💡 Pro Tips ```bash
gitzone meta init
gitzone meta add frontend https://example.com/org/frontend.git
gitzone meta update
gitzone meta remove frontend
```
1. **Use aliases**: Add `alias gz='gitzone'` to your shell profile ## Other Utilities
2. **Combine commands**: `gitzone format --yes && gitzone commit`
3. **Leverage templates**: Start projects right with proven structures
4. **Enable caching**: Dramatically speeds up formatting operations
5. **Save format plans**: Review changes before applying in production
6. **Port management**: Let services auto-assign ports to avoid conflicts
7. **Use MongoDB Compass**: `gitzone services compass` for visual DB management
## 🐛 Troubleshooting ```bash
# Docker cleanup
gitzone docker prune
### Format Command Shows "Cancelled" # Open GitLab CI settings or pipelines for the current repo
gitzone open ci
gitzone open pipelines
If the format command shows cancelled even after confirming: # Deprecate an old npm package interactively
gitzone deprecate
- Check your `npmextra.json` configuration # Prepare a project for local work
- Try with `--yes` flag to skip confirmation gitzone start
- Use `--verbose` for detailed output
### Docker Commands Fail # Generate a short unique ID
gitzone helpers shortid
```
Ensure Docker daemon is running: ## Troubleshooting
Format only previews changes:
```bash
gitzone format --write
```
Release says there is nothing to release:
```bash
# Make sure commits have populated the Pending changelog section
gitzone commit
```
Docker services fail to start:
```bash ```bash
docker info docker info
gitzone services status
gitzone services reconfigure
``` ```
### Services Won't Start Config looks outdated:
Check for port conflicts:
```bash ```bash
# Services auto-assign ports, but you can check the config gitzone config migrate 2
cat .nogit/env.json gitzone config show --json
# Verify Docker is running
docker ps
``` ```
### Template Creation Issues
Verify npm/pnpm is properly configured:
```bash
npm config get registry
```
### MongoDB Connection Issues
- Ensure services are running: `gitzone services status`
- Check firewall settings for the assigned ports
- Use `gitzone services compass` for the correct connection string
## 📈 Performance
gitzone is optimized for speed:
- **Parallel processing** for format operations
- **Smart caching** to avoid redundant work
- **Incremental updates** for meta repositories
- **Minimal dependencies** for fast installation
- **Isolated services** prevent resource conflicts
- **Auto port assignment** eliminates manual configuration
## License and Legal Information ## License and Legal Information
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file. This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./license) file.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file. **Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
+11
View File
@@ -1,11 +1,13 @@
# GitZone Services Command Implementation Plan # GitZone Services Command Implementation Plan
## Overview ## Overview
Implement the `gitzone services` command to manage MongoDB and MinIO containers for development projects. Implement the `gitzone services` command to manage MongoDB and MinIO containers for development projects.
## Tasks ## Tasks
### Module Structure Setup ### Module Structure Setup
- [x] Create `ts/mod_services/` directory - [x] Create `ts/mod_services/` directory
- [x] Create `mod.plugins.ts` with required imports - [x] Create `mod.plugins.ts` with required imports
- [x] Create `helpers.ts` with utility functions - [x] Create `helpers.ts` with utility functions
@@ -15,6 +17,7 @@ Implement the `gitzone services` command to manage MongoDB and MinIO containers
- [x] Create `index.ts` with main command logic - [x] Create `index.ts` with main command logic
### Core Functionality ### Core Functionality
- [x] Implement ServiceConfiguration class - [x] Implement ServiceConfiguration class
- [x] Load/create `.nogit/env.json` configuration - [x] Load/create `.nogit/env.json` configuration
- [x] Generate random available ports (20000-30000 range) - [x] Generate random available ports (20000-30000 range)
@@ -37,6 +40,7 @@ Implement the `gitzone services` command to manage MongoDB and MinIO containers
- [x] Generate MongoDB Compass connection strings - [x] Generate MongoDB Compass connection strings
### Commands Implementation ### Commands Implementation
- [x] `start` command - Start services (mongo|s3|all) - [x] `start` command - Start services (mongo|s3|all)
- [x] `stop` command - Stop services (mongo|s3|all) - [x] `stop` command - Stop services (mongo|s3|all)
- [x] `restart` command - Restart services (mongo|s3|all) - [x] `restart` command - Restart services (mongo|s3|all)
@@ -48,12 +52,14 @@ Implement the `gitzone services` command to manage MongoDB and MinIO containers
- [x] `clean` command - Remove containers and data - [x] `clean` command - Remove containers and data
### Integration ### Integration
- [x] Add `@push.rocks/smartshell` to main plugins.ts - [x] Add `@push.rocks/smartshell` to main plugins.ts
- [x] Add `@push.rocks/smartnetwork` to main plugins.ts - [x] Add `@push.rocks/smartnetwork` to main plugins.ts
- [x] Add `@push.rocks/smartinteraction` to main plugins.ts - [x] Add `@push.rocks/smartinteraction` to main plugins.ts
- [x] Register services command in `gitzone.cli.ts` - [x] Register services command in `gitzone.cli.ts`
### Features ### Features
- [x] Auto-configuration with smart defaults - [x] Auto-configuration with smart defaults
- [x] Random port assignment to avoid conflicts - [x] Random port assignment to avoid conflicts
- [x] Project isolation with unique container names - [x] Project isolation with unique container names
@@ -65,6 +71,7 @@ Implement the `gitzone services` command to manage MongoDB and MinIO containers
- [x] MongoDB Compass connection string with network IP - [x] MongoDB Compass connection string with network IP
### Testing ### Testing
- [ ] Test service start/stop operations - [ ] Test service start/stop operations
- [ ] Test configuration creation and updates - [ ] Test configuration creation and updates
- [ ] Test port collision handling - [ ] Test port collision handling
@@ -73,6 +80,7 @@ Implement the `gitzone services` command to manage MongoDB and MinIO containers
- [ ] Test all command variations - [ ] Test all command variations
## Configuration Format ## Configuration Format
```json ```json
{ {
"PROJECT_NAME": "derived-from-package-name", "PROJECT_NAME": "derived-from-package-name",
@@ -91,6 +99,7 @@ Implement the `gitzone services` command to manage MongoDB and MinIO containers
``` ```
## Command Examples ## Command Examples
```bash ```bash
gitzone services start # Start all services gitzone services start # Start all services
gitzone services start mongo # Start only MongoDB gitzone services start mongo # Start only MongoDB
@@ -104,10 +113,12 @@ gitzone services clean # Remove containers and data
``` ```
## Progress Notes ## Progress Notes
Implementation started: 2025-08-14 Implementation started: 2025-08-14
Implementation completed: 2025-08-14 Implementation completed: 2025-08-14
## Summary ## Summary
Successfully implemented the `gitzone services` command in TypeScript, providing a complete replacement for the `services.sh` shell script. The implementation includes: Successfully implemented the `gitzone services` command in TypeScript, providing a complete replacement for the `services.sh` shell script. The implementation includes:
1. **Complete Docker service management** for MongoDB and MinIO containers 1. **Complete Docker service management** for MongoDB and MinIO containers
Submodule
+1
Submodule test added at 0b89443584
+1 -1
View File
@@ -3,6 +3,6 @@
*/ */
export const commitinfo = { export const commitinfo = {
name: '@git.zone/cli', name: '@git.zone/cli',
version: '2.10.0', version: '2.16.1',
description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.' description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
} }
+3 -3
View File
@@ -38,11 +38,11 @@ export class GitzoneConfig {
public data: IGitzoneConfigData; public data: IGitzoneConfigData;
public async readConfigFromCwd() { public async readConfigFromCwd() {
const npmextraInstance = new plugins.npmextra.Npmextra(paths.cwd); const smartconfigInstance = new plugins.smartconfig.Smartconfig(paths.cwd);
this.data = npmextraInstance.dataFor<IGitzoneConfigData>('@git.zone/cli', {}); this.data = smartconfigInstance.dataFor<IGitzoneConfigData>('@git.zone/cli', {});
// Read szci config for backward compatibility // Read szci config for backward compatibility
const szciConfig = npmextraInstance.dataFor<any>('@ship.zone/szci', {}); const szciConfig = smartconfigInstance.dataFor<any>('@ship.zone/szci', {});
// Prefer accessLevel from @git.zone/cli.release, fallback to @ship.zone/szci.npmAccessLevel // Prefer accessLevel from @git.zone/cli.release, fallback to @ship.zone/szci.npmAccessLevel
const accessLevel = const accessLevel =
+77 -56
View File
@@ -1,24 +1,40 @@
import * as plugins from './plugins.js'; import * as plugins from "./plugins.js";
import * as paths from './paths.js'; import * as paths from "./paths.js";
import { GitzoneConfig } from './classes.gitzoneconfig.js'; import { GitzoneConfig } from "./classes.gitzoneconfig.js";
import { getRawCliMode } from "./helpers.climode.js";
import { commitinfo } from "./00_commitinfo_data.js";
const gitzoneSmartcli = new plugins.smartcli.Smartcli(); const gitzoneSmartcli = new plugins.smartcli.Smartcli();
export let run = async () => { export let run = async () => {
const done = plugins.smartpromise.defer(); const done = plugins.smartpromise.defer();
const rawCliMode = await getRawCliMode();
// get packageInfo // get packageInfo
const projectInfo = new plugins.projectinfo.ProjectInfo(paths.packageDir); const projectInfo = new plugins.projectinfo.ProjectInfo(paths.packageDir);
const projectInfoVersion = (projectInfo.npm as any)?.version;
const packageVersion =
typeof projectInfoVersion === "string" && projectInfoVersion.length > 0
? projectInfoVersion
: commitinfo.version;
// check for updates // check for updates
const smartupdateInstance = new plugins.smartupdate.SmartUpdate(); if (rawCliMode.checkUpdates) {
await smartupdateInstance.check( const smartupdateInstance = new plugins.smartupdate.SmartUpdate();
'gitzone', try {
projectInfo.npm.version, await smartupdateInstance.check(
'http://gitzone.gitlab.io/gitzone/changelog.html', "gitzone",
); packageVersion,
console.log('---------------------------------------------'); "http://gitzone.gitlab.io/gitzone/changelog.html",
gitzoneSmartcli.addVersion(projectInfo.npm.version); );
} catch {
// Update checks must never block actual CLI commands.
}
}
if (rawCliMode.output === "human") {
console.log("---------------------------------------------");
}
gitzoneSmartcli.addVersion(packageVersion);
// ======> Standard task <====== // ======> Standard task <======
@@ -26,8 +42,13 @@ export let run = async () => {
* standard task * standard task
*/ */
gitzoneSmartcli.standardCommand().subscribe(async (argvArg) => { gitzoneSmartcli.standardCommand().subscribe(async (argvArg) => {
const modStandard = await import('./mod_standard/index.js'); const modStandard = await import("./mod_standard/index.js");
await modStandard.run(); await modStandard.run(argvArg);
});
gitzoneSmartcli.addCommand("help").subscribe(async (argvArg) => {
const modStandard = await import("./mod_standard/index.js");
await modStandard.run(argvArg);
}); });
// ======> Specific tasks <====== // ======> Specific tasks <======
@@ -35,62 +56,54 @@ export let run = async () => {
/** /**
* commit something * commit something
*/ */
gitzoneSmartcli.addCommand('commit').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("commit").subscribe(async (argvArg) => {
const modCommit = await import('./mod_commit/index.js'); const modCommit = await import("./mod_commit/index.js");
await modCommit.run(argvArg); await modCommit.run(argvArg);
}); });
/**
* create a release from pending changelog entries
*/
gitzoneSmartcli.addCommand("release").subscribe(async (argvArg) => {
const modRelease = await import("./mod_release/index.js");
await modRelease.run(argvArg);
});
/** /**
* deprecate a package on npm * deprecate a package on npm
*/ */
gitzoneSmartcli.addCommand('deprecate').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("deprecate").subscribe(async (argvArg) => {
const modDeprecate = await import('./mod_deprecate/index.js'); const modDeprecate = await import("./mod_deprecate/index.js");
await modDeprecate.run(); await modDeprecate.run();
}); });
/** /**
* docker * docker
*/ */
gitzoneSmartcli.addCommand('docker').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("docker").subscribe(async (argvArg) => {
const modDocker = await import('./mod_docker/index.js'); const modDocker = await import("./mod_docker/index.js");
await modDocker.run(argvArg); await modDocker.run(argvArg);
}); });
/** /**
* Update all files that comply with the gitzone standard * Update all files that comply with the gitzone standard
*/ */
gitzoneSmartcli.addCommand('format').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("format").subscribe(async (argvArg) => {
const config = GitzoneConfig.fromCwd(); const config = GitzoneConfig.fromCwd();
const modFormat = await import('./mod_format/index.js'); const modFormat = await import("./mod_format/index.js");
// Handle rollback commands
if (argvArg.rollback) {
await modFormat.handleRollback(argvArg.rollback);
return;
}
if (argvArg['list-backups']) {
await modFormat.handleListBackups();
return;
}
if (argvArg['clean-backups']) {
await modFormat.handleCleanBackups();
return;
}
// Handle format with options // Handle format with options
// Default is dry-mode, use --write/-w to apply changes // Default is dry-mode, use --write/-w to apply changes
await modFormat.run({ await modFormat.run({
...argvArg,
write: argvArg.write || argvArg.w, write: argvArg.write || argvArg.w,
dryRun: argvArg['dry-run'], dryRun: argvArg["dry-run"],
yes: argvArg.yes, yes: argvArg.yes,
planOnly: argvArg['plan-only'], planOnly: argvArg["plan-only"],
savePlan: argvArg['save-plan'], savePlan: argvArg["save-plan"],
fromPlan: argvArg['from-plan'], fromPlan: argvArg["from-plan"],
detailed: argvArg.detailed, detailed: argvArg.detailed,
interactive: argvArg.interactive !== false, interactive: argvArg.interactive !== false,
parallel: argvArg.parallel !== false,
verbose: argvArg.verbose, verbose: argvArg.verbose,
diff: argvArg.diff, diff: argvArg.diff,
}); });
@@ -99,54 +112,62 @@ export let run = async () => {
/** /**
* run meta commands * run meta commands
*/ */
gitzoneSmartcli.addCommand('meta').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("meta").subscribe(async (argvArg) => {
const config = GitzoneConfig.fromCwd(); const config = GitzoneConfig.fromCwd();
const modMeta = await import('./mod_meta/index.js'); const modMeta = await import("./mod_meta/index.js");
modMeta.run(argvArg); modMeta.run(argvArg);
}); });
/** /**
* open assets * open assets
*/ */
gitzoneSmartcli.addCommand('open').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("open").subscribe(async (argvArg) => {
const modOpen = await import('./mod_open/index.js'); const modOpen = await import("./mod_open/index.js");
modOpen.run(argvArg); modOpen.run(argvArg);
}); });
/** /**
* add a readme to a project * add a readme to a project
*/ */
gitzoneSmartcli.addCommand('template').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("template").subscribe(async (argvArg) => {
const modTemplate = await import('./mod_template/index.js'); const modTemplate = await import("./mod_template/index.js");
modTemplate.run(argvArg); modTemplate.run(argvArg);
}); });
/** /**
* start working on a project * start working on a project
*/ */
gitzoneSmartcli.addCommand('start').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("start").subscribe(async (argvArg) => {
const modTemplate = await import('./mod_start/index.js'); const modTemplate = await import("./mod_start/index.js");
modTemplate.run(argvArg); modTemplate.run(argvArg);
}); });
gitzoneSmartcli.addCommand('helpers').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("helpers").subscribe(async (argvArg) => {
const modHelpers = await import('./mod_helpers/index.js'); const modHelpers = await import("./mod_helpers/index.js");
modHelpers.run(argvArg); modHelpers.run(argvArg);
}); });
/**
* manage the global @git.zone toolchain
*/
gitzoneSmartcli.addCommand("tools").subscribe(async (argvArg) => {
const modTools = await import("./mod_tools/index.js");
await modTools.run(argvArg);
});
/** /**
* manage release configuration * manage release configuration
*/ */
gitzoneSmartcli.addCommand('config').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("config").subscribe(async (argvArg) => {
const modConfig = await import('./mod_config/index.js'); const modConfig = await import("./mod_config/index.js");
await modConfig.run(argvArg); await modConfig.run(argvArg);
}); });
/** /**
* manage development services (MongoDB, S3/MinIO) * manage development services (MongoDB, S3/MinIO)
*/ */
gitzoneSmartcli.addCommand('services').subscribe(async (argvArg) => { gitzoneSmartcli.addCommand("services").subscribe(async (argvArg) => {
const modServices = await import('./mod_services/index.js'); const modServices = await import("./mod_services/index.js");
await modServices.run(argvArg); await modServices.run(argvArg);
}); });
+165
View File
@@ -0,0 +1,165 @@
import * as plugins from "./plugins.js";
/**
 * The fixed set of changelog buckets that commit types are grouped under.
 * See `bucketForCommitType` for the commit-type → bucket mapping.
 */
export type TChangelogBucket =
  | "Breaking Changes"
  | "Features"
  | "Fixes"
  | "Documentation"
  | "Maintenance";
/**
 * One changelog entry derived from a single semantic commit.
 */
export interface IChangelogEntry {
  // conventional commit type, e.g. "feat", "fix", "docs", "BREAKING CHANGE"
  type: string;
  // affected component/module; rendered as "(scope)" when non-empty
  scope: string;
  // the commit's human-readable summary line
  message: string;
  // optional extra bullet points, rendered indented under the entry
  details?: string[];
}
/**
 * The raw markdown content of the changelog's "## Pending" section.
 */
export interface IPendingChangelog {
  // trimmed markdown body of the pending section
  block: string;
  // true when the pending section contains no entries
  isEmpty: boolean;
}
/**
 * Map a conventional commit type onto its standard changelog bucket.
 * Any unrecognized commit type falls back to "Maintenance".
 */
const bucketForCommitType = (commitType: string): TChangelogBucket => {
  // Map (not a plain object) so inherited keys like "toString" cannot match.
  const bucketByType = new Map<string, TChangelogBucket>([
    ["BREAKING CHANGE", "Breaking Changes"],
    ["feat", "Features"],
    ["fix", "Fixes"],
    ["docs", "Documentation"],
  ]);
  return bucketByType.get(commitType) ?? "Maintenance";
};
/**
 * Read the changelog file as UTF-8 text.
 * When the file does not exist yet, return a minimal changelog skeleton
 * instead of failing, so callers can always append to valid content.
 */
const readChangelog = async (filePath: string): Promise<string> => {
  const fileExists = await plugins.smartfs.file(filePath).exists();
  if (!fileExists) {
    return "# Changelog\n\n";
  }
  const rawContent = await plugins.smartfs.file(filePath).encoding("utf8").read();
  return rawContent as string;
};
/**
 * Persist changelog content as UTF-8, guaranteeing a trailing newline.
 */
const writeChangelog = async (filePath: string, content: string): Promise<void> => {
  let normalized = content;
  if (!normalized.endsWith("\n")) {
    normalized = `${normalized}\n`;
  }
  await plugins.smartfs.file(filePath).encoding("utf8").write(normalized);
};
/**
 * Locate a level-2 markdown section ("## <sectionName>") inside changelog content.
 *
 * Returns the heading's start offset, the offset just past the heading line
 * (start of the section body), and the offset where the next "## " heading
 * begins (or the end of the content). Returns null when the section is absent.
 */
const findPendingSection = (
  content: string,
  sectionName: string,
): { start: number; bodyStart: number; end: number } | null => {
  // Escape regex metacharacters so arbitrary section names match literally.
  const escapedName = sectionName.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
  const headingMatch = new RegExp(`^##\\s+${escapedName}\\s*$`, "m").exec(content);
  if (!headingMatch || headingMatch.index === undefined) {
    return null;
  }
  const start = headingMatch.index;
  const bodyStart = start + headingMatch[0].length;
  // "^##\s+" does not match "###", so sub-buckets stay inside the section body.
  const nextHeading = /^##\s+/m.exec(content.slice(bodyStart));
  return {
    start,
    bodyStart,
    end: nextHeading ? bodyStart + nextHeading.index : content.length,
  };
};
/**
 * Guarantee the changelog contains a "## <sectionName>" section.
 *
 * If the section already exists, the file is left untouched. Otherwise the
 * section is inserted right after the "# ..." document title when one exists,
 * or a fresh "# Changelog" title plus the section is prepended. Returns the
 * resulting changelog content either way.
 */
export const ensurePendingSection = async (
  filePath: string,
  sectionName = "Pending",
): Promise<string> => {
  const existingContent = await readChangelog(filePath);
  if (findPendingSection(existingContent, sectionName)) {
    return existingContent;
  }
  const pendingSection = `## ${sectionName}\n\n`;
  const titleMatch = /^#\s+.+$/m.exec(existingContent);
  let updatedContent: string;
  if (titleMatch && titleMatch.index !== undefined) {
    const insertAt = titleMatch.index + titleMatch[0].length;
    const beforeInsert = existingContent.slice(0, insertAt);
    // Drop leading blank lines after the title; spacing is re-added explicitly.
    const afterInsert = existingContent.slice(insertAt).replace(/^\n+/, "");
    updatedContent = `${beforeInsert}\n\n${pendingSection}${afterInsert}`;
  } else {
    updatedContent = `# Changelog\n\n${pendingSection}${existingContent}`;
  }
  await writeChangelog(filePath, updatedContent);
  return updatedContent;
};
/**
 * Append one changelog entry to the pending section of the changelog file.
 *
 * The entry is rendered as a markdown bullet ("- message (scope)" plus
 * indented detail bullets) and placed under the "### <bucket>" heading that
 * matches the entry's commit type. The bucket heading is created at the end
 * of the pending section when it does not exist yet; otherwise the entry is
 * appended after the bucket's existing entries. The file is rewritten in place.
 *
 * @param filePath path of the changelog markdown file
 * @param sectionName name of the pending section heading (e.g. "Pending")
 * @param entry the structured commit entry to render
 */
export const appendPendingChangelogEntry = async (
  filePath: string,
  sectionName: string,
  entry: IChangelogEntry,
): Promise<void> => {
  // Make sure the pending section exists before locating it (so "!" is safe).
  let content = await ensurePendingSection(filePath, sectionName);
  const pendingSection = findPendingSection(content, sectionName)!;
  let pendingBody = content.slice(pendingSection.bodyStart, pendingSection.end);
  const bucket = bucketForCommitType(entry.type);
  const bucketHeading = `### ${bucket}`;
  // First line: "- message" with an optional " (scope)" suffix.
  const entryLines = [`- ${entry.message}${entry.scope ? ` (${entry.scope})` : ""}`];
  for (const detail of entry.details || []) {
    entryLines.push(`  - ${detail}`);
  }
  const renderedEntry = entryLines.join("\n");
  // Escape regex metacharacters in the bucket name, matching a whole heading line.
  const bucketRegex = new RegExp(`^###\\s+${bucket.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")}\\s*$`, "m");
  const bucketMatch = bucketRegex.exec(pendingBody);
  if (!bucketMatch || bucketMatch.index === undefined) {
    // Bucket not present yet: append heading + entry at the end of the section.
    pendingBody = `${pendingBody.trimEnd()}\n\n${bucketHeading}\n\n${renderedEntry}\n`;
  } else {
    // Bucket exists: insert the entry just before the next "### " heading
    // (or at the end of the pending body when this is the last bucket).
    const bucketBodyStart = bucketMatch.index + bucketMatch[0].length;
    const afterBucket = pendingBody.slice(bucketBodyStart);
    const nextBucketMatch = /^###\s+/m.exec(afterBucket);
    const insertAt = nextBucketMatch ? bucketBodyStart + nextBucketMatch.index : pendingBody.length;
    const beforeInsert = pendingBody.slice(0, insertAt).trimEnd();
    const afterInsert = pendingBody.slice(insertAt).replace(/^\n+/, "");
    pendingBody = `${beforeInsert}\n${renderedEntry}\n\n${afterInsert}`;
  }
  // Splice the rebuilt pending body back in, normalizing surrounding blank lines.
  content = `${content.slice(0, pendingSection.bodyStart)}\n${pendingBody.trim()}\n\n${content.slice(pendingSection.end).replace(/^\n+/, "")}`;
  await writeChangelog(filePath, content);
};
export const readPendingChangelog = async (
filePath: string,
sectionName = "Pending",
): Promise<IPendingChangelog> => {
const content = await ensurePendingSection(filePath, sectionName);
const pendingSection = findPendingSection(content, sectionName)!;
const block = content.slice(pendingSection.bodyStart, pendingSection.end).trim();
return {
block,
isEmpty: block.length === 0,
};
};
/**
 * Infer the semver bump level from a pending changelog block:
 * a "### Breaking Changes" bucket means major, "### Features" means minor,
 * anything else (fixes, docs, maintenance, or nothing) means patch.
 */
export const inferVersionTypeFromPending = (pendingBlock: string): "patch" | "minor" | "major" => {
  const hasBucket = (bucketName: string): boolean =>
    new RegExp(`^###\\s+${bucketName}\\s*$`, "m").test(pendingBlock);
  if (hasBucket("Breaking Changes")) {
    return "major";
  }
  return hasBucket("Features") ? "minor" : "patch";
};
// Promote the pending section's entries into a new released-version section.
// versionHeading is a template containing {{version}} / {{date}} placeholders.
// Throws when the pending section is empty (nothing to release).
export const movePendingToVersion = async (
  filePath: string,
  sectionName: string,
  versionHeading: string,
  version: string,
  dateString: string,
): Promise<void> => {
  let content = await ensurePendingSection(filePath, sectionName);
  // Non-null: ensurePendingSection guarantees the section exists.
  const pendingSection = findPendingSection(content, sectionName)!;
  const pendingBlock = content.slice(pendingSection.bodyStart, pendingSection.end).trim();
  if (!pendingBlock) {
    throw new Error("No pending changelog entries. Nothing to release.");
  }
  const renderedHeading = versionHeading
    .replaceAll("{{version}}", version)
    .replaceAll("{{date}}", dateString);
  // Everything after the pending section, with leading blank lines collapsed.
  const nextContent = content.slice(pendingSection.end).replace(/^\n+/, "");
  // Rebuild: empty pending body, then the new version heading + entries.
  content = `${content.slice(0, pendingSection.bodyStart)}\n\n${renderedHeading}\n\n${pendingBlock}\n\n${nextContent}`;
  await writeChangelog(filePath, content);
};
+212
View File
@@ -0,0 +1,212 @@
import { getCliConfigValue } from "./helpers.smartconfig.js";
// Output modes: "human" = rich interactive text, "plain" = script-friendly
// text, "json" = machine-readable structured output.
export type TCliOutputMode = "human" | "plain" | "json";
// Fully resolved runtime mode for a single CLI invocation.
export interface ICliMode {
  output: TCliOutputMode;
  interactive: boolean; // prompts allowed (requires TTY + human output, no agent mode)
  json: boolean; // convenience: output === "json"
  plain: boolean; // convenience: output === "plain"
  quiet: boolean; // suppress informational output (implied by json output)
  yes: boolean; // auto-confirm prompts (-y/--yes)
  help: boolean; // help requested (-h/--help or "help" command)
  agent: boolean; // agent mode: forces json output, disables prompts and update checks
  checkUpdates: boolean; // whether the startup update check may run
  isTty: boolean; // both stdin and stdout are TTYs
  command?: string; // first positional argument, if any
}
// Settings read from the "cli" section of .smartconfig.json.
interface ICliConfigSettings {
  interactive?: boolean;
  output?: TCliOutputMode;
  checkUpdates?: boolean;
}
// Minimal parsed-argv shape: named flags plus positional args under "_".
type TArgSource = Record<string, any> & { _?: string[] };
// Convert a kebab-case flag name (e.g. "check-updates") to camelCase.
const camelCase = (value: string): string =>
  value.replace(/-([a-z])/g, (_match, letter: string) => letter.toUpperCase());
// Look up a flag by its raw name, camelCase form, or dash-stripped form.
const getArgValue = (argvArg: TArgSource, key: string): any => {
  const candidates = [key, camelCase(key), key.replace(/-/g, "")];
  const hit = candidates.find((candidate) => argvArg[candidate] !== undefined);
  return hit === undefined ? undefined : argvArg[hit];
};
// Minimal argv parser: supports --no-key, --key=value, --key value, -abc
// short-flag groups, and positionals collected under "_".
const parseRawArgv = (argv: string[]): TArgSource => {
  const result: TArgSource = { _: [] };
  // Long flags are stored under both the raw key and its camelCase form.
  const setFlag = (key: string, value: any): void => {
    result[key] = value;
    result[camelCase(key)] = value;
  };
  let index = 0;
  while (index < argv.length) {
    const token = argv[index];
    if (token.startsWith("--no-")) {
      setFlag(token.slice(5), false);
    } else if (token.startsWith("--")) {
      const [rawKey, inlineValue] = token.slice(2).split("=", 2);
      if (inlineValue !== undefined) {
        setFlag(rawKey, inlineValue);
      } else {
        const lookahead = argv[index + 1];
        if (lookahead && !lookahead.startsWith("-")) {
          // Consume the following token as this flag's value.
          setFlag(rawKey, lookahead);
          index++;
        } else {
          setFlag(rawKey, true);
        }
      }
    } else if (token.startsWith("-") && token.length > 1) {
      // Grouped short flags (-abc) are all boolean, no camelCase aliasing.
      for (const shortFlag of token.slice(1).split("")) {
        result[shortFlag] = true;
      }
    } else {
      result._!.push(token);
    }
    index++;
  }
  return result;
};
// Validate an arbitrary value as an output mode; undefined when unrecognized.
const normalizeOutputMode = (value: unknown): TCliOutputMode | undefined => {
  const validModes: TCliOutputMode[] = ["human", "plain", "json"];
  return validModes.includes(value as TCliOutputMode)
    ? (value as TCliOutputMode)
    : undefined;
};
// Resolve the effective CLI mode from flags, config, and terminal state.
const resolveCliMode = (
  argvArg: TArgSource,
  cliConfig: ICliConfigSettings,
): ICliMode => {
  const isTty = Boolean(process.stdout?.isTTY && process.stdin?.isTTY);
  const agentMode = Boolean(getArgValue(argvArg, "agent"));
  const outputOverride = normalizeOutputMode(getArgValue(argvArg, "output"));
  // Output precedence (highest first): --agent/--json, --plain, --output,
  // config "output", then the TTY-based default.
  let output: TCliOutputMode =
    normalizeOutputMode(cliConfig.output) || (isTty ? "human" : "plain");
  if (agentMode || getArgValue(argvArg, "json")) {
    output = "json";
  } else if (getArgValue(argvArg, "plain")) {
    output = "plain";
  } else if (outputOverride) {
    output = outputOverride;
  }
  // Interactivity: an explicit flag beats config, but a non-TTY, non-human
  // output, or agent mode always disables prompts.
  const interactiveSetting = getArgValue(argvArg, "interactive");
  let interactive = cliConfig.interactive ?? isTty;
  if (interactiveSetting === true) {
    interactive = true;
  } else if (interactiveSetting === false) {
    interactive = false;
  }
  if (!isTty || output !== "human" || agentMode) {
    interactive = false;
  }
  // Update checks only run for human output outside agent mode.
  const checkUpdatesSetting = getArgValue(argvArg, "check-updates");
  let checkUpdates = cliConfig.checkUpdates ?? output === "human";
  if (checkUpdatesSetting === true) {
    checkUpdates = true;
  } else if (checkUpdatesSetting === false) {
    checkUpdates = false;
  }
  if (output !== "human" || agentMode) {
    checkUpdates = false;
  }
  return {
    output,
    interactive,
    json: output === "json",
    plain: output === "plain",
    // JSON output implies quiet so stray logs cannot corrupt the payload.
    quiet: Boolean(
      getArgValue(argvArg, "quiet") ||
        getArgValue(argvArg, "q") ||
        output === "json",
    ),
    yes: Boolean(getArgValue(argvArg, "yes") || getArgValue(argvArg, "y")),
    help: Boolean(
      getArgValue(argvArg, "help") ||
        getArgValue(argvArg, "h") ||
        argvArg._?.[0] === "help",
    ),
    agent: agentMode,
    checkUpdates,
    isTty,
    command: argvArg._?.[0],
  };
};
// Read the "cli" settings block from .smartconfig.json (defaults to {}).
const getCliModeConfig = async (): Promise<ICliConfigSettings> => {
  return await getCliConfigValue<ICliConfigSettings>("cli", {});
};
// Resolve the CLI mode from an already-parsed argv object.
export const getCliMode = async (
  argvArg: TArgSource = {},
): Promise<ICliMode> => {
  const cliConfig = await getCliModeConfig();
  return resolveCliMode(argvArg, cliConfig);
};
// Resolve the CLI mode from raw process.argv — usable before any argv
// parsing framework has run (e.g. for early startup decisions).
export const getRawCliMode = async (): Promise<ICliMode> => {
  const cliConfig = await getCliModeConfig();
  const rawArgv = parseRawArgv(process.argv.slice(2));
  return resolveCliMode(rawArgv, cliConfig);
};
// Emit machine-readable output; two-space indent keeps it human-skimmable.
export const printJson = (data: unknown): void => {
  const serialized = JSON.stringify(data, null, 2);
  console.log(serialized);
};
// Run fn with all console/stdout/stderr output silenced, restoring every
// channel afterwards — even when fn throws.
export const runWithSuppressedOutput = async <T>(
  fn: () => Promise<T>,
): Promise<T> => {
  // Snapshot every output channel before silencing.
  const savedConsole = {
    log: console.log,
    info: console.info,
    warn: console.warn,
    error: console.error,
  };
  const savedStdoutWrite = process.stdout.write.bind(process.stdout);
  const savedStderrWrite = process.stderr.write.bind(process.stderr);
  const silence = () => undefined;
  console.log = silence;
  console.info = silence;
  console.warn = silence;
  console.error = silence;
  process.stdout.write = (() => true) as typeof process.stdout.write;
  process.stderr.write = (() => true) as typeof process.stderr.write;
  try {
    return await fn();
  } finally {
    // Restore unconditionally so a throwing fn cannot leave output muted.
    console.log = savedConsole.log;
    console.info = savedConsole.info;
    console.warn = savedConsole.warn;
    console.error = savedConsole.error;
    process.stdout.write = savedStdoutWrite;
    process.stderr.write = savedStderrWrite;
  }
};
+192
View File
@@ -0,0 +1,192 @@
import * as plugins from "./plugins.js";
import { rename, rm, writeFile } from "fs/promises";
// Top-level key under which all CLI settings live in .smartconfig.json.
export const CLI_NAMESPACE = "@git.zone/cli";
// True only for plain objects (excludes null and arrays).
const isPlainObject = (value: unknown): value is Record<string, any> => {
  return typeof value === "object" && value !== null && !Array.isArray(value);
};
// Absolute path of the .smartconfig.json for a workspace.
export const getSmartconfigPath = (cwd: string = process.cwd()): string => {
  return plugins.path.join(cwd, ".smartconfig.json");
};
// Read and parse .smartconfig.json; returns {} when the file is missing or
// blank. NOTE(review): a malformed file makes JSON.parse throw a bare
// SyntaxError with no file path — consider wrapping for a clearer error.
export const readSmartconfigFile = async (
  cwd: string = process.cwd(),
): Promise<Record<string, any>> => {
  const smartconfigPath = getSmartconfigPath(cwd);
  if (!(await plugins.smartfs.file(smartconfigPath).exists())) {
    return {};
  }
  const content = (await plugins.smartfs
    .file(smartconfigPath)
    .encoding("utf8")
    .read()) as string;
  if (content.trim() === "") {
    return {};
  }
  return JSON.parse(content);
};
export const writeSmartconfigFile = async (
data: Record<string, any>,
cwd: string = process.cwd(),
): Promise<void> => {
const smartconfigPath = getSmartconfigPath(cwd);
const tempPath = `${smartconfigPath}.tmp-${Date.now()}`;
const content = JSON.stringify(data, null, 2);
await writeFile(tempPath, content, "utf8");
await rename(tempPath, smartconfigPath);
};
// Strip an optional "@git.zone/cli"/"@git.zone/cli." prefix from a path.
export const normalizeCliConfigPath = (configPath: string): string => {
  const trimmed = configPath.trim();
  if (trimmed === "" || trimmed === CLI_NAMESPACE) {
    return "";
  }
  const namespacePrefix = `${CLI_NAMESPACE}.`;
  return trimmed.startsWith(namespacePrefix)
    ? trimmed.slice(namespacePrefix.length)
    : trimmed;
};
// Split a (possibly namespaced) dot path into non-empty, trimmed segments.
export const getCliConfigPathSegments = (configPath: string): string[] => {
  const normalizedPath = normalizeCliConfigPath(configPath);
  if (!normalizedPath) {
    return [];
  }
  return normalizedPath
    .split(".")
    .map((segment) => segment.trim())
    .filter(Boolean);
};
// Return the CLI namespace object, or an empty object when absent/invalid.
export const getCliNamespaceConfig = (
  smartconfigData: Record<string, any>,
): Record<string, any> => {
  const namespaceValue = smartconfigData[CLI_NAMESPACE];
  return isPlainObject(namespaceValue) ? namespaceValue : {};
};
// Walk the namespace object segment by segment; undefined on any miss.
export const getCliConfigValueFromData = (
  smartconfigData: Record<string, any>,
  configPath: string,
): any => {
  let cursor: any = getCliNamespaceConfig(smartconfigData);
  for (const segment of getCliConfigPathSegments(configPath)) {
    // Only objects and arrays can be descended into.
    if (!isPlainObject(cursor) && !Array.isArray(cursor)) {
      return undefined;
    }
    cursor = cursor?.[segment];
  }
  return cursor;
};
// Read the value at configPath from .smartconfig.json. Object defaults are
// shallow-merged with the configured value; anything else is replaced.
export const getCliConfigValue = async <T>(
  configPath: string,
  defaultValue: T,
  cwd: string = process.cwd(),
): Promise<T> => {
  const smartconfigData = await readSmartconfigFile(cwd);
  const configuredValue = getCliConfigValueFromData(smartconfigData, configPath);
  if (configuredValue === undefined) {
    return defaultValue;
  }
  const shouldMerge = isPlainObject(defaultValue) && isPlainObject(configuredValue);
  return shouldMerge
    ? ({ ...defaultValue, ...configuredValue } as T)
    : (configuredValue as T);
};
// Write value at the given dot path under the CLI namespace, creating
// intermediate objects as needed. Mutates and returns smartconfigData.
export const setCliConfigValueInData = (
  smartconfigData: Record<string, any>,
  configPath: string,
  value: any,
): Record<string, any> => {
  const segments = getCliConfigPathSegments(configPath);
  if (!isPlainObject(smartconfigData[CLI_NAMESPACE])) {
    smartconfigData[CLI_NAMESPACE] = {};
  }
  // An empty path replaces the whole namespace object.
  if (segments.length === 0) {
    smartconfigData[CLI_NAMESPACE] = value;
    return smartconfigData;
  }
  const parentSegments = segments.slice(0, -1);
  const lastSegment = segments[segments.length - 1];
  let cursor = smartconfigData[CLI_NAMESPACE];
  for (const segment of parentSegments) {
    if (!isPlainObject(cursor[segment])) {
      cursor[segment] = {};
    }
    cursor = cursor[segment];
  }
  cursor[lastSegment] = value;
  return smartconfigData;
};
// Remove the value at configPath under the CLI namespace. Returns true when
// something was actually deleted. Prunes parent objects left empty, including
// the namespace object itself.
export const unsetCliConfigValueInData = (
  smartconfigData: Record<string, any>,
  configPath: string,
): boolean => {
  const segments = getCliConfigPathSegments(configPath);
  if (segments.length === 0) {
    // Empty path = drop the whole namespace (if present).
    if (smartconfigData[CLI_NAMESPACE] !== undefined) {
      delete smartconfigData[CLI_NAMESPACE];
      return true;
    }
    return false;
  }
  const parentSegments = segments.slice(0, -1);
  // When the namespace exists this is the live object, so the deletes below
  // mutate smartconfigData; otherwise it is a fresh {} and the lookups below
  // miss, returning false before anything is deleted.
  let currentValue: any = getCliNamespaceConfig(smartconfigData);
  const objectPath: Array<Record<string, any>> = [currentValue];
  for (const segment of parentSegments) {
    if (!isPlainObject(currentValue[segment])) {
      return false;
    }
    currentValue = currentValue[segment];
    objectPath.push(currentValue);
  }
  const lastSegment = segments[segments.length - 1];
  if (!(lastSegment in currentValue)) {
    return false;
  }
  delete currentValue[lastSegment];
  // Walk back up the recorded path, deleting parents that became empty.
  for (let i = objectPath.length - 1; i >= 1; i--) {
    if (Object.keys(objectPath[i]).length > 0) {
      break;
    }
    const parentObject = objectPath[i - 1];
    const parentKey = parentSegments[i - 1];
    delete parentObject[parentKey];
  }
  // Finally drop the namespace object itself if nothing remains in it.
  if (Object.keys(getCliNamespaceConfig(smartconfigData)).length === 0) {
    delete smartconfigData[CLI_NAMESPACE];
  }
  return true;
};
+192
View File
@@ -0,0 +1,192 @@
// Current schema version written by this CLI; bump when adding migrations.
export const CURRENT_GITZONE_CLI_SCHEMA_VERSION = 2;
// Result summary returned by migrateSmartconfigData.
export interface ISmartconfigMigrationResult {
  migrated: boolean; // whether the data object was mutated
  fromVersion: number; // schema version found before migrating
  toVersion: number; // schema version after migrating
}
const CLI_NAMESPACE = "@git.zone/cli";
// True only for plain objects (excludes null and arrays).
const isPlainObject = (value: unknown): value is Record<string, any> => {
  return typeof value === "object" && value !== null && !Array.isArray(value);
};
// Return parent[key], first replacing it with {} if it is not a plain object.
const ensureObject = (parent: Record<string, any>, key: string): Record<string, any> => {
  if (!isPlainObject(parent[key])) {
    parent[key] = {};
  }
  return parent[key];
};
// Rename legacy top-level namespaces to their scoped package names. When both
// old and new keys exist, values already under the new key win. Returns true
// when anything was moved.
const migrateNamespaceKeys = (smartconfigJson: Record<string, any>): boolean => {
  const renames = [
    { oldKey: "gitzone", newKey: CLI_NAMESPACE },
    { oldKey: "tsdoc", newKey: "@git.zone/tsdoc" },
    { oldKey: "npmdocker", newKey: "@git.zone/tsdocker" },
    { oldKey: "npmci", newKey: "@ship.zone/szci" },
    { oldKey: "szci", newKey: "@ship.zone/szci" },
  ];
  let changed = false;
  for (const { oldKey, newKey } of renames) {
    const legacyValue = smartconfigJson[oldKey];
    if (!isPlainObject(legacyValue)) {
      continue;
    }
    smartconfigJson[newKey] = isPlainObject(smartconfigJson[newKey])
      ? { ...legacyValue, ...smartconfigJson[newKey] }
      : legacyValue;
    delete smartconfigJson[oldKey];
    changed = true;
  }
  return changed;
};
/**
 * Migrate a pre-v2 smartconfig to schema v2:
 *  - move release.git/npm/docker blocks under release.targets.*
 *  - fold release.registries / release.accessLevel into targets.npm
 *  - absorb legacy @ship.zone/szci npm settings into targets.npm
 *  - translate the old release.steps array into preflight/target flags
 *  - drop the obsolete release.changelog toggle and stamp schemaVersion = 2
 *
 * Always returns true: this is only invoked for schemas below 2, and the
 * schemaVersion stamp at the end mutates the config unconditionally. (The
 * previous `return migrated || true` said the same thing via dead code.)
 */
const migrateToV2 = (smartconfigJson: Record<string, any>): boolean => {
  const cliConfig = ensureObject(smartconfigJson, CLI_NAMESPACE);
  const releaseConfig = ensureObject(cliConfig, "release");
  const targets = ensureObject(releaseConfig, "targets");
  const shipzoneConfig = smartconfigJson["@ship.zone/szci"];
  // Relocate per-target blocks; an already-existing target block wins and the
  // legacy block is then left in place untouched.
  for (const targetName of ["git", "npm", "docker"] as const) {
    if (
      isPlainObject(releaseConfig[targetName]) &&
      !isPlainObject(targets[targetName])
    ) {
      targets[targetName] = releaseConfig[targetName];
      delete releaseConfig[targetName];
    }
  }
  // release.registries → targets.npm.registries (existing registries win).
  if (Array.isArray(releaseConfig.registries)) {
    const npmTarget = ensureObject(targets, "npm");
    if (!Array.isArray(npmTarget.registries)) {
      npmTarget.registries = releaseConfig.registries;
    }
    delete releaseConfig.registries;
  }
  // release.accessLevel → targets.npm.accessLevel.
  if (releaseConfig.accessLevel) {
    const npmTarget = ensureObject(targets, "npm");
    if (!npmTarget.accessLevel) {
      npmTarget.accessLevel = releaseConfig.accessLevel;
    }
    delete releaseConfig.accessLevel;
  }
  // Absorb legacy shipzone npm settings (access level + registry URL).
  if (isPlainObject(shipzoneConfig)) {
    if (shipzoneConfig.npmAccessLevel) {
      const npmTarget = ensureObject(targets, "npm");
      if (!npmTarget.accessLevel) {
        npmTarget.accessLevel = shipzoneConfig.npmAccessLevel;
      }
      delete shipzoneConfig.npmAccessLevel;
    }
    if (shipzoneConfig.npmRegistryUrl) {
      const npmTarget = ensureObject(targets, "npm");
      const registry = normalizeRegistryUrl(shipzoneConfig.npmRegistryUrl);
      const registries = Array.isArray(npmTarget.registries)
        ? npmTarget.registries
        : [];
      if (!registries.includes(registry)) {
        registries.push(registry);
      }
      npmTarget.registries = registries;
      delete shipzoneConfig.npmRegistryUrl;
    }
  }
  // Translate the old imperative steps array into declarative flags.
  if (Array.isArray(releaseConfig.steps)) {
    const steps = releaseConfig.steps as string[];
    const preflight = ensureObject(releaseConfig, "preflight");
    if (steps.includes("test") && preflight.test === undefined) {
      preflight.test = true;
    }
    if (steps.includes("build") && preflight.build === undefined) {
      preflight.build = true;
    }
    const stepToTarget: Array<[string, string]> = [
      ["push", "git"],
      ["publishNpm", "npm"],
      ["publishDocker", "docker"],
    ];
    for (const [stepName, targetName] of stepToTarget) {
      if (steps.includes(stepName)) {
        const target = ensureObject(targets, targetName);
        if (target.enabled === undefined) {
          target.enabled = true;
        }
      }
    }
    delete releaseConfig.steps;
  }
  // The changelog is always maintained now; the old toggle is obsolete.
  if (releaseConfig.changelog) {
    delete releaseConfig.changelog;
  }
  cliConfig.schemaVersion = 2;
  return true;
};
// Ensure a scheme (https:// unless http:// was given) and strip one trailing slash.
const normalizeRegistryUrl = (url: string): string => {
  let result = url.trim();
  const hasScheme = result.startsWith("http://") || result.startsWith("https://");
  if (!hasScheme) {
    result = `https://${result}`;
  }
  if (result.endsWith("/")) {
    result = result.slice(0, -1);
  }
  return result;
};
// Run all applicable migrations on the given smartconfig JSON (mutating it in
// place) and report whether anything changed plus the version transition.
export const migrateSmartconfigData = (
  smartconfigJson: Record<string, any>,
  targetVersion = CURRENT_GITZONE_CLI_SCHEMA_VERSION,
): ISmartconfigMigrationResult => {
  let migrated = false;
  // Legacy top-level namespace renames apply regardless of schema version.
  migrated = migrateNamespaceKeys(smartconfigJson) || migrated;
  const cliConfig = ensureObject(smartconfigJson, CLI_NAMESPACE);
  // A missing schemaVersion is treated as v1.
  const fromVersion = typeof cliConfig.schemaVersion === "number" ? cliConfig.schemaVersion : 1;
  let currentVersion = fromVersion;
  if (currentVersion < 2 && targetVersion >= 2) {
    migrated = migrateToV2(smartconfigJson) || migrated;
    currentVersion = 2;
  }
  // Stamp the final version when migrating all the way to the current schema.
  if (targetVersion === CURRENT_GITZONE_CLI_SCHEMA_VERSION && cliConfig.schemaVersion !== targetVersion) {
    cliConfig.schemaVersion = targetVersion;
    migrated = true;
  }
  return {
    migrated,
    fromVersion,
    toVersion: Math.min(targetVersion, CURRENT_GITZONE_CLI_SCHEMA_VERSION),
  };
};
+387
View File
@@ -0,0 +1,387 @@
import { getCliConfigValue } from "./helpers.smartconfig.js";
// How workflow steps are confirmed: interactive prompt, auto-accept, or
// print-the-plan-only (dry run).
export type TConfirmationMode = "prompt" | "auto" | "plan";
// Steps the commit workflow may run.
export type TCommitStep =
  | "format"
  | "analyze"
  | "test"
  | "build"
  | "changelog"
  | "commit"
  | "push";
// Publish targets the release workflow may address.
export type TReleaseTarget = "git" | "npm" | "docker";
// User-facing commit workflow configuration; all fields optional — see
// resolveCommitWorkflow for the applied defaults.
export interface ICommitWorkflowConfig {
  confirmation?: TConfirmationMode;
  staging?: "all";
  steps?: TCommitStep[];
  alwaysTest?: boolean; // include the test step by default
  alwaysBuild?: boolean; // include the build step by default
  analyze?: {
    provider?: "ai";
    requireConfirmationFor?: string[];
  };
  test?: {
    command?: string; // overrides the default "pnpm test"
  };
  build?: {
    command?: string; // overrides the default "pnpm build"
    verifyCleanTree?: boolean;
  };
  push?: {
    remote?: string; // overrides the default "origin"
    followTags?: boolean;
  };
}
// Per-target release configuration blocks.
export interface IReleaseGitTargetConfig {
  enabled?: boolean;
  remote?: string;
  pushBranch?: boolean;
  pushTags?: boolean;
}
export interface IReleaseNpmTargetConfig {
  enabled?: boolean;
  registries?: string[];
  accessLevel?: "public" | "private";
  alreadyPublished?: "success" | "error"; // how to treat an already-published version
}
export interface IReleaseDockerTargetConfig {
  enabled?: boolean;
  images?: string[];
}
// User-facing release workflow configuration.
export interface IReleaseWorkflowConfig {
  confirmation?: TConfirmationMode;
  version?: {
    strategy?: "semver";
    source?: "pendingChangelog" | "manual";
  };
  preflight?: {
    requireCleanTree?: boolean;
    test?: boolean;
    build?: boolean;
    testCommand?: string;
    buildCommand?: string;
  };
  targets?: {
    git?: IReleaseGitTargetConfig;
    npm?: IReleaseNpmTargetConfig;
    docker?: IReleaseDockerTargetConfig;
  };
}
// Fully resolved commit workflow (defaults applied, CLI flags merged).
export interface IResolvedCommitWorkflow {
  confirmation: TConfirmationMode;
  steps: TCommitStep[];
  staging: "all";
  testCommand: string;
  buildCommand: string;
  changelogFile: "changelog.md";
  changelogSection: "Pending";
  pushRemote: string;
  pushFollowTags: boolean;
  releaseFlagRequested: boolean; // deprecated -r/--release flag was passed
}
// Fully resolved release workflow (defaults applied, CLI flags merged).
export interface IResolvedReleaseWorkflow {
  confirmation: TConfirmationMode;
  plan: string[]; // ordered step identifiers, see buildReleasePlan
  targets: TReleaseTarget[];
  requireCleanTree: boolean;
  runTests: boolean;
  runBuild: boolean;
  testCommand: string;
  buildCommand: string;
  changelogFile: "changelog.md";
  changelogPendingSection: "Pending";
  changelogVersionHeading: "## {{date}} - {{version}}";
  gitEnabled: boolean;
  gitRemote: string;
  pushBranch: boolean;
  pushTags: boolean;
  npmEnabled: boolean;
  npmRegistries: string[];
  npmAccessLevel: "public" | "private";
  npmAlreadyPublished: "success" | "error";
  dockerEnabled: boolean;
  dockerImages: string[];
}
// Shape of the CLI namespace config relevant to workflows.
interface ICliWorkflowConfig {
  commit?: ICommitWorkflowConfig;
  release?: IReleaseWorkflowConfig;
}
// Short-flag → commit-step mapping (e.g. `gitzone commit -ftp`).
const commitFlagToStep: Record<string, TCommitStep | undefined> = {
  f: "format",
  t: "test",
  b: "build",
  p: "push",
};
/**
 * Order-preserving de-duplication. Set iteration preserves insertion order
 * and uses SameValueZero equality — the same semantics as the previous
 * Array#includes-based scan, but O(n) instead of O(n²).
 */
const unique = <T>(items: T[]): T[] => {
  return [...new Set(items)];
};
// Accept only the three known confirmation modes; anything else → fallback.
const normalizeConfirmation = (
  value: unknown,
  fallback: TConfirmationMode,
): TConfirmationMode => {
  const knownModes = ["prompt", "auto", "plan"];
  return knownModes.includes(value as string)
    ? (value as TConfirmationMode)
    : fallback;
};
// Default to https:// when no scheme is present; drop a single trailing slash.
const normalizeRegistryUrl = (url: string): string => {
  const trimmed = url.trim();
  const withScheme =
    trimmed.startsWith("http://") || trimmed.startsWith("https://")
      ? trimmed
      : `https://${trimmed}`;
  return withScheme.endsWith("/") ? withScheme.slice(0, -1) : withScheme;
};
// A key counts as disabled via `key: false`, a `no-key` flag, or its `noKey` form.
const isDisabled = (argvArg: any, ...keys: string[]): boolean => {
  return keys.some((key) => {
    const noCamelKey = `no${key[0].toUpperCase()}${key.slice(1)}`;
    return (
      argvArg[key] === false ||
      Boolean(argvArg[`no-${key}`]) ||
      Boolean(argvArg[noCamelKey])
    );
  });
};
// The workflow config is the whole CLI namespace ("" = namespace root).
const readCliWorkflowConfig = async (): Promise<ICliWorkflowConfig> => {
  return await getCliConfigValue<ICliWorkflowConfig>("", {});
};
// Everything after the command token in the raw argv — or all args when the
// command token is absent (e.g. the CLI was invoked through an alias).
const getOrderedArgsAfterCommand = (commandName: string): string[] => {
  const rawArgs = process.argv.slice(2);
  const commandIndex = rawArgs.indexOf(commandName);
  return commandIndex === -1 ? rawArgs : rawArgs.slice(commandIndex + 1);
};
// Collect single-letter flags in the order they were typed, stopping at "--".
const getOrderedShortFlags = (commandName: string): string[] => {
  const shortFlags: string[] = [];
  for (const arg of getOrderedArgsAfterCommand(commandName)) {
    if (arg === "--") {
      break;
    }
    // Only short flags count; long flags and positionals are skipped.
    const isShortFlag =
      arg.startsWith("-") && !arg.startsWith("--") && arg.length > 1;
    if (isShortFlag) {
      shortFlags.push(...arg.slice(1).split(""));
    }
  }
  return shortFlags;
};
// True when the user passed any step-selecting flag (short or long form).
const hasExplicitCommitWorkflowFlags = (argvArg: any): boolean => {
  const stepFlagNames = ["f", "format", "t", "test", "b", "build", "p", "push"];
  return stepFlagNames.some((flagName) => Boolean(argvArg[flagName]));
};
// Normalize a raw step list into a valid commit pipeline: dedupe, guarantee
// analyze/changelog/commit are present in a legal order, and keep push last.
// Throws when a configured steps list violates the ordering invariants.
const normalizeCommitSteps = (rawSteps: TCommitStep[]): TCommitStep[] => {
  const steps = unique(rawSteps.filter(Boolean));
  const pushRequested = steps.includes("push");
  const prePushSteps = steps.filter((step) => step !== "push");
  // analyze runs first when it wasn't requested explicitly.
  if (!prePushSteps.includes("analyze")) {
    prePushSteps.unshift("analyze");
  }
  // changelog slots in directly before commit (or at the end pre-commit).
  if (!prePushSteps.includes("changelog")) {
    const commitIndex = prePushSteps.indexOf("commit");
    if (commitIndex === -1) {
      prePushSteps.push("changelog");
    } else {
      prePushSteps.splice(commitIndex, 0, "changelog");
    }
  }
  if (!prePushSteps.includes("commit")) {
    prePushSteps.push("commit");
  }
  // Invariant checks: violations indicate a badly configured steps list.
  const analyzeIndex = prePushSteps.indexOf("analyze");
  const commitIndex = prePushSteps.indexOf("commit");
  if (analyzeIndex > commitIndex) {
    throw new Error("Commit workflow requires analyze before commit.");
  }
  const changelogIndex = prePushSteps.indexOf("changelog");
  if (changelogIndex === -1 || changelogIndex > commitIndex) {
    throw new Error("Commit workflow requires changelog before commit.");
  }
  return pushRequested ? [...prePushSteps, "push"] : prePushSteps;
};
// Explicit --target/--targets list wins; otherwise individual target flags.
// Returns undefined when the user expressed no target preference.
const getTargetOverride = (argvArg: any): TReleaseTarget[] | undefined => {
  const validTargets: TReleaseTarget[] = ["git", "npm", "docker"];
  const rawTargets = argvArg.target || argvArg.targets;
  if (typeof rawTargets === "string") {
    const requested = rawTargets.split(",").map((target) => target.trim());
    return requested.filter((target): target is TReleaseTarget =>
      validTargets.includes(target as TReleaseTarget),
    );
  }
  const flagTargets: TReleaseTarget[] = [];
  if (argvArg.git || argvArg.p || argvArg.push) flagTargets.push("git");
  if (argvArg.npm) flagTargets.push("npm");
  if (argvArg.docker) flagTargets.push("docker");
  return flagTargets.length > 0 ? flagTargets : undefined;
};
// Assemble the ordered release plan: preflight → core → per-target publish.
const buildReleasePlan = (options: {
  requireCleanTree: boolean;
  runTests: boolean;
  runBuild: boolean;
  targets: TReleaseTarget[];
}): string[] => {
  const preflightSteps = [
    ...(options.requireCleanTree ? ["preflight.cleanTree"] : []),
    ...(options.runTests ? ["preflight.test"] : []),
  ];
  const coreSteps = [
    "core.version",
    "core.changelog",
    "core.commit",
    "core.tag",
    ...(options.runBuild ? ["core.build"] : []),
  ];
  const targetSteps = options.targets.map((target) => `target.${target}`);
  return [...preflightSteps, ...coreSteps, ...targetSteps];
};
// Build the effective commit workflow from config + CLI flags. Explicit step
// flags (-f/-t/-b/-p and long forms) override configured steps, which
// override the built-in defaults.
export const resolveCommitWorkflow = async (argvArg: any): Promise<IResolvedCommitWorkflow> => {
  const cliConfig = await readCliWorkflowConfig();
  const commitConfig = cliConfig.commit || {};
  // -r/--release no longer releases; surfaced so the caller can warn.
  const releaseFlagRequested = Boolean(argvArg.r || argvArg.release);
  // Confirmation precedence: --plan > -y/--yes > config > "prompt".
  let confirmation = normalizeConfirmation(commitConfig.confirmation, "prompt");
  if (argvArg.plan) {
    confirmation = "plan";
  } else if (argvArg.y || argvArg.yes) {
    confirmation = "auto";
  }
  let rawSteps: TCommitStep[];
  if (hasExplicitCommitWorkflowFlags(argvArg)) {
    // Short flags are honored in the order they were typed on the command line.
    const orderedFlags = getOrderedShortFlags("commit");
    rawSteps = ["analyze"];
    for (const shortFlag of orderedFlags) {
      const step = commitFlagToStep[shortFlag];
      if (step) {
        rawSteps.push(step);
      }
    }
    // Long-form flags don't appear as short flags; append them if missing.
    if (argvArg.format && !rawSteps.includes("format")) rawSteps.push("format");
    if (argvArg.test && !rawSteps.includes("test")) rawSteps.push("test");
    if (argvArg.build && !rawSteps.includes("build")) rawSteps.push("build");
    if (argvArg.push && !rawSteps.includes("push")) rawSteps.push("push");
    rawSteps.push("changelog");
    rawSteps.push("commit");
  } else if (Array.isArray(commitConfig.steps) && commitConfig.steps.length > 0) {
    rawSteps = commitConfig.steps;
  } else {
    // Default pipeline, optionally extended by alwaysTest/alwaysBuild config.
    rawSteps = ["analyze"];
    if (commitConfig.alwaysTest) rawSteps.push("test");
    if (commitConfig.alwaysBuild) rawSteps.push("build");
    rawSteps.push("changelog");
    rawSteps.push("commit");
  }
  return {
    confirmation,
    steps: normalizeCommitSteps(rawSteps),
    staging: commitConfig.staging || "all",
    testCommand: commitConfig.test?.command || "pnpm test",
    buildCommand: commitConfig.build?.command || "pnpm build",
    changelogFile: "changelog.md",
    changelogSection: "Pending",
    pushRemote: commitConfig.push?.remote || "origin",
    pushFollowTags: commitConfig.push?.followTags || false,
    releaseFlagRequested,
  };
};
// Build the effective release workflow from config + CLI flags.
export const resolveReleaseWorkflow = async (argvArg: any): Promise<IResolvedReleaseWorkflow> => {
  const cliConfig = await readCliWorkflowConfig();
  const releaseConfig = cliConfig.release || {};
  const targetConfig = releaseConfig.targets || {};
  const gitConfig = targetConfig.git || {};
  const npmConfig = targetConfig.npm || {};
  const dockerConfig = targetConfig.docker || {};
  const npmRegistries = (npmConfig.registries || []).map(normalizeRegistryUrl);
  // Defaults: git on, docker off, npm on only when registries are configured.
  const npmEnabled = npmConfig.enabled ?? npmRegistries.length > 0;
  const gitEnabled = gitConfig.enabled ?? true;
  const dockerEnabled = dockerConfig.enabled ?? false;
  // Confirmation precedence: --plan > -y/--yes > config > "prompt".
  let confirmation = normalizeConfirmation(releaseConfig.confirmation, "prompt");
  if (argvArg.plan) {
    confirmation = "plan";
  } else if (argvArg.y || argvArg.yes) {
    confirmation = "auto";
  }
  // Preflight: enable via -t/-b, disable via --no-test/--no-build/--no-preflight.
  let requireCleanTree = releaseConfig.preflight?.requireCleanTree ?? true;
  let runTests = releaseConfig.preflight?.test ?? false;
  let runBuild = releaseConfig.preflight?.build ?? true;
  if (argvArg.t || argvArg.test) runTests = true;
  if (argvArg.b || argvArg.build) runBuild = true;
  if (isDisabled(argvArg, "test")) runTests = false;
  if (isDisabled(argvArg, "build")) runBuild = false;
  if (isDisabled(argvArg, "preflight")) requireCleanTree = false;
  const configuredTargets: TReleaseTarget[] = [];
  if (gitEnabled) configuredTargets.push("git");
  if (npmEnabled) configuredTargets.push("npm");
  if (dockerEnabled) configuredTargets.push("docker");
  // Explicit target flags override the configured target set.
  let targets = getTargetOverride(argvArg) || configuredTargets;
  if (isDisabled(argvArg, "git", "push")) {
    targets = targets.filter((target) => target !== "git");
  }
  // --no-publish keeps only the git target.
  if (isDisabled(argvArg, "publish")) {
    targets = targets.filter((target) => target === "git");
  }
  targets = unique(targets);
  return {
    confirmation,
    plan: buildReleasePlan({ requireCleanTree, runTests, runBuild, targets }),
    targets,
    requireCleanTree,
    runTests,
    runBuild,
    testCommand: releaseConfig.preflight?.testCommand || "pnpm test",
    buildCommand: releaseConfig.preflight?.buildCommand || "pnpm build",
    changelogFile: "changelog.md",
    changelogPendingSection: "Pending",
    changelogVersionHeading: "## {{date}} - {{version}}",
    gitEnabled,
    gitRemote: gitConfig.remote || "origin",
    pushBranch: gitConfig.pushBranch ?? true,
    pushTags: gitConfig.pushTags ?? true,
    npmEnabled,
    npmRegistries,
    npmAccessLevel: npmConfig.accessLevel || "public",
    npmAlreadyPublished: npmConfig.alreadyPublished || "success",
    dockerEnabled,
    dockerImages: dockerConfig.images || [],
  };
};
+340 -315
View File
@@ -1,346 +1,371 @@
// this file contains code to create commits in a consistent way // this file contains code to create commits in a consistent way
import * as plugins from './mod.plugins.js'; import * as plugins from "./mod.plugins.js";
import * as paths from '../paths.js'; import * as paths from "../paths.js";
import { logger } from '../gitzone.logging.js'; import { logger } from "../gitzone.logging.js";
import * as helpers from './mod.helpers.js'; import * as ui from "./mod.ui.js";
import * as ui from './mod.ui.js'; import type { ICliMode } from "../helpers.climode.js";
import { ReleaseConfig } from '../mod_config/classes.releaseconfig.js'; import { getCliMode, printJson, runWithSuppressedOutput } from "../helpers.climode.js";
import { appendPendingChangelogEntry } from "../helpers.changelog.js";
import { resolveCommitWorkflow, type IResolvedCommitWorkflow } from "../helpers.workflow.js";
export const run = async (argvArg: any) => { export const run = async (argvArg: any) => {
// Read commit config from npmextra.json const mode = await getCliMode(argvArg);
const npmextraConfig = new plugins.npmextra.Npmextra(); const subcommand = argvArg._?.[1];
const gitzoneConfig = npmextraConfig.dataFor<{
commit?: {
alwaysTest?: boolean;
alwaysBuild?: boolean;
};
}>('@git.zone/cli', {});
const commitConfig = gitzoneConfig.commit || {};
// Check flags and merge with config options if (mode.help || subcommand === "help") {
const wantsRelease = !!(argvArg.r || argvArg.release); showHelp(mode);
const wantsTest = !!(argvArg.t || argvArg.test || commitConfig.alwaysTest); return;
const wantsBuild = !!(argvArg.b || argvArg.build || commitConfig.alwaysBuild);
let releaseConfig: ReleaseConfig | null = null;
if (wantsRelease) {
releaseConfig = await ReleaseConfig.fromCwd();
if (!releaseConfig.hasRegistries()) {
logger.log('error', 'No release registries configured.');
console.log('');
console.log(' Run `gitzone config add <registry-url>` to add registries.');
console.log('');
process.exit(1);
}
} }
// Print execution plan at the start if (subcommand === "recommend") {
ui.printExecutionPlan({ await handleRecommend(mode);
autoAccept: !!(argvArg.y || argvArg.yes), return;
push: !!(argvArg.p || argvArg.push),
test: wantsTest,
build: wantsBuild,
release: wantsRelease,
format: !!argvArg.format,
registries: releaseConfig?.getRegistries(),
});
if (argvArg.format) {
const formatMod = await import('../mod_format/index.js');
await formatMod.run();
} }
// Run tests early to fail fast before analysis if (mode.json) {
if (wantsTest) { printJson({
ui.printHeader('🧪 Running tests...'); ok: false,
const smartshellForTest = new plugins.smartshell.Smartshell({ error:
executor: 'bash', "JSON output is only supported for the read-only recommendation flow. Use `gitzone commit recommend --json`.",
sourceFilePaths: [],
}); });
const testResult = await smartshellForTest.exec('pnpm test'); return;
if (testResult.exitCode !== 0) {
logger.log('error', 'Tests failed. Aborting commit.');
process.exit(1);
}
logger.log('success', 'All tests passed.');
} }
ui.printHeader('🔍 Analyzing repository changes...'); const workflow = await resolveCommitWorkflow(argvArg);
if (workflow.releaseFlagRequested) {
const aidoc = new plugins.tsdoc.AiDoc(); logger.log(
await aidoc.start(); "warn",
"`gitzone commit -r` is deprecated and no longer releases. Use `gitzone release` after committing.",
const nextCommitObject = await aidoc.buildNextCommitObject(paths.cwd); );
}
await aidoc.stop();
printCommitExecutionPlan(workflow);
ui.printRecommendation({ if (workflow.confirmation === "plan") {
recommendedNextVersion: nextCommitObject.recommendedNextVersion, return;
recommendedNextVersionLevel: nextCommitObject.recommendedNextVersionLevel,
recommendedNextVersionScope: nextCommitObject.recommendedNextVersionScope,
recommendedNextVersionMessage: nextCommitObject.recommendedNextVersionMessage,
});
let answerBucket: plugins.smartinteract.AnswerBucket;
// Check if -y/--yes flag is set AND version is not a breaking change
// Breaking changes (major version bumps) always require manual confirmation
const isBreakingChange = nextCommitObject.recommendedNextVersionLevel === 'BREAKING CHANGE';
const canAutoAccept = (argvArg.y || argvArg.yes) && !isBreakingChange;
if (canAutoAccept) {
// Auto-mode: create AnswerBucket programmatically
logger.log('info', '✓ Auto-accepting AI recommendations (--yes flag)');
answerBucket = new plugins.smartinteract.AnswerBucket();
answerBucket.addAnswer({
name: 'commitType',
value: nextCommitObject.recommendedNextVersionLevel,
});
answerBucket.addAnswer({
name: 'commitScope',
value: nextCommitObject.recommendedNextVersionScope,
});
answerBucket.addAnswer({
name: 'commitDescription',
value: nextCommitObject.recommendedNextVersionMessage,
});
answerBucket.addAnswer({
name: 'pushToOrigin',
value: !!(argvArg.p || argvArg.push), // Only push if -p flag also provided
});
answerBucket.addAnswer({
name: 'createRelease',
value: wantsRelease,
});
} else {
// Warn if --yes was provided but we're requiring confirmation due to breaking change
if (isBreakingChange && (argvArg.y || argvArg.yes)) {
logger.log('warn', '⚠️ BREAKING CHANGE detected - manual confirmation required');
}
// Interactive mode: prompt user for input
const commitInteract = new plugins.smartinteract.SmartInteract();
commitInteract.addQuestions([
{
type: 'list',
name: `commitType`,
message: `Choose TYPE of the commit:`,
choices: [`fix`, `feat`, `BREAKING CHANGE`],
default: nextCommitObject.recommendedNextVersionLevel,
},
{
type: 'input',
name: `commitScope`,
message: `What is the SCOPE of the commit:`,
default: nextCommitObject.recommendedNextVersionScope,
},
{
type: `input`,
name: `commitDescription`,
message: `What is the DESCRIPTION of the commit?`,
default: nextCommitObject.recommendedNextVersionMessage,
},
{
type: 'confirm',
name: `pushToOrigin`,
message: `Do you want to push this version now?`,
default: true,
},
{
type: 'confirm',
name: `createRelease`,
message: `Do you want to publish to npm registries?`,
default: wantsRelease,
},
]);
answerBucket = await commitInteract.runQueue();
} }
const commitString = createCommitStringFromAnswerBucket(answerBucket);
const commitVersionType = (() => {
switch (answerBucket.getAnswerFor('commitType')) {
case 'fix':
return 'patch';
case 'feat':
return 'minor';
case 'BREAKING CHANGE':
return 'major';
}
})();
ui.printHeader('✨ Creating Semantic Commit');
ui.printCommitMessage(commitString);
const smartshellInstance = new plugins.smartshell.Smartshell({ const smartshellInstance = new plugins.smartshell.Smartshell({
executor: 'bash', executor: "bash",
sourceFilePaths: [], sourceFilePaths: [],
}); });
// Load release config if user wants to release (interactively selected) let nextCommitObject: any;
if (answerBucket.getAnswerFor('createRelease') && !releaseConfig) { let answerBucket: plugins.smartinteract.AnswerBucket | undefined;
releaseConfig = await ReleaseConfig.fromCwd();
if (!releaseConfig.hasRegistries()) { for (const step of workflow.steps) {
logger.log('error', 'No release registries configured.'); switch (step) {
console.log(''); case "format":
console.log(' Run `gitzone config add <registry-url>` to add registries.'); await runFormatStep();
console.log(''); break;
process.exit(1); case "test":
await runCommandStep(smartshellInstance, "Running tests", workflow.testCommand);
break;
case "build":
await runCommandStep(smartshellInstance, "Running build", workflow.buildCommand);
break;
case "analyze":
nextCommitObject = await runAnalyzeStep();
answerBucket = await buildAnswerBucket(nextCommitObject, workflow, mode, argvArg);
break;
case "changelog":
assertAnalysisComplete(answerBucket, nextCommitObject);
await runChangelogStep(workflow, answerBucket!, nextCommitObject);
break;
case "commit":
assertAnalysisComplete(answerBucket, nextCommitObject);
await runCommitStep(smartshellInstance, answerBucket!);
break;
case "push":
await runPushStep(smartshellInstance, workflow);
break;
} }
} }
// Determine total steps based on options const commitShaResult = await smartshellInstance.exec("git rev-parse --short HEAD");
// Note: test runs early (like format) so not counted in numbered steps const currentBranch = await detectCurrentBranch(smartshellInstance);
const willPush = answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true');
const willRelease = answerBucket.getAnswerFor('createRelease') && releaseConfig?.hasRegistries();
let totalSteps = 5; // Base steps: commitinfo, changelog, staging, commit, version
if (wantsBuild) totalSteps += 2; // build step + verification step
if (willPush) totalSteps++;
if (willRelease) totalSteps++;
let currentStep = 0;
// Step 1: Baking commitinfo
currentStep++;
ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'in-progress');
const commitInfo = new plugins.commitinfo.CommitInfo(
paths.cwd,
commitVersionType,
);
await commitInfo.writeIntoPotentialDirs();
ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'done');
// Step 2: Writing changelog
currentStep++;
ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'in-progress');
let changelog = nextCommitObject.changelog;
changelog = changelog.replaceAll(
'{{nextVersion}}',
(await commitInfo.getNextPlannedVersion()).versionString,
);
changelog = changelog.replaceAll(
'{{nextVersionScope}}',
`${await answerBucket.getAnswerFor('commitType')}(${await answerBucket.getAnswerFor('commitScope')})`,
);
changelog = changelog.replaceAll(
'{{nextVersionMessage}}',
nextCommitObject.recommendedNextVersionMessage,
);
if (nextCommitObject.recommendedNextVersionDetails?.length > 0) {
changelog = changelog.replaceAll(
'{{nextVersionDetails}}',
'- ' + nextCommitObject.recommendedNextVersionDetails.join('\n- '),
);
} else {
changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
}
await plugins.smartfs
.file(plugins.path.join(paths.cwd, `changelog.md`))
.encoding('utf8')
.write(changelog);
ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'done');
// Step 3: Staging files
currentStep++;
ui.printStep(currentStep, totalSteps, '📦 Staging files', 'in-progress');
await smartshellInstance.exec(`git add -A`);
ui.printStep(currentStep, totalSteps, '📦 Staging files', 'done');
// Step 4: Creating commit
currentStep++;
ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'in-progress');
await smartshellInstance.exec(`git commit -m "${commitString}"`);
ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'done');
// Step 5: Bumping version
currentStep++;
const projectType = await helpers.detectProjectType();
const newVersion = await helpers.bumpProjectVersion(projectType, commitVersionType, currentStep, totalSteps);
// Step 6: Run build (optional)
if (wantsBuild) {
currentStep++;
ui.printStep(currentStep, totalSteps, '🔨 Running build', 'in-progress');
const buildResult = await smartshellInstance.exec('pnpm build');
if (buildResult.exitCode !== 0) {
ui.printStep(currentStep, totalSteps, '🔨 Running build', 'error');
logger.log('error', 'Build failed. Aborting release.');
process.exit(1);
}
ui.printStep(currentStep, totalSteps, '🔨 Running build', 'done');
// Step 7: Verify no uncommitted changes
currentStep++;
ui.printStep(currentStep, totalSteps, '🔍 Verifying clean working tree', 'in-progress');
const statusResult = await smartshellInstance.exec('git status --porcelain');
if (statusResult.stdout.trim() !== '') {
ui.printStep(currentStep, totalSteps, '🔍 Verifying clean working tree', 'error');
logger.log('error', 'Build produced uncommitted changes. This usually means build output is not gitignored.');
logger.log('error', 'Uncommitted files:');
console.log(statusResult.stdout);
logger.log('error', 'Aborting release. Please ensure build artifacts are in .gitignore');
process.exit(1);
}
ui.printStep(currentStep, totalSteps, '🔍 Verifying clean working tree', 'done');
}
// Step: Push to remote (optional)
const currentBranch = await helpers.detectCurrentBranch();
if (willPush) {
currentStep++;
ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'in-progress');
await smartshellInstance.exec(`git push origin ${currentBranch} --follow-tags`);
ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'done');
}
// Step 7: Publish to npm registries (optional)
let releasedRegistries: string[] = [];
if (willRelease && releaseConfig) {
currentStep++;
const registries = releaseConfig.getRegistries();
ui.printStep(currentStep, totalSteps, `📦 Publishing to ${registries.length} registr${registries.length === 1 ? 'y' : 'ies'}`, 'in-progress');
const accessLevel = releaseConfig.getAccessLevel();
for (const registry of registries) {
try {
await smartshellInstance.exec(`npm publish --registry=${registry} --access=${accessLevel}`);
releasedRegistries.push(registry);
} catch (error) {
logger.log('error', `Failed to publish to ${registry}: ${error}`);
}
}
if (releasedRegistries.length === registries.length) {
ui.printStep(currentStep, totalSteps, `📦 Publishing to ${registries.length} registr${registries.length === 1 ? 'y' : 'ies'}`, 'done');
} else {
ui.printStep(currentStep, totalSteps, `📦 Publishing to ${registries.length} registr${registries.length === 1 ? 'y' : 'ies'}`, 'error');
}
}
console.log(''); // Add spacing before summary
// Get commit SHA for summary
const commitShaResult = await smartshellInstance.exec('git rev-parse --short HEAD');
const commitSha = commitShaResult.stdout.trim();
// Print final summary
ui.printSummary({ ui.printSummary({
projectType, projectType: "source",
branch: currentBranch, branch: currentBranch,
commitType: answerBucket.getAnswerFor('commitType'), commitType: answerBucket!.getAnswerFor("commitType"),
commitScope: answerBucket.getAnswerFor('commitScope'), commitScope: answerBucket!.getAnswerFor("commitScope"),
commitMessage: answerBucket.getAnswerFor('commitDescription'), commitMessage: answerBucket!.getAnswerFor("commitDescription"),
newVersion: newVersion, commitSha: commitShaResult.stdout.trim(),
commitSha: commitSha, pushed: workflow.steps.includes("push"),
pushed: willPush,
released: releasedRegistries.length > 0,
releasedRegistries: releasedRegistries.length > 0 ? releasedRegistries : undefined,
}); });
}; };
/**
 * Commit-workflow step: runs the gitzone formatter over the project
 * in non-interactive write mode.
 */
async function runFormatStep(): Promise<void> {
  ui.printHeader("Formatting project files");
  const formatModule = await import("../mod_format/index.js");
  const formatOptions = { write: true, yes: true, interactive: false };
  await formatModule.run(formatOptions);
}
/**
 * Commit-workflow step: executes one shell command under a labeled header.
 * Logs success when the command exits 0; otherwise logs an error and
 * aborts the whole process with exit code 1.
 *
 * @param smartshellInstance - shell used to execute the command
 * @param label - human-readable step name shown in the header and logs
 * @param command - shell command line to run
 */
async function runCommandStep(
  smartshellInstance: plugins.smartshell.Smartshell,
  label: string,
  command: string,
): Promise<void> {
  ui.printHeader(label);
  const { exitCode } = await smartshellInstance.exec(command);
  if (exitCode === 0) {
    logger.log("success", `${label} passed.`);
    return;
  }
  logger.log("error", `${label} failed. Aborting commit.`);
  process.exit(1);
}
/**
 * Commit-workflow step: starts the AI doc analyzer, builds the
 * next-commit recommendation for the current working directory,
 * prints it, and always stops the analyzer afterwards.
 *
 * @returns the next-commit object produced by the analyzer
 */
async function runAnalyzeStep(): Promise<any> {
  ui.printHeader("Analyzing repository changes");
  const analyzer = new plugins.tsdoc.AiDoc();
  await analyzer.start();
  try {
    const recommendation = await analyzer.buildNextCommitObject(paths.cwd);
    const {
      recommendedNextVersion,
      recommendedNextVersionLevel,
      recommendedNextVersionScope,
      recommendedNextVersionMessage,
    } = recommendation;
    ui.printRecommendation({
      recommendedNextVersion,
      recommendedNextVersionLevel,
      recommendedNextVersionScope,
      recommendedNextVersionMessage,
    });
    return recommendation;
  } finally {
    // Stop the analyzer even if buildNextCommitObject throws.
    await analyzer.stop();
  }
}
/**
 * Produces the commit answers for the workflow, either by auto-accepting
 * the AI recommendation (when allowed and not a breaking change) or by
 * prompting the user interactively.
 *
 * @param nextCommitObject - recommendation produced by the analyze step
 * @param workflow - resolved commit workflow (carries confirmation mode)
 * @param mode - CLI mode flags (interactive/json/...)
 * @param argvArg - raw argv flags (-y/--yes)
 * @throws when confirmation is required but no interactive terminal exists
 */
async function buildAnswerBucket(
  nextCommitObject: any,
  workflow: IResolvedCommitWorkflow,
  mode: ICliMode,
  argvArg: any,
): Promise<plugins.smartinteract.AnswerBucket> {
  const breaking = nextCommitObject.recommendedNextVersionLevel === "BREAKING CHANGE";
  // Only safe (non-breaking) recommendations may bypass the prompt.
  if (workflow.confirmation === "auto" && !breaking) {
    logger.log("info", "Auto-accepting AI recommendations");
    return createAnswerBucket({
      commitType: nextCommitObject.recommendedNextVersionLevel,
      commitScope: nextCommitObject.recommendedNextVersionScope,
      commitDescription: nextCommitObject.recommendedNextVersionMessage,
    });
  }
  const autoRequested = workflow.confirmation === "auto" || argvArg.y || argvArg.yes;
  if (breaking && autoRequested) {
    logger.log("warn", "BREAKING CHANGE detected - manual confirmation required");
  }
  if (!mode.interactive) {
    throw new Error("Commit confirmation requires an interactive terminal. Use `-y` or set commit.confirmation to `auto`.");
  }
  const interaction = new plugins.smartinteract.SmartInteract();
  interaction.addQuestions([
    {
      type: "list",
      name: "commitType",
      message: "Choose TYPE of the commit:",
      choices: ["fix", "feat", "BREAKING CHANGE"],
      default: nextCommitObject.recommendedNextVersionLevel,
    },
    {
      type: "input",
      name: "commitScope",
      message: "What is the SCOPE of the commit:",
      default: nextCommitObject.recommendedNextVersionScope,
    },
    {
      type: "input",
      name: "commitDescription",
      message: "What is the DESCRIPTION of the commit?",
      default: nextCommitObject.recommendedNextVersionMessage,
    },
  ]);
  return await interaction.runQueue();
}
/**
 * Builds an AnswerBucket from a plain record of commit answers,
 * adding one answer per key/value pair.
 */
function createAnswerBucket(answers: {
  commitType: string;
  commitScope: string;
  commitDescription: string;
}): plugins.smartinteract.AnswerBucket {
  const bucket = new plugins.smartinteract.AnswerBucket();
  Object.entries(answers).forEach(([name, value]) => {
    bucket.addAnswer({ name, value });
  });
  return bucket;
}
/**
 * Commit-workflow step: appends the recommended commit entry to the
 * configured changelog file's pending section.
 *
 * @param workflow - resolved workflow (changelog file and section names)
 * @param answerBucket - confirmed commit type/scope/description answers
 * @param nextCommitObject - analyzer output (supplies the detail bullets)
 */
async function runChangelogStep(
  workflow: IResolvedCommitWorkflow,
  answerBucket: plugins.smartinteract.AnswerBucket,
  nextCommitObject: any,
): Promise<void> {
  const changelogPath = plugins.path.join(paths.cwd, workflow.changelogFile);
  const entry = {
    type: answerBucket.getAnswerFor("commitType"),
    scope: answerBucket.getAnswerFor("commitScope"),
    message: answerBucket.getAnswerFor("commitDescription"),
    details: nextCommitObject.recommendedNextVersionDetails || [],
  };
  await appendPendingChangelogEntry(changelogPath, workflow.changelogSection, entry);
  logger.log("success", `Updated ${workflow.changelogFile} pending section.`);
}
/**
 * Commit-workflow step: stages all changes and creates the semantic
 * git commit. The commit message is shell-quoted before being passed
 * to `git commit -m`. Aborts the process if the commit fails.
 */
async function runCommitStep(
  smartshellInstance: plugins.smartshell.Smartshell,
  answerBucket: plugins.smartinteract.AnswerBucket,
): Promise<void> {
  ui.printHeader("Creating Semantic Commit");
  const message = createCommitStringFromAnswerBucket(answerBucket);
  ui.printCommitMessage(message);
  await smartshellInstance.exec("git add -A");
  const commitResult = await smartshellInstance.exec(
    `git commit -m ${shellQuote(message)}`,
  );
  if (commitResult.exitCode === 0) {
    return;
  }
  logger.log("error", "git commit failed.");
  process.exit(1);
}
async function runPushStep(
smartshellInstance: plugins.smartshell.Smartshell,
workflow: IResolvedCommitWorkflow,
): Promise<void> {
const currentBranch = await detectCurrentBranch(smartshellInstance);
const followTags = workflow.pushFollowTags ? " --follow-tags" : "";
const result = await smartshellInstance.exec(
`git push ${workflow.pushRemote} ${currentBranch}${followTags}`,
);
if (result.exitCode !== 0) {
logger.log("error", "git push failed.");
process.exit(1);
}
}
/**
 * Returns the current git branch name, falling back to "master" when
 * `git branch --show-current` prints nothing (e.g. detached HEAD).
 */
async function detectCurrentBranch(
  smartshellInstance: plugins.smartshell.Smartshell,
): Promise<string> {
  const { stdout } = await smartshellInstance.exec("git branch --show-current");
  const branch = stdout.trim();
  return branch === "" ? "master" : branch;
}
/**
 * Guards the changelog/commit steps: throws unless the analyze step has
 * already produced both the answer bucket and the next-commit object.
 *
 * @throws Error when either prerequisite is missing
 */
function assertAnalysisComplete(
  answerBucket: plugins.smartinteract.AnswerBucket | undefined,
  nextCommitObject: any,
): void {
  const analysisRan = Boolean(answerBucket) && Boolean(nextCommitObject);
  if (!analysisRan) {
    throw new Error("Commit workflow requires analyze before changelog and commit steps.");
  }
}
/**
 * Quotes a string for safe use as a single POSIX shell argument:
 * wraps the value in single quotes and escapes any embedded single
 * quote with the standard '\'' sequence.
 */
function shellQuote(value: string): string {
  const escaped = value.split("'").join("'\\''");
  return "'" + escaped + "'";
}
/**
 * Prints the resolved commit workflow (confirmation mode, ordered steps,
 * changelog target) for `--plan` style output.
 */
function printCommitExecutionPlan(workflow: IResolvedCommitWorkflow): void {
  const planLines = [
    "",
    "gitzone commit - resolved workflow",
    `confirmation: ${workflow.confirmation}`,
    `steps: ${workflow.steps.join(" -> ")}`,
    `changelog: ${workflow.changelogFile}#${workflow.changelogSection}`,
    "",
  ];
  for (const line of planLines) {
    console.log(line);
  }
}
/**
 * Handles `gitzone commit recommend`: builds a commit recommendation
 * without mutating the repository, then prints it as JSON (with all
 * incidental output suppressed) or as human-readable text.
 */
async function handleRecommend(mode: ICliMode): Promise<void> {
  const buildRecommendation = async () => {
    const aidoc = new plugins.tsdoc.AiDoc();
    await aidoc.start();
    try {
      return await aidoc.buildNextCommitObject(paths.cwd);
    } finally {
      await aidoc.stop();
    }
  };
  if (mode.json) {
    // Keep stdout clean so the emitted JSON stays machine-parseable.
    const recommendation = await runWithSuppressedOutput(buildRecommendation);
    printJson(recommendation);
    return;
  }
  const recommendation = await buildRecommendation();
  ui.printRecommendation({
    recommendedNextVersion: recommendation.recommendedNextVersion,
    recommendedNextVersionLevel: recommendation.recommendedNextVersionLevel,
    recommendedNextVersionScope: recommendation.recommendedNextVersionScope,
    recommendedNextVersionMessage: recommendation.recommendedNextVersionMessage,
  });
  console.log(
    `Suggested commit: ${recommendation.recommendedNextVersionLevel}(${recommendation.recommendedNextVersionScope}): ${recommendation.recommendedNextVersionMessage}`,
  );
}
const createCommitStringFromAnswerBucket = ( const createCommitStringFromAnswerBucket = (
answerBucket: plugins.smartinteract.AnswerBucket, answerBucket: plugins.smartinteract.AnswerBucket,
) => { ) => {
const commitType = answerBucket.getAnswerFor('commitType'); const commitType = answerBucket.getAnswerFor("commitType");
const commitScope = answerBucket.getAnswerFor('commitScope'); const commitScope = answerBucket.getAnswerFor("commitScope");
const commitDescription = answerBucket.getAnswerFor('commitDescription'); const commitDescription = answerBucket.getAnswerFor("commitDescription");
return `${commitType}(${commitScope}): ${commitDescription}`; return `${commitType}(${commitScope}): ${commitDescription}`;
}; };
/**
 * Prints usage information for `gitzone commit` — machine-readable JSON
 * when the CLI mode requests it, otherwise human-readable console text.
 */
export function showHelp(mode?: ICliMode): void {
  if (mode?.json) {
    printJson({
      command: "commit",
      usage: "gitzone commit [recommend] [options]",
      description: "Analyzes changes and creates one semantic source commit.",
      commands: [
        {
          name: "recommend",
          description: "Generate a commit recommendation without mutating the repository",
        },
      ],
      flags: [
        { flag: "-y, --yes", description: "Auto-accept safe AI recommendations" },
        { flag: "-p, --push", description: "Push to origin after committing" },
        { flag: "-t, --test", description: "Run tests as part of the commit workflow" },
        { flag: "-b, --build", description: "Run build as part of the commit workflow" },
        { flag: "-f, --format", description: "Run gitzone format before committing" },
        { flag: "--plan", description: "Show resolved workflow without mutating files" },
        { flag: "--json", description: "Emit JSON for `commit recommend` only" },
      ],
      examples: [
        "gitzone commit recommend --json",
        "gitzone commit -y",
        "gitzone commit -ytbp",
        "gitzone release",
      ],
    });
    return;
  }
  const helpLines = [
    "",
    "Usage: gitzone commit [recommend] [options]",
    "",
    "Creates one semantic source commit. It does not version, tag, or publish.",
    "",
    "Commands:",
    "  recommend Generate a commit recommendation without mutating the repository",
    "",
    "Flags:",
    "  -y, --yes Auto-accept safe AI recommendations",
    "  -p, --push Push after commit",
    "  -t, --test Run tests in the configured order",
    "  -b, --build Run build in the configured order",
    "  -f, --format Run gitzone format before committing",
    "  --plan Show resolved workflow without mutating files",
    "  --json Emit JSON for `commit recommend` only",
    "",
    "Examples:",
    "  gitzone commit recommend --json",
    "  gitzone commit -y",
    "  gitzone commit -ytbp",
    "  gitzone release",
    "",
  ];
  for (const line of helpLines) {
    console.log(line);
  }
}
+28 -16
View File
@@ -63,7 +63,7 @@ export async function detectProjectType(): Promise<ProjectType> {
* @param versionType Type of version bump * @param versionType Type of version bump
* @returns New version string * @returns New version string
*/ */
function calculateNewVersion(currentVersion: string, versionType: VersionType): string { export function calculateNewVersion(currentVersion: string, versionType: VersionType): string {
const versionMatch = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)/); const versionMatch = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)/);
if (!versionMatch) { if (!versionMatch) {
@@ -95,7 +95,7 @@ function calculateNewVersion(currentVersion: string, versionType: VersionType):
* @param projectType The project type to determine which file to read * @param projectType The project type to determine which file to read
* @returns The current version string * @returns The current version string
*/ */
async function readCurrentVersion(projectType: ProjectType): Promise<string> { export async function readCurrentVersion(projectType: ProjectType): Promise<string> {
if (projectType === 'npm' || projectType === 'both') { if (projectType === 'npm' || projectType === 'both') {
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json'); const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
const content = (await plugins.smartfs const content = (await plugins.smartfs
@@ -128,7 +128,7 @@ async function readCurrentVersion(projectType: ProjectType): Promise<string> {
* @param filePath Path to the JSON file * @param filePath Path to the JSON file
* @param newVersion The new version to write * @param newVersion The new version to write
*/ */
async function updateVersionFile(filePath: string, newVersion: string): Promise<void> { export async function updateVersionFile(filePath: string, newVersion: string): Promise<void> {
const content = (await plugins.smartfs const content = (await plugins.smartfs
.file(filePath) .file(filePath)
.encoding('utf8') .encoding('utf8')
@@ -141,6 +141,30 @@ async function updateVersionFile(filePath: string, newVersion: string): Promise<
.write(JSON.stringify(config, null, 2) + '\n'); .write(JSON.stringify(config, null, 2) + '\n');
} }
/**
* Updates project version files without creating commits or tags.
*/
/**
 * Updates project version files without creating commits or tags.
 * Writes the new version into package.json and/or deno.json depending
 * on the detected project type.
 *
 * @param projectType - 'npm', 'deno', or 'both'
 * @param newVersion - version string to write into the manifest(s)
 * @returns the list of file names that were updated (package.json first)
 */
export async function updateProjectVersionFiles(
  projectType: ProjectType,
  newVersion: string,
): Promise<string[]> {
  const updatedFiles: string[] = [];
  const manifests: Array<[string, boolean]> = [
    ['package.json', projectType === 'npm' || projectType === 'both'],
    ['deno.json', projectType === 'deno' || projectType === 'both'],
  ];
  for (const [fileName, applies] of manifests) {
    if (!applies) {
      continue;
    }
    await updateVersionFile(plugins.path.join(paths.cwd, fileName), newVersion);
    updatedFiles.push(fileName);
  }
  return updatedFiles;
}
/** /**
* Bumps the project version based on project type * Bumps the project version based on project type
* Handles npm-only, deno-only, and dual projects with unified logic * Handles npm-only, deno-only, and dual projects with unified logic
@@ -182,19 +206,7 @@ export async function bumpProjectVersion(
logger.log('info', `Bumping version: ${currentVersion}${newVersion}`); logger.log('info', `Bumping version: ${currentVersion}${newVersion}`);
// 3. Determine which files to update // 3. Determine which files to update
const filesToUpdate: string[] = []; const filesToUpdate = await updateProjectVersionFiles(projectType, newVersion);
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
if (projectType === 'npm' || projectType === 'both') {
await updateVersionFile(packageJsonPath, newVersion);
filesToUpdate.push('package.json');
}
if (projectType === 'deno' || projectType === 'both') {
await updateVersionFile(denoJsonPath, newVersion);
filesToUpdate.push('deno.json');
}
// 4. Stage all updated files // 4. Stage all updated files
await smartshellInstance.exec(`git add ${filesToUpdate.join(' ')}`); await smartshellInstance.exec(`git add ${filesToUpdate.join(' ')}`);
+7 -2
View File
@@ -10,7 +10,7 @@ interface ICommitSummary {
commitType: string; commitType: string;
commitScope: string; commitScope: string;
commitMessage: string; commitMessage: string;
newVersion: string; newVersion?: string;
commitSha?: string; commitSha?: string;
pushed: boolean; pushed: boolean;
repoUrl?: string; repoUrl?: string;
@@ -197,9 +197,14 @@ export function printSummary(summary: ICommitSummary): void {
`Branch: 🌿 ${summary.branch}`, `Branch: 🌿 ${summary.branch}`,
`Commit Type: ${getCommitTypeEmoji(summary.commitType)}`, `Commit Type: ${getCommitTypeEmoji(summary.commitType)}`,
`Scope: 📍 ${summary.commitScope}`, `Scope: 📍 ${summary.commitScope}`,
`New Version: 🏷️ v${summary.newVersion}`,
]; ];
if (summary.newVersion) {
lines.push(`New Version: 🏷️ v${summary.newVersion}`);
} else {
lines.push(`Version: ⊘ Not bumped`);
}
if (summary.commitSha) { if (summary.commitSha) {
lines.push(`Commit SHA: 📌 ${summary.commitSha}`); lines.push(`Commit SHA: 📌 ${summary.commitSha}`);
} }
+52 -22
View File
@@ -3,10 +3,12 @@ import * as plugins from './mod.plugins.js';
export interface ICommitConfig { export interface ICommitConfig {
alwaysTest: boolean; alwaysTest: boolean;
alwaysBuild: boolean; alwaysBuild: boolean;
confirmation: 'prompt' | 'auto' | 'plan';
steps: string[];
} }
/** /**
* Manages commit configuration stored in npmextra.json * Manages commit configuration stored in .smartconfig.json
* under @git.zone/cli.commit namespace * under @git.zone/cli.commit namespace
*/ */
export class CommitConfig { export class CommitConfig {
@@ -15,7 +17,7 @@ export class CommitConfig {
constructor(cwd: string = process.cwd()) { constructor(cwd: string = process.cwd()) {
this.cwd = cwd; this.cwd = cwd;
this.config = { alwaysTest: false, alwaysBuild: false }; this.config = { alwaysTest: false, alwaysBuild: false, confirmation: 'prompt', steps: ['analyze', 'changelog', 'commit'] };
} }
/** /**
@@ -28,50 +30,62 @@ export class CommitConfig {
} }
/** /**
* Load configuration from npmextra.json * Load configuration from .smartconfig.json
*/ */
public async load(): Promise<void> { public async load(): Promise<void> {
const npmextraInstance = new plugins.npmextra.Npmextra(this.cwd); const smartconfigInstance = new plugins.smartconfig.Smartconfig(this.cwd);
const gitzoneConfig = npmextraInstance.dataFor<any>('@git.zone/cli', {}); const gitzoneConfig = smartconfigInstance.dataFor<any>('@git.zone/cli', {});
const alwaysTest = gitzoneConfig?.commit?.alwaysTest ?? false;
const alwaysBuild = gitzoneConfig?.commit?.alwaysBuild ?? false;
this.config = { this.config = {
alwaysTest: gitzoneConfig?.commit?.alwaysTest ?? false, alwaysTest,
alwaysBuild: gitzoneConfig?.commit?.alwaysBuild ?? false, alwaysBuild,
confirmation: gitzoneConfig?.commit?.confirmation ?? 'prompt',
steps: gitzoneConfig?.commit?.steps || [
'analyze',
...(alwaysTest ? ['test'] : []),
...(alwaysBuild ? ['build'] : []),
'changelog',
'commit',
],
}; };
} }
/** /**
* Save configuration to npmextra.json * Save configuration to .smartconfig.json
*/ */
public async save(): Promise<void> { public async save(): Promise<void> {
const npmextraPath = plugins.path.join(this.cwd, 'npmextra.json'); const smartconfigPath = plugins.path.join(this.cwd, '.smartconfig.json');
let npmextraData: any = {}; let smartconfigData: any = {};
// Read existing npmextra.json // Read existing .smartconfig.json
if (await plugins.smartfs.file(npmextraPath).exists()) { if (await plugins.smartfs.file(smartconfigPath).exists()) {
const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read(); const content = await plugins.smartfs.file(smartconfigPath).encoding('utf8').read();
npmextraData = JSON.parse(content as string); smartconfigData = JSON.parse(content as string);
} }
// Ensure @git.zone/cli namespace exists // Ensure @git.zone/cli namespace exists
if (!npmextraData['@git.zone/cli']) { if (!smartconfigData['@git.zone/cli']) {
npmextraData['@git.zone/cli'] = {}; smartconfigData['@git.zone/cli'] = {};
} }
// Ensure commit object exists // Ensure commit object exists
if (!npmextraData['@git.zone/cli'].commit) { if (!smartconfigData['@git.zone/cli'].commit) {
npmextraData['@git.zone/cli'].commit = {}; smartconfigData['@git.zone/cli'].commit = {};
} }
// Update commit settings // Update commit settings
npmextraData['@git.zone/cli'].commit.alwaysTest = this.config.alwaysTest; smartconfigData['@git.zone/cli'].commit.alwaysTest = this.config.alwaysTest;
npmextraData['@git.zone/cli'].commit.alwaysBuild = this.config.alwaysBuild; smartconfigData['@git.zone/cli'].commit.alwaysBuild = this.config.alwaysBuild;
smartconfigData['@git.zone/cli'].commit.confirmation = this.config.confirmation;
smartconfigData['@git.zone/cli'].commit.steps = this.config.steps;
// Write back to file // Write back to file
await plugins.smartfs await plugins.smartfs
.file(npmextraPath) .file(smartconfigPath)
.encoding('utf8') .encoding('utf8')
.write(JSON.stringify(npmextraData, null, 2)); .write(JSON.stringify(smartconfigData, null, 2));
} }
/** /**
@@ -101,4 +115,20 @@ export class CommitConfig {
public setAlwaysBuild(value: boolean): void { public setAlwaysBuild(value: boolean): void {
this.config.alwaysBuild = value; this.config.alwaysBuild = value;
} }
public getConfirmation(): 'prompt' | 'auto' | 'plan' {
return this.config.confirmation;
}
public setConfirmation(value: 'prompt' | 'auto' | 'plan'): void {
this.config.confirmation = value;
}
public getSteps(): string[] {
return [...this.config.steps];
}
public setSteps(steps: string[]): void {
this.config.steps = [...steps];
}
} }
+31 -24
View File
@@ -8,7 +8,7 @@ export interface IReleaseConfig {
} }
/** /**
* Manages release configuration stored in npmextra.json * Manages release configuration stored in .smartconfig.json
* under @git.zone/cli.release namespace * under @git.zone/cli.release namespace
*/ */
export class ReleaseConfig { export class ReleaseConfig {
@@ -30,53 +30,60 @@ export class ReleaseConfig {
} }
/** /**
* Load configuration from npmextra.json * Load configuration from .smartconfig.json
*/ */
public async load(): Promise<void> { public async load(): Promise<void> {
const npmextraInstance = new plugins.npmextra.Npmextra(this.cwd); const smartconfigInstance = new plugins.smartconfig.Smartconfig(this.cwd);
const gitzoneConfig = npmextraInstance.dataFor<any>('@git.zone/cli', {}); const gitzoneConfig = smartconfigInstance.dataFor<any>('@git.zone/cli', {});
const npmTarget = gitzoneConfig?.release?.targets?.npm || {};
// Also check szci for backward compatibility
const szciConfig = npmextraInstance.dataFor<any>('@ship.zone/szci', {});
this.config = { this.config = {
registries: gitzoneConfig?.release?.registries || [], registries: npmTarget.registries || [],
accessLevel: gitzoneConfig?.release?.accessLevel || szciConfig?.npmAccessLevel || 'public', accessLevel: npmTarget.accessLevel || 'public',
}; };
} }
/** /**
* Save configuration to npmextra.json * Save configuration to .smartconfig.json
*/ */
public async save(): Promise<void> { public async save(): Promise<void> {
const npmextraPath = plugins.path.join(this.cwd, 'npmextra.json'); const smartconfigPath = plugins.path.join(this.cwd, '.smartconfig.json');
let npmextraData: any = {}; let smartconfigData: any = {};
// Read existing npmextra.json // Read existing .smartconfig.json
if (await plugins.smartfs.file(npmextraPath).exists()) { if (await plugins.smartfs.file(smartconfigPath).exists()) {
const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read(); const content = await plugins.smartfs.file(smartconfigPath).encoding('utf8').read();
npmextraData = JSON.parse(content as string); smartconfigData = JSON.parse(content as string);
} }
// Ensure @git.zone/cli namespace exists // Ensure @git.zone/cli namespace exists
if (!npmextraData['@git.zone/cli']) { if (!smartconfigData['@git.zone/cli']) {
npmextraData['@git.zone/cli'] = {}; smartconfigData['@git.zone/cli'] = {};
} }
// Ensure release object exists // Ensure release object exists
if (!npmextraData['@git.zone/cli'].release) { if (!smartconfigData['@git.zone/cli'].release) {
npmextraData['@git.zone/cli'].release = {}; smartconfigData['@git.zone/cli'].release = {};
}
if (!smartconfigData['@git.zone/cli'].release.targets) {
smartconfigData['@git.zone/cli'].release.targets = {};
}
if (!smartconfigData['@git.zone/cli'].release.targets.npm) {
smartconfigData['@git.zone/cli'].release.targets.npm = {};
} }
// Update registries and accessLevel // Update registries and accessLevel
npmextraData['@git.zone/cli'].release.registries = this.config.registries; smartconfigData['@git.zone/cli'].release.targets.npm.enabled = this.config.registries.length > 0;
npmextraData['@git.zone/cli'].release.accessLevel = this.config.accessLevel; smartconfigData['@git.zone/cli'].release.targets.npm.registries = this.config.registries;
smartconfigData['@git.zone/cli'].release.targets.npm.accessLevel = this.config.accessLevel;
// Write back to file // Write back to file
await plugins.smartfs await plugins.smartfs
.file(npmextraPath) .file(smartconfigPath)
.encoding('utf8') .encoding('utf8')
.write(JSON.stringify(npmextraData, null, 2)); .write(JSON.stringify(smartconfigData, null, 2));
} }
/** /**
+556 -187
View File
@@ -1,73 +1,123 @@
// gitzone config - manage release registry configuration // gitzone config - manage CLI smartconfig configuration
import * as plugins from './mod.plugins.js'; import * as plugins from "./mod.plugins.js";
import { ReleaseConfig } from './classes.releaseconfig.js'; import { ReleaseConfig } from "./classes.releaseconfig.js";
import { CommitConfig } from './classes.commitconfig.js'; import { CommitConfig } from "./classes.commitconfig.js";
import { runFormatter, type ICheckResult } from '../mod_format/index.js'; import { runFormatter, type ICheckResult } from "../mod_format/index.js";
import type { ICliMode } from "../helpers.climode.js";
import { getCliMode, printJson } from "../helpers.climode.js";
import {
getCliConfigValueFromData,
readSmartconfigFile,
setCliConfigValueInData,
unsetCliConfigValueInData,
writeSmartconfigFile,
} from "../helpers.smartconfig.js";
import {
CURRENT_GITZONE_CLI_SCHEMA_VERSION,
migrateSmartconfigData,
} from "../helpers.smartconfigmigrations.js";
export { ReleaseConfig, CommitConfig }; export { ReleaseConfig, CommitConfig };
const defaultCliMode: ICliMode = {
output: "human",
interactive: true,
json: false,
plain: false,
quiet: false,
yes: false,
help: false,
agent: false,
checkUpdates: true,
isTty: true,
};
/** /**
* Format npmextra.json with diff preview * Format .smartconfig.json with diff preview
* Shows diff first, asks for confirmation, then applies * Shows diff first, asks for confirmation, then applies
*/ */
async function formatNpmextraWithDiff(): Promise<void> { async function formatSmartconfigWithDiff(mode: ICliMode): Promise<void> {
if (!mode.interactive) {
return;
}
// Check for diffs first // Check for diffs first
const checkResult = await runFormatter('npmextra', { const checkResult = (await runFormatter("smartconfig", {
checkOnly: true, checkOnly: true,
showDiff: true, showDiff: true,
}) as ICheckResult | void; })) as ICheckResult | void;
if (checkResult && checkResult.hasDiff) { if (checkResult && checkResult.hasDiff) {
const shouldApply = await plugins.smartinteract.SmartInteract.getCliConfirmation( const shouldApply =
'Apply formatting changes to npmextra.json?', await plugins.smartinteract.SmartInteract.getCliConfirmation(
true "Apply formatting changes to .smartconfig.json?",
); true,
);
if (shouldApply) { if (shouldApply) {
await runFormatter('npmextra', { silent: true }); await runFormatter("smartconfig", { silent: true });
} }
} }
} }
export const run = async (argvArg: any) => { export const run = async (argvArg: any) => {
const mode = await getCliMode(argvArg);
const command = argvArg._?.[1]; const command = argvArg._?.[1];
const value = argvArg._?.[2]; const value = argvArg._?.[2];
if (mode.help || command === "help") {
showHelp(mode);
return;
}
// If no command provided, show interactive menu // If no command provided, show interactive menu
if (!command) { if (!command) {
if (!mode.interactive) {
showHelp(mode);
return;
}
await handleInteractiveMenu(); await handleInteractiveMenu();
return; return;
} }
switch (command) { switch (command) {
case 'show': case "show":
await handleShow(); await handleShow(mode);
break; break;
case 'add': case "add":
await handleAdd(value); await handleAdd(value, mode);
break; break;
case 'remove': case "remove":
await handleRemove(value); await handleRemove(value, mode);
break; break;
case 'clear': case "clear":
await handleClear(); await handleClear(mode);
break; break;
case 'access': case "access":
case 'accessLevel': case "accessLevel":
await handleAccessLevel(value); await handleAccessLevel(value, mode);
break; break;
case 'commit': case "commit":
await handleCommit(argvArg._?.[2], argvArg._?.[3]); await handleCommit(argvArg._?.[2], argvArg._?.[3], mode);
break; break;
case 'services': case "services":
await handleServices(); await handleServices(mode);
break; break;
case 'help': case "migrate":
showHelp(); await handleMigrate(value, mode);
break;
case "get":
await handleGet(value, mode);
break;
case "set":
await handleSet(value, argvArg._?.[3], mode);
break;
case "unset":
await handleUnset(value, mode);
break; break;
default: default:
plugins.logger.log('error', `Unknown command: ${command}`); plugins.logger.log("error", `Unknown command: ${command}`);
showHelp(); showHelp(mode);
} }
}; };
@@ -75,55 +125,65 @@ export const run = async (argvArg: any) => {
* Interactive menu for config command * Interactive menu for config command
*/ */
async function handleInteractiveMenu(): Promise<void> { async function handleInteractiveMenu(): Promise<void> {
console.log(''); console.log("");
console.log('╭─────────────────────────────────────────────────────────────╮'); console.log(
console.log('│ gitzone config - Project Configuration │'); "╭─────────────────────────────────────────────────────────────╮",
console.log('╰─────────────────────────────────────────────────────────────╯'); );
console.log(''); console.log(
"│ gitzone config - Project Configuration │",
);
console.log(
"╰─────────────────────────────────────────────────────────────╯",
);
console.log("");
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'list', type: "list",
name: 'action', name: "action",
message: 'What would you like to do?', message: "What would you like to do?",
default: 'show', default: "show",
choices: [ choices: [
{ name: 'Show current configuration', value: 'show' }, { name: "Show current configuration", value: "show" },
{ name: 'Add a registry', value: 'add' }, { name: "Add an npm target registry", value: "add" },
{ name: 'Remove a registry', value: 'remove' }, { name: "Remove an npm target registry", value: "remove" },
{ name: 'Clear all registries', value: 'clear' }, { name: "Clear npm target registries", value: "clear" },
{ name: 'Set access level (public/private)', value: 'access' }, { name: "Set access level (public/private)", value: "access" },
{ name: 'Configure commit options', value: 'commit' }, { name: "Migrate smartconfig schema", value: "migrate" },
{ name: 'Configure services', value: 'services' }, { name: "Configure commit options", value: "commit" },
{ name: 'Show help', value: 'help' }, { name: "Configure services", value: "services" },
{ name: "Show help", value: "help" },
], ],
}); });
const action = (response as any).value; const action = (response as any).value;
switch (action) { switch (action) {
case 'show': case "show":
await handleShow(); await handleShow(defaultCliMode);
break; break;
case 'add': case "add":
await handleAdd(); await handleAdd(undefined, defaultCliMode);
break; break;
case 'remove': case "remove":
await handleRemove(); await handleRemove(undefined, defaultCliMode);
break; break;
case 'clear': case "clear":
await handleClear(); await handleClear(defaultCliMode);
break; break;
case 'access': case "access":
await handleAccessLevel(); await handleAccessLevel(undefined, defaultCliMode);
break; break;
case 'commit': case "migrate":
await handleCommit(); await handleMigrate(undefined, defaultCliMode);
break; break;
case 'services': case "commit":
await handleServices(); await handleCommit(undefined, undefined, defaultCliMode);
break; break;
case 'help': case "services":
await handleServices(defaultCliMode);
break;
case "help":
showHelp(); showHelp();
break; break;
} }
@@ -132,50 +192,69 @@ async function handleInteractiveMenu(): Promise<void> {
/** /**
* Show current registry configuration * Show current registry configuration
*/ */
async function handleShow(): Promise<void> { async function handleShow(mode: ICliMode): Promise<void> {
if (mode.json) {
const smartconfigData = await readSmartconfigFile();
printJson(getCliConfigValueFromData(smartconfigData, ""));
return;
}
const config = await ReleaseConfig.fromCwd(); const config = await ReleaseConfig.fromCwd();
const registries = config.getRegistries(); const registries = config.getRegistries();
const accessLevel = config.getAccessLevel(); const accessLevel = config.getAccessLevel();
console.log(''); console.log("");
console.log('╭─────────────────────────────────────────────────────────────╮'); console.log(
console.log('│ Release Configuration │'); "╭─────────────────────────────────────────────────────────────╮",
console.log('╰─────────────────────────────────────────────────────────────╯'); );
console.log(''); console.log(
"│ Release NPM Target Configuration │",
);
console.log(
"╰─────────────────────────────────────────────────────────────╯",
);
console.log("");
// Show access level // Show access level
plugins.logger.log('info', `Access Level: ${accessLevel}`); plugins.logger.log("info", `Access Level: ${accessLevel}`);
console.log(''); console.log("");
if (registries.length === 0) { if (registries.length === 0) {
plugins.logger.log('info', 'No release registries configured.'); plugins.logger.log("info", "No npm target registries configured.");
console.log(''); console.log("");
console.log(' Run `gitzone config add <registry-url>` to add one.'); console.log(" Run `gitzone config add <registry-url>` to add one.");
console.log(''); console.log("");
} else { } else {
plugins.logger.log('info', `Configured registries (${registries.length}):`); plugins.logger.log("info", `Configured npm target registries (${registries.length}):`);
console.log(''); console.log("");
registries.forEach((url, index) => { registries.forEach((url, index) => {
console.log(` ${index + 1}. ${url}`); console.log(` ${index + 1}. ${url}`);
}); });
console.log(''); console.log("");
} }
} }
/** /**
* Add a registry URL * Add an npm target registry URL
*/ */
async function handleAdd(url?: string): Promise<void> { async function handleAdd(
url: string | undefined,
mode: ICliMode,
): Promise<void> {
if (!url) { if (!url) {
if (!mode.interactive) {
throw new Error("Registry URL is required in non-interactive mode");
}
// Interactive mode // Interactive mode
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'input', type: "input",
name: 'registryUrl', name: "registryUrl",
message: 'Enter registry URL:', message: "Enter npm target registry URL:",
default: 'https://registry.npmjs.org', default: "https://registry.npmjs.org",
validate: (input: string) => { validate: (input: string) => {
return !!(input && input.trim() !== ''); return !!(input && input.trim() !== "");
}, },
}); });
url = (response as any).value; url = (response as any).value;
@@ -186,32 +265,48 @@ async function handleAdd(url?: string): Promise<void> {
if (added) { if (added) {
await config.save(); await config.save();
plugins.logger.log('success', `Added registry: ${url}`); if (mode.json) {
await formatNpmextraWithDiff(); printJson({
ok: true,
action: "add",
registry: url,
registries: config.getRegistries(),
});
return;
}
plugins.logger.log("success", `Added npm target registry: ${url}`);
await formatSmartconfigWithDiff(mode);
} else { } else {
plugins.logger.log('warn', `Registry already exists: ${url}`); plugins.logger.log("warn", `Registry already exists: ${url}`);
} }
} }
/** /**
* Remove a registry URL * Remove an npm target registry URL
*/ */
async function handleRemove(url?: string): Promise<void> { async function handleRemove(
url: string | undefined,
mode: ICliMode,
): Promise<void> {
const config = await ReleaseConfig.fromCwd(); const config = await ReleaseConfig.fromCwd();
const registries = config.getRegistries(); const registries = config.getRegistries();
if (registries.length === 0) { if (registries.length === 0) {
plugins.logger.log('warn', 'No registries configured to remove.'); plugins.logger.log("warn", "No npm target registries configured to remove.");
return; return;
} }
if (!url) { if (!url) {
if (!mode.interactive) {
throw new Error("Registry URL is required in non-interactive mode");
}
// Interactive mode - show list to select from // Interactive mode - show list to select from
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'list', type: "list",
name: 'registryUrl', name: "registryUrl",
message: 'Select registry to remove:', message: "Select npm target registry to remove:",
choices: registries, choices: registries,
default: registries[0], default: registries[0],
}); });
@@ -222,99 +317,135 @@ async function handleRemove(url?: string): Promise<void> {
if (removed) { if (removed) {
await config.save(); await config.save();
plugins.logger.log('success', `Removed registry: ${url}`); if (mode.json) {
await formatNpmextraWithDiff(); printJson({
ok: true,
action: "remove",
registry: url,
registries: config.getRegistries(),
});
return;
}
plugins.logger.log("success", `Removed npm target registry: ${url}`);
await formatSmartconfigWithDiff(mode);
} else { } else {
plugins.logger.log('warn', `Registry not found: ${url}`); plugins.logger.log("warn", `Registry not found: ${url}`);
} }
} }
/** /**
* Clear all registries * Clear all npm target registries
*/ */
async function handleClear(): Promise<void> { async function handleClear(mode: ICliMode): Promise<void> {
const config = await ReleaseConfig.fromCwd(); const config = await ReleaseConfig.fromCwd();
if (!config.hasRegistries()) { if (!config.hasRegistries()) {
plugins.logger.log('info', 'No registries to clear.'); plugins.logger.log("info", "No npm target registries to clear.");
return; return;
} }
// Confirm before clearing // Confirm before clearing
const confirmed = await plugins.smartinteract.SmartInteract.getCliConfirmation( const confirmed = mode.interactive
'Clear all configured registries?', ? await plugins.smartinteract.SmartInteract.getCliConfirmation(
false "Clear all configured npm target registries?",
); false,
)
: true;
if (confirmed) { if (confirmed) {
config.clearRegistries(); config.clearRegistries();
await config.save(); await config.save();
plugins.logger.log('success', 'All registries cleared.'); if (mode.json) {
await formatNpmextraWithDiff(); printJson({ ok: true, action: "clear", registries: [] });
return;
}
plugins.logger.log("success", "All npm target registries cleared.");
await formatSmartconfigWithDiff(mode);
} else { } else {
plugins.logger.log('info', 'Operation cancelled.'); plugins.logger.log("info", "Operation cancelled.");
} }
} }
/** /**
* Set or toggle access level * Set or toggle access level
*/ */
async function handleAccessLevel(level?: string): Promise<void> { async function handleAccessLevel(
level: string | undefined,
mode: ICliMode,
): Promise<void> {
const config = await ReleaseConfig.fromCwd(); const config = await ReleaseConfig.fromCwd();
const currentLevel = config.getAccessLevel(); const currentLevel = config.getAccessLevel();
if (!level) { if (!level) {
if (!mode.interactive) {
throw new Error("Access level is required in non-interactive mode");
}
// Interactive mode - toggle or ask // Interactive mode - toggle or ask
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'list', type: "list",
name: 'accessLevel', name: "accessLevel",
message: 'Select npm access level for publishing:', message: "Select npm access level for publishing:",
choices: ['public', 'private'], choices: ["public", "private"],
default: currentLevel, default: currentLevel,
}); });
level = (response as any).value; level = (response as any).value;
} }
// Validate the level // Validate the level
if (level !== 'public' && level !== 'private') { if (level !== "public" && level !== "private") {
plugins.logger.log('error', `Invalid access level: ${level}. Must be 'public' or 'private'.`); plugins.logger.log(
"error",
`Invalid access level: ${level}. Must be 'public' or 'private'.`,
);
return; return;
} }
if (level === currentLevel) { if (level === currentLevel) {
plugins.logger.log('info', `Access level is already set to: ${level}`); plugins.logger.log("info", `Access level is already set to: ${level}`);
return; return;
} }
config.setAccessLevel(level as 'public' | 'private'); config.setAccessLevel(level as "public" | "private");
await config.save(); await config.save();
plugins.logger.log('success', `Access level set to: ${level}`); if (mode.json) {
await formatNpmextraWithDiff(); printJson({ ok: true, action: "access", accessLevel: level });
return;
}
plugins.logger.log("success", `Access level set to: ${level}`);
await formatSmartconfigWithDiff(mode);
} }
/** /**
* Handle commit configuration * Handle commit configuration
*/ */
async function handleCommit(setting?: string, value?: string): Promise<void> { async function handleCommit(
setting: string | undefined,
value: string | undefined,
mode: ICliMode,
): Promise<void> {
const config = await CommitConfig.fromCwd(); const config = await CommitConfig.fromCwd();
// No setting = interactive mode // No setting = interactive mode
if (!setting) { if (!setting) {
if (!mode.interactive) {
throw new Error("Commit setting is required in non-interactive mode");
}
await handleCommitInteractive(config); await handleCommitInteractive(config);
return; return;
} }
// Direct setting // Direct setting
switch (setting) { switch (setting) {
case 'alwaysTest': case "alwaysTest":
await handleCommitSetting(config, 'alwaysTest', value); await handleCommitSetting(config, "alwaysTest", value, mode);
break; break;
case 'alwaysBuild': case "alwaysBuild":
await handleCommitSetting(config, 'alwaysBuild', value); await handleCommitSetting(config, "alwaysBuild", value, mode);
break; break;
default: default:
plugins.logger.log('error', `Unknown commit setting: ${setting}`); plugins.logger.log("error", `Unknown commit setting: ${setting}`);
showCommitHelp(); showCommitHelp();
} }
} }
@@ -323,109 +454,347 @@ async function handleCommit(setting?: string, value?: string): Promise<void> {
* Interactive commit configuration * Interactive commit configuration
*/ */
async function handleCommitInteractive(config: CommitConfig): Promise<void> { async function handleCommitInteractive(config: CommitConfig): Promise<void> {
console.log(''); console.log("");
console.log('╭─────────────────────────────────────────────────────────────╮'); console.log(
console.log('│ Commit Configuration │'); "╭─────────────────────────────────────────────────────────────╮",
console.log('╰─────────────────────────────────────────────────────────────╯'); );
console.log(''); console.log(
"│ Commit Configuration │",
);
console.log(
"╰─────────────────────────────────────────────────────────────╯",
);
console.log("");
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'checkbox', type: "checkbox",
name: 'commitOptions', name: "commitOptions",
message: 'Select commit options to enable:', message: "Select commit options to enable:",
choices: [ choices: [
{ name: 'Always run tests before commit (-t)', value: 'alwaysTest' }, { name: "Always run tests before commit (-t)", value: "alwaysTest" },
{ name: 'Always build after commit (-b)', value: 'alwaysBuild' }, { name: "Always build after commit (-b)", value: "alwaysBuild" },
], ],
default: [ default: [
...(config.getAlwaysTest() ? ['alwaysTest'] : []), ...(config.getAlwaysTest() ? ["alwaysTest"] : []),
...(config.getAlwaysBuild() ? ['alwaysBuild'] : []), ...(config.getAlwaysBuild() ? ["alwaysBuild"] : []),
], ],
}); });
const selected = (response as any).value || []; const selected = (response as any).value || [];
config.setAlwaysTest(selected.includes('alwaysTest')); config.setAlwaysTest(selected.includes("alwaysTest"));
config.setAlwaysBuild(selected.includes('alwaysBuild')); config.setAlwaysBuild(selected.includes("alwaysBuild"));
syncCommitStepsFromBooleans(config);
await config.save(); await config.save();
plugins.logger.log('success', 'Commit configuration updated'); plugins.logger.log("success", "Commit configuration updated");
await formatNpmextraWithDiff(); await formatSmartconfigWithDiff(defaultCliMode);
} }
/** /**
* Set a specific commit setting * Set a specific commit setting
*/ */
async function handleCommitSetting(config: CommitConfig, setting: string, value?: string): Promise<void> { async function handleCommitSetting(
config: CommitConfig,
setting: string,
value: string | undefined,
mode: ICliMode,
): Promise<void> {
// Parse boolean value // Parse boolean value
const boolValue = value === 'true' || value === '1' || value === 'on'; const boolValue = value === "true" || value === "1" || value === "on";
if (setting === 'alwaysTest') { if (setting === "alwaysTest") {
config.setAlwaysTest(boolValue); config.setAlwaysTest(boolValue);
} else if (setting === 'alwaysBuild') { } else if (setting === "alwaysBuild") {
config.setAlwaysBuild(boolValue); config.setAlwaysBuild(boolValue);
} }
syncCommitStepsFromBooleans(config);
await config.save(); await config.save();
plugins.logger.log('success', `Set ${setting} to ${boolValue}`); if (mode.json) {
await formatNpmextraWithDiff(); printJson({ ok: true, action: "commit", setting, value: boolValue });
return;
}
plugins.logger.log("success", `Set ${setting} to ${boolValue}`);
await formatSmartconfigWithDiff(mode);
}
function syncCommitStepsFromBooleans(config: CommitConfig): void {
config.setSteps([
"analyze",
...(config.getAlwaysTest() ? ["test"] : []),
...(config.getAlwaysBuild() ? ["build"] : []),
"changelog",
"commit",
]);
} }
/** /**
* Show help for commit subcommand * Show help for commit subcommand
*/ */
function showCommitHelp(): void { function showCommitHelp(): void {
console.log(''); console.log("");
console.log('Usage: gitzone config commit [setting] [value]'); console.log("Usage: gitzone config commit [setting] [value]");
console.log(''); console.log("");
console.log('Settings:'); console.log("Settings:");
console.log(' alwaysTest [true|false] Always run tests before commit'); console.log(" alwaysTest [true|false] Always run tests before commit");
console.log(' alwaysBuild [true|false] Always build after commit'); console.log(" alwaysBuild [true|false] Always build after commit");
console.log(''); console.log("");
console.log('Examples:'); console.log("Examples:");
console.log(' gitzone config commit # Interactive mode'); console.log(" gitzone config commit # Interactive mode");
console.log(' gitzone config commit alwaysTest true'); console.log(" gitzone config commit alwaysTest true");
console.log(' gitzone config commit alwaysBuild false'); console.log(" gitzone config commit alwaysBuild false");
console.log(''); console.log("");
} }
/** /**
* Handle services configuration * Handle services configuration
*/ */
async function handleServices(): Promise<void> { async function handleServices(mode: ICliMode): Promise<void> {
if (!mode.interactive) {
throw new Error(
"Use `gitzone services config --json` or `gitzone services set ...` in non-interactive mode",
);
}
// Import and use ServiceManager's configureServices // Import and use ServiceManager's configureServices
const { ServiceManager } = await import('../mod_services/classes.servicemanager.js'); const { ServiceManager } =
await import("../mod_services/classes.servicemanager.js");
const serviceManager = new ServiceManager(); const serviceManager = new ServiceManager();
await serviceManager.init(); await serviceManager.init();
await serviceManager.configureServices(); await serviceManager.configureServices();
} }
async function handleGet(
configPath: string | undefined,
mode: ICliMode,
): Promise<void> {
if (!configPath) {
throw new Error("Configuration path is required");
}
const smartconfigData = await readSmartconfigFile();
const value = getCliConfigValueFromData(smartconfigData, configPath);
if (mode.json) {
printJson({ path: configPath, value, exists: value !== undefined });
return;
}
if (value === undefined) {
plugins.logger.log("warn", `No value set for ${configPath}`);
return;
}
if (typeof value === "string") {
console.log(value);
return;
}
printJson(value);
}
async function handleSet(
configPath: string | undefined,
rawValue: string | undefined,
mode: ICliMode,
): Promise<void> {
if (!configPath) {
throw new Error("Configuration path is required");
}
if (rawValue === undefined) {
throw new Error("Configuration value is required");
}
const smartconfigData = await readSmartconfigFile();
const parsedValue = parseConfigValue(rawValue);
setCliConfigValueInData(smartconfigData, configPath, parsedValue);
await writeSmartconfigFile(smartconfigData);
if (mode.json) {
printJson({
ok: true,
action: "set",
path: configPath,
value: parsedValue,
});
return;
}
plugins.logger.log("success", `Set ${configPath}`);
}
async function handleUnset(
configPath: string | undefined,
mode: ICliMode,
): Promise<void> {
if (!configPath) {
throw new Error("Configuration path is required");
}
const smartconfigData = await readSmartconfigFile();
const removed = unsetCliConfigValueInData(smartconfigData, configPath);
if (!removed) {
if (mode.json) {
printJson({
ok: false,
action: "unset",
path: configPath,
removed: false,
});
return;
}
plugins.logger.log("warn", `No value set for ${configPath}`);
return;
}
await writeSmartconfigFile(smartconfigData);
if (mode.json) {
printJson({ ok: true, action: "unset", path: configPath, removed: true });
return;
}
plugins.logger.log("success", `Unset ${configPath}`);
}
async function handleMigrate(
rawTargetVersion: string | undefined,
mode: ICliMode,
): Promise<void> {
const targetVersion = rawTargetVersion
? Number(rawTargetVersion)
: CURRENT_GITZONE_CLI_SCHEMA_VERSION;
if (!Number.isInteger(targetVersion) || targetVersion < 1) {
throw new Error("Migration target version must be a positive integer");
}
const smartconfigData = await readSmartconfigFile();
const result = migrateSmartconfigData(smartconfigData, targetVersion);
if (result.migrated) {
await writeSmartconfigFile(smartconfigData);
}
if (mode.json) {
printJson({ ok: true, action: "migrate", ...result });
return;
}
if (result.migrated) {
plugins.logger.log(
"success",
`Migrated .smartconfig.json from schema v${result.fromVersion} to v${result.toVersion}`,
);
} else {
plugins.logger.log("info", `.smartconfig.json already at schema v${result.toVersion}`);
}
}
function parseConfigValue(rawValue: string): any {
const trimmedValue = rawValue.trim();
if (trimmedValue === "true") {
return true;
}
if (trimmedValue === "false") {
return false;
}
if (trimmedValue === "null") {
return null;
}
if (/^-?\d+(\.\d+)?$/.test(trimmedValue)) {
return Number(trimmedValue);
}
if (
(trimmedValue.startsWith("{") && trimmedValue.endsWith("}")) ||
(trimmedValue.startsWith("[") && trimmedValue.endsWith("]")) ||
(trimmedValue.startsWith('"') && trimmedValue.endsWith('"'))
) {
return JSON.parse(trimmedValue);
}
return rawValue;
}
/** /**
* Show help for config command * Show help for config command
*/ */
function showHelp(): void { export function showHelp(mode?: ICliMode): void {
console.log(''); if (mode?.json) {
console.log('Usage: gitzone config <command> [options]'); printJson({
console.log(''); command: "config",
console.log('Commands:'); usage: "gitzone config <command> [options]",
console.log(' show Display current release configuration'); commands: [
console.log(' add [url] Add a registry URL'); {
console.log(' remove [url] Remove a registry URL'); name: "show",
console.log(' clear Clear all registries'); description: "Display current @git.zone/cli configuration",
console.log(' access [public|private] Set npm access level for publishing'); },
console.log(' commit [setting] [value] Configure commit options'); { name: "get <path>", description: "Read a single config value" },
console.log(' services Configure which services are enabled'); { name: "set <path> <value>", description: "Write a config value" },
console.log(''); { name: "unset <path>", description: "Delete a config value" },
console.log('Examples:'); { name: "add [url]", description: "Add an npm release target registry" },
console.log(' gitzone config show'); { name: "remove [url]", description: "Remove an npm release target registry" },
console.log(' gitzone config add https://registry.npmjs.org'); { name: "clear", description: "Clear npm release target registries" },
console.log(' gitzone config add https://verdaccio.example.com'); {
console.log(' gitzone config remove https://registry.npmjs.org'); name: "access [public|private]",
console.log(' gitzone config clear'); description: "Set npm target publish access level",
console.log(' gitzone config access public'); },
console.log(' gitzone config access private'); {
console.log(' gitzone config commit # Interactive'); name: "commit <setting> <value>",
console.log(' gitzone config commit alwaysTest true'); description: "Set commit defaults",
console.log(' gitzone config services # Interactive'); },
console.log(''); {
name: "migrate [version]",
description: "Run version-targeted .smartconfig.json migrations",
},
],
examples: [
"gitzone config show --json",
"gitzone config get release.targets.npm.accessLevel",
"gitzone config set cli.interactive false",
"gitzone config set cli.output json",
],
});
return;
}
console.log("");
console.log("Usage: gitzone config <command> [options]");
console.log("");
console.log("Commands:");
console.log(
" show Display current @git.zone/cli configuration",
);
console.log(" get <path> Read a single config value");
console.log(" set <path> <value> Write a config value");
console.log(" unset <path> Delete a config value");
console.log(" add [url] Add an npm target registry URL");
console.log(" remove [url] Remove an npm target registry URL");
console.log(" clear Clear npm target registries");
console.log(
" access [public|private] Set npm target access level for publishing",
);
console.log(" commit [setting] [value] Configure commit options");
console.log(" migrate [version] Run version-targeted smartconfig migrations");
console.log(
" services Configure which services are enabled",
);
console.log("");
console.log("Examples:");
console.log(" gitzone config show");
console.log(" gitzone config show --json");
console.log(" gitzone config get release.targets.npm.accessLevel");
console.log(" gitzone config set cli.interactive false");
console.log(" gitzone config set cli.output json");
console.log(" gitzone config unset cli.output");
console.log(" gitzone config add https://registry.npmjs.org");
console.log(" gitzone config add https://verdaccio.example.com");
console.log(" gitzone config remove https://registry.npmjs.org");
console.log(" gitzone config clear");
console.log(" gitzone config access public");
console.log(" gitzone config access private");
console.log(" gitzone config migrate 2");
console.log(" gitzone config commit # Interactive");
console.log(" gitzone config commit alwaysTest true");
console.log(" gitzone config services # Interactive");
console.log("");
} }
+19 -26
View File
@@ -2,11 +2,12 @@ import * as plugins from './mod.plugins.js';
import { FormatContext } from './classes.formatcontext.js'; import { FormatContext } from './classes.formatcontext.js';
import type { IPlannedChange, ICheckResult } from './interfaces.format.js'; import type { IPlannedChange, ICheckResult } from './interfaces.format.js';
import { Project } from '../classes.project.js'; import { Project } from '../classes.project.js';
import { FormatStats } from './classes.formatstats.js';
export abstract class BaseFormatter { export abstract class BaseFormatter {
protected context: FormatContext; protected context: FormatContext;
protected project: Project; protected project: Project;
protected stats: any; // Will be FormatStats from context protected stats: FormatStats;
constructor(context: FormatContext, project: Project) { constructor(context: FormatContext, project: Project) {
this.context = context; this.context = context;
@@ -36,9 +37,6 @@ export abstract class BaseFormatter {
} }
await this.postExecute(); await this.postExecute();
} catch (error) {
// Don't rollback here - let the FormatPlanner handle it
throw error;
} finally { } finally {
this.stats.endModule(this.name, startTime); this.stats.endModule(this.name, startTime);
} }
@@ -53,13 +51,10 @@ export abstract class BaseFormatter {
} }
protected async modifyFile(filepath: string, content: string): Promise<void> { protected async modifyFile(filepath: string, content: string): Promise<void> {
// Validate filepath before writing
if (!filepath || filepath.trim() === '') { if (!filepath || filepath.trim() === '') {
throw new Error(`Invalid empty filepath in modifyFile`); throw new Error(`Invalid empty filepath in modifyFile`);
} }
// Ensure we have a proper path with directory component
// If the path has no directory component (e.g., "package.json"), prepend "./"
let normalizedPath = filepath; let normalizedPath = filepath;
if (!plugins.path.parse(filepath).dir) { if (!plugins.path.parse(filepath).dir) {
normalizedPath = './' + filepath; normalizedPath = './' + filepath;
@@ -69,44 +64,46 @@ export abstract class BaseFormatter {
} }
protected async createFile(filepath: string, content: string): Promise<void> { protected async createFile(filepath: string, content: string): Promise<void> {
await plugins.smartfs.file(filepath).encoding('utf8').write(content); let normalizedPath = filepath;
if (!plugins.path.parse(filepath).dir) {
normalizedPath = './' + filepath;
}
// Ensure parent directory exists
const dir = plugins.path.dirname(normalizedPath);
if (dir && dir !== '.') {
await plugins.smartfs.directory(dir).recursive().create();
}
await plugins.smartfs.file(normalizedPath).encoding('utf8').write(content);
} }
protected async deleteFile(filepath: string): Promise<void> { protected async deleteFile(filepath: string): Promise<void> {
await plugins.smartfs.file(filepath).delete(); await plugins.smartfs.file(filepath).delete();
} }
protected async shouldProcessFile(filepath: string): Promise<boolean> {
return true;
}
/** /**
* Check for diffs without applying changes * Check for diffs without applying changes
* Returns information about what would change
*/ */
async check(): Promise<ICheckResult> { async check(): Promise<ICheckResult> {
const changes = await this.analyze(); const changes = await this.analyze();
const diffs: ICheckResult['diffs'] = []; const diffs: ICheckResult['diffs'] = [];
for (const change of changes) { for (const change of changes) {
// Skip generic changes that don't have actual content
if (change.path === '<various files>') { if (change.path === '<various files>') {
continue; continue;
} }
if (change.type === 'modify' || change.type === 'create') { if (change.type === 'modify' || change.type === 'create') {
// Read current content if file exists
let currentContent: string | undefined; let currentContent: string | undefined;
try { try {
currentContent = await plugins.smartfs.file(change.path).encoding('utf8').read() as string; currentContent = await plugins.smartfs.file(change.path).encoding('utf8').read() as string;
} catch { } catch {
// File doesn't exist yet
currentContent = undefined; currentContent = undefined;
} }
const newContent = change.content; const newContent = change.content;
// Check if there's an actual diff
if (currentContent !== newContent && newContent !== undefined) { if (currentContent !== newContent && newContent !== undefined) {
diffs.push({ diffs.push({
path: change.path, path: change.path,
@@ -116,7 +113,6 @@ export abstract class BaseFormatter {
}); });
} }
} else if (change.type === 'delete') { } else if (change.type === 'delete') {
// Check if file exists before marking for deletion
try { try {
const currentContent = await plugins.smartfs.file(change.path).encoding('utf8').read() as string; const currentContent = await plugins.smartfs.file(change.path).encoding('utf8').read() as string;
diffs.push({ diffs.push({
@@ -137,16 +133,16 @@ export abstract class BaseFormatter {
}; };
} }
/**
* Display a single diff using smartdiff
*/
displayDiff(diff: ICheckResult['diffs'][0]): void { displayDiff(diff: ICheckResult['diffs'][0]): void {
console.log(`\n--- ${diff.path}`); console.log(`\n--- ${diff.path}`);
if (diff.before && diff.after) { if (diff.before && diff.after) {
console.log(plugins.smartdiff.formatLineDiffForConsole(diff.before, diff.after)); console.log(plugins.smartdiff.formatUnifiedDiffForConsole(diff.before, diff.after, {
originalFileName: diff.path,
revisedFileName: diff.path,
context: 3,
}));
} else if (diff.after && !diff.before) { } else if (diff.after && !diff.before) {
console.log(' (new file)'); console.log(' (new file)');
// Show first few lines of new content
const lines = diff.after.split('\n').slice(0, 10); const lines = diff.after.split('\n').slice(0, 10);
lines.forEach(line => console.log(` + ${line}`)); lines.forEach(line => console.log(` + ${line}`));
if (diff.after.split('\n').length > 10) { if (diff.after.split('\n').length > 10) {
@@ -157,9 +153,6 @@ export abstract class BaseFormatter {
} }
} }
/**
* Display all diffs from a check result
*/
displayAllDiffs(result: ICheckResult): void { displayAllDiffs(result: ICheckResult): void {
if (!result.hasDiff) { if (!result.hasDiff) {
console.log(' No changes detected'); console.log(' No changes detected');
-235
View File
@@ -1,235 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
/**
 * Cached metadata for a single tracked file.
 */
export interface IFileCache {
  path: string; // Path as supplied by the caller (may be relative to paths.cwd)
  checksum: string; // sha256 hex digest of the file's utf8 content
  modified: number; // mtime in ms since epoch, captured when the entry was written
  size: number; // File size in bytes, captured when the entry was written
}
/**
 * On-disk manifest structure persisted to manifest.json in the cache dir.
 */
export interface ICacheManifest {
  version: string; // Cache format version (written from ChangeCache.cacheVersion)
  lastFormat: number; // Timestamp (ms) of the most recent cache update; 0 when fresh
  files: IFileCache[]; // One entry per tracked file
}
/**
 * Tracks per-file checksums so format runs can skip unchanged files.
 * State is persisted to `.nogit/gitzone-cache/manifest.json` under the
 * project cwd.
 */
export class ChangeCache {
  private cacheDir: string;
  private manifestPath: string;
  private cacheVersion = '1.0.0';

  constructor() {
    this.cacheDir = plugins.path.join(paths.cwd, '.nogit', 'gitzone-cache');
    this.manifestPath = plugins.path.join(this.cacheDir, 'manifest.json');
  }

  /** Ensure the cache directory exists. */
  async initialize(): Promise<void> {
    await plugins.smartfs.directory(this.cacheDir).recursive().create();
  }

  /**
   * Load the cache manifest from disk.
   * Falls back to an empty default manifest when the file is missing,
   * unparsable, or structurally invalid; a corrupted file is deleted
   * best-effort so the next run starts clean.
   */
  async getManifest(): Promise<ICacheManifest> {
    const defaultManifest: ICacheManifest = {
      version: this.cacheVersion,
      lastFormat: 0,
      files: [],
    };

    const exists = await plugins.smartfs.file(this.manifestPath).exists();
    if (!exists) {
      return defaultManifest;
    }

    try {
      const content = (await plugins.smartfs
        .file(this.manifestPath)
        .encoding('utf8')
        .read()) as string;
      const manifest = JSON.parse(content);

      // Validate the manifest structure before trusting it
      if (this.isValidManifest(manifest)) {
        return manifest;
      }
      console.warn('Invalid manifest structure, returning default manifest');
      return defaultManifest;
    } catch (error) {
      console.warn(
        `Failed to read cache manifest: ${error.message}, returning default manifest`,
      );
      // Try to delete the corrupted file
      try {
        await plugins.smartfs.file(this.manifestPath).delete();
      } catch (removeError) {
        // Ignore removal errors
      }
      return defaultManifest;
    }
  }

  /**
   * Validate structure and persist the manifest as pretty-printed JSON.
   * @throws when the manifest fails structural validation
   */
  async saveManifest(manifest: ICacheManifest): Promise<void> {
    if (!this.isValidManifest(manifest)) {
      throw new Error('Invalid manifest structure, cannot save');
    }

    // Ensure directory exists before writing
    await plugins.smartfs.directory(this.cacheDir).recursive().create();

    const jsonContent = JSON.stringify(manifest, null, 2);
    await plugins.smartfs
      .file(this.manifestPath)
      .encoding('utf8')
      .write(jsonContent);
  }

  /**
   * Determine whether a file differs from its cached entry.
   * Missing files and files absent from the cache count as changed;
   * directories always count as unchanged (they are never processed).
   */
  async hasFileChanged(filePath: string): Promise<boolean> {
    const manifest = await this.getManifest();
    return this.fileChangedAgainstManifest(filePath, manifest);
  }

  /**
   * Core change check against an already-loaded manifest.
   * Compares cheap metadata (size, mtime) first and only reads/hashes the
   * file content when the metadata still matches, avoiding a full read for
   * files that obviously changed.
   */
  private async fileChangedAgainstManifest(
    filePath: string,
    manifest: ICacheManifest,
  ): Promise<boolean> {
    const absolutePath = plugins.path.isAbsolute(filePath)
      ? filePath
      : plugins.path.join(paths.cwd, filePath);

    const exists = await plugins.smartfs.file(absolutePath).exists();
    if (!exists) {
      return true; // File doesn't exist, so it's "changed" (will be created)
    }

    const stats = await plugins.smartfs.file(absolutePath).stat();
    if (stats.isDirectory) {
      return false; // Directories are not processed
    }

    const cachedFile = manifest.files.find((f) => f.path === filePath);
    if (!cachedFile) {
      return true; // Not in cache, so it's changed
    }

    // Metadata mismatch means changed — no need to hash the content
    if (
      cachedFile.size !== stats.size ||
      cachedFile.modified !== stats.mtime.getTime()
    ) {
      return true;
    }

    const content = (await plugins.smartfs
      .file(absolutePath)
      .encoding('utf8')
      .read()) as string;
    return cachedFile.checksum !== this.calculateChecksum(content);
  }

  /**
   * Record (or refresh) the cache entry for a file and bump lastFormat.
   * Directories are ignored.
   */
  async updateFileCache(filePath: string): Promise<void> {
    const absolutePath = plugins.path.isAbsolute(filePath)
      ? filePath
      : plugins.path.join(paths.cwd, filePath);

    const stats = await plugins.smartfs.file(absolutePath).stat();
    if (stats.isDirectory) {
      return; // Don't cache directories
    }

    const content = (await plugins.smartfs
      .file(absolutePath)
      .encoding('utf8')
      .read()) as string;
    const checksum = this.calculateChecksum(content);

    const manifest = await this.getManifest();
    const existingIndex = manifest.files.findIndex((f) => f.path === filePath);

    const cacheEntry: IFileCache = {
      path: filePath,
      checksum,
      modified: stats.mtime.getTime(),
      size: stats.size,
    };

    if (existingIndex !== -1) {
      manifest.files[existingIndex] = cacheEntry;
    } else {
      manifest.files.push(cacheEntry);
    }

    manifest.lastFormat = Date.now();
    await this.saveManifest(manifest);
  }

  /**
   * Filter a list of paths down to those that changed since the last format.
   * Loads the manifest once instead of re-reading it from disk per file.
   */
  async getChangedFiles(filePaths: string[]): Promise<string[]> {
    const manifest = await this.getManifest();
    const changedFiles: string[] = [];
    for (const filePath of filePaths) {
      if (await this.fileChangedAgainstManifest(filePath, manifest)) {
        changedFiles.push(filePath);
      }
    }
    return changedFiles;
  }

  /**
   * Drop manifest entries whose files no longer exist on disk.
   */
  async clean(): Promise<void> {
    const manifest = await this.getManifest();
    const validFiles: IFileCache[] = [];

    for (const file of manifest.files) {
      const absolutePath = plugins.path.isAbsolute(file.path)
        ? file.path
        : plugins.path.join(paths.cwd, file.path);
      if (await plugins.smartfs.file(absolutePath).exists()) {
        validFiles.push(file);
      }
    }

    manifest.files = validFiles;
    await this.saveManifest(manifest);
  }

  /** sha256 hex digest of the given content. */
  private calculateChecksum(content: string | Buffer): string {
    return plugins.crypto.createHash('sha256').update(content).digest('hex');
  }

  /**
   * Structural type guard for manifests read from disk: checks the top-level
   * fields and every file entry so a corrupted manifest is never trusted.
   */
  private isValidManifest(manifest: any): manifest is ICacheManifest {
    if (!manifest || typeof manifest !== 'object') {
      return false;
    }
    if (
      typeof manifest.version !== 'string' ||
      typeof manifest.lastFormat !== 'number' ||
      !Array.isArray(manifest.files)
    ) {
      return false;
    }
    for (const file of manifest.files) {
      if (
        !file ||
        typeof file !== 'object' ||
        typeof file.path !== 'string' ||
        typeof file.checksum !== 'string' ||
        typeof file.modified !== 'number' ||
        typeof file.size !== 'number'
      ) {
        return false;
      }
    }
    return true;
  }
}
@@ -1,117 +0,0 @@
import * as plugins from './mod.plugins.js';
import { BaseFormatter } from './classes.baseformatter.js';
/**
 * Adjacency entry for one format module in the dependency graph.
 */
export interface IModuleDependency {
  module: string; // Module name (matched against BaseFormatter.name by the analyzer)
  dependencies: Set<string>; // Modules that must run before this one
  dependents: Set<string>; // Modules that list this one as a dependency
}
/**
 * Computes a safe execution ordering for the format modules based on a
 * static dependency table, and answers pairwise parallelism queries.
 */
export class DependencyAnalyzer {
  private moduleDependencies: Map<string, IModuleDependency> = new Map();

  constructor() {
    this.initializeDependencies();
  }

  /**
   * Build the static dependency graph between format modules, then derive
   * the reverse (dependent) edges from the forward ones.
   */
  private initializeDependencies(): void {
    const dependencyTable: Record<string, string[]> = {
      cleanup: [], // No dependencies
      npmextra: [], // No dependencies
      license: ['npmextra'], // Depends on npmextra for config
      packagejson: ['npmextra'], // Depends on npmextra for config
      templates: ['npmextra', 'packagejson'], // Depends on both
      gitignore: ['templates'], // Depends on templates
      tsconfig: ['packagejson'], // Depends on package.json
      prettier: [
        'cleanup',
        'npmextra',
        'packagejson',
        'templates',
        'gitignore',
        'tsconfig',
      ], // Runs after most others
      readme: ['npmextra', 'packagejson'], // Depends on project metadata
      copy: ['npmextra'], // Depends on config
    };

    // Forward edges
    for (const [name, deps] of Object.entries(dependencyTable)) {
      this.moduleDependencies.set(name, {
        module: name,
        dependencies: new Set(deps),
        dependents: new Set(),
      });
    }

    // Reverse edges (who depends on me)
    for (const [name, deps] of Object.entries(dependencyTable)) {
      deps.forEach((dep) => {
        const entry = this.moduleDependencies.get(dep);
        if (entry) {
          entry.dependents.add(name);
        }
      });
    }
  }

  /**
   * Partition modules into sequential execution groups: a module joins a
   * group once every dependency that is actually present in the run has
   * finished in an earlier group. Modules unknown to the graph are scheduled
   * immediately; if nothing is ready (cycle / bookkeeping error), all
   * remaining modules are flushed into one final group so progress is
   * always made.
   */
  getExecutionGroups(modules: BaseFormatter[]): BaseFormatter[][] {
    const byName = new Map(modules.map((m) => [m.name, m]));
    const done = new Set<string>();
    const groups: BaseFormatter[][] = [];

    while (done.size < modules.length) {
      let ready = modules.filter((mod) => {
        if (done.has(mod.name)) {
          return false;
        }
        const entry = this.moduleDependencies.get(mod.name);
        if (!entry) {
          return true; // Unknown module: no known constraints
        }
        return Array.from(entry.dependencies).every(
          (dep) => done.has(dep) || !byName.has(dep),
        );
      });

      if (ready.length === 0) {
        // Circular dependency or error — flush everything remaining
        ready = modules.filter((mod) => !done.has(mod.name));
      }

      ready.forEach((mod) => done.add(mod.name));
      groups.push(ready);
    }

    return groups;
  }

  /**
   * Two modules may run in parallel only when neither depends on the other
   * in either direction. Unknown module names are conservatively reported
   * as not parallelizable.
   */
  canRunInParallel(module1: string, module2: string): boolean {
    const first = this.moduleDependencies.get(module1);
    const second = this.moduleDependencies.get(module2);
    if (!first || !second) {
      return false;
    }
    if (first.dependencies.has(module2) || first.dependents.has(module2)) {
      return false;
    }
    return (
      !second.dependencies.has(module1) && !second.dependents.has(module1)
    );
  }
}
+20 -3
View File
@@ -1,14 +1,31 @@
import * as plugins from './mod.plugins.js'; import * as plugins from "./mod.plugins.js";
import { FormatStats } from './classes.formatstats.js'; import { FormatStats } from "./classes.formatstats.js";
interface IFormatContextOptions {
interactive?: boolean;
jsonOutput?: boolean;
}
export class FormatContext { export class FormatContext {
private formatStats: FormatStats; private formatStats: FormatStats;
private interactive: boolean;
private jsonOutput: boolean;
constructor() { constructor(options: IFormatContextOptions = {}) {
this.formatStats = new FormatStats(); this.formatStats = new FormatStats();
this.interactive = options.interactive ?? true;
this.jsonOutput = options.jsonOutput ?? false;
} }
getFormatStats(): FormatStats { getFormatStats(): FormatStats {
return this.formatStats; return this.formatStats;
} }
isInteractive(): boolean {
return this.interactive;
}
isJsonOutput(): boolean {
return this.jsonOutput;
}
} }
+10 -39
View File
@@ -2,13 +2,12 @@ import * as plugins from './mod.plugins.js';
import { FormatContext } from './classes.formatcontext.js'; import { FormatContext } from './classes.formatcontext.js';
import { BaseFormatter } from './classes.baseformatter.js'; import { BaseFormatter } from './classes.baseformatter.js';
import type { IFormatPlan, IPlannedChange } from './interfaces.format.js'; import type { IFormatPlan, IPlannedChange } from './interfaces.format.js';
import { getModuleIcon } from './interfaces.format.js';
import { logger } from '../gitzone.logging.js'; import { logger } from '../gitzone.logging.js';
import { DependencyAnalyzer } from './classes.dependency-analyzer.js';
import { DiffReporter } from './classes.diffreporter.js'; import { DiffReporter } from './classes.diffreporter.js';
export class FormatPlanner { export class FormatPlanner {
private plannedChanges: Map<string, IPlannedChange[]> = new Map(); private plannedChanges: Map<string, IPlannedChange[]> = new Map();
private dependencyAnalyzer = new DependencyAnalyzer();
private diffReporter = new DiffReporter(); private diffReporter = new DiffReporter();
async planFormat(modules: BaseFormatter[]): Promise<IFormatPlan> { async planFormat(modules: BaseFormatter[]): Promise<IFormatPlan> {
@@ -18,7 +17,6 @@ export class FormatPlanner {
filesAdded: 0, filesAdded: 0,
filesModified: 0, filesModified: 0,
filesRemoved: 0, filesRemoved: 0,
estimatedTime: 0,
}, },
changes: [], changes: [],
warnings: [], warnings: [],
@@ -32,7 +30,6 @@ export class FormatPlanner {
for (const change of changes) { for (const change of changes) {
plan.changes.push(change); plan.changes.push(change);
// Update summary
switch (change.type) { switch (change.type) {
case 'create': case 'create':
plan.summary.filesAdded++; plan.summary.filesAdded++;
@@ -58,7 +55,6 @@ export class FormatPlanner {
plan.summary.filesAdded + plan.summary.filesAdded +
plan.summary.filesModified + plan.summary.filesModified +
plan.summary.filesRemoved; plan.summary.filesRemoved;
plan.summary.estimatedTime = plan.summary.totalFiles * 100; // 100ms per file estimate
return plan; return plan;
} }
@@ -67,27 +63,20 @@ export class FormatPlanner {
plan: IFormatPlan, plan: IFormatPlan,
modules: BaseFormatter[], modules: BaseFormatter[],
context: FormatContext, context: FormatContext,
parallel: boolean = false,
): Promise<void> { ): Promise<void> {
const startTime = Date.now(); const startTime = Date.now();
try { for (const module of modules) {
// Always use sequential execution to avoid race conditions const changes = this.plannedChanges.get(module.name) || [];
for (const module of modules) {
const changes = this.plannedChanges.get(module.name) || [];
if (changes.length > 0) { if (changes.length > 0) {
logger.log('info', `Executing ${module.name} formatter...`); logger.log('info', `Executing ${module.name} formatter...`);
await module.execute(changes); await module.execute(changes);
}
} }
const endTime = Date.now();
const duration = endTime - startTime;
logger.log('info', `Format operations completed in ${duration}ms`);
} catch (error) {
throw error;
} }
const duration = Date.now() - startTime;
logger.log('info', `Format operations completed in ${duration}ms`);
} }
async displayPlan( async displayPlan(
@@ -103,7 +92,6 @@ export class FormatPlanner {
console.log(''); console.log('');
console.log('Changes by module:'); console.log('Changes by module:');
// Group changes by module
const changesByModule = new Map<string, IPlannedChange[]>(); const changesByModule = new Map<string, IPlannedChange[]>();
for (const change of plan.changes) { for (const change of plan.changes) {
const moduleChanges = changesByModule.get(change.module) || []; const moduleChanges = changesByModule.get(change.module) || [];
@@ -113,14 +101,13 @@ export class FormatPlanner {
for (const [module, changes] of changesByModule) { for (const [module, changes] of changesByModule) {
console.log( console.log(
`\n${this.getModuleIcon(module)} ${module} (${changes.length} ${changes.length === 1 ? 'file' : 'files'})`, `\n${getModuleIcon(module)} ${module} (${changes.length} ${changes.length === 1 ? 'file' : 'files'})`,
); );
for (const change of changes) { for (const change of changes) {
const icon = this.getChangeIcon(change.type); const icon = this.getChangeIcon(change.type);
console.log(` ${icon} ${change.path} - ${change.description}`); console.log(` ${icon} ${change.path} - ${change.description}`);
// Show diff for modified files if detailed view is requested
if (detailed && change.type === 'modify') { if (detailed && change.type === 'modify') {
const diff = await this.diffReporter.generateDiffForChange(change); const diff = await this.diffReporter.generateDiffForChange(change);
if (diff) { if (diff) {
@@ -141,22 +128,6 @@ export class FormatPlanner {
console.log('\n' + '━'.repeat(50)); console.log('\n' + '━'.repeat(50));
} }
private getModuleIcon(module: string): string {
const icons: Record<string, string> = {
packagejson: '📦',
license: '📝',
tsconfig: '🔧',
cleanup: '🚮',
gitignore: '🔒',
prettier: '✨',
readme: '📖',
templates: '📄',
npmextra: '⚙️',
copy: '📋',
};
return icons[module] || '📁';
}
private getChangeIcon(type: 'create' | 'modify' | 'delete'): string { private getChangeIcon(type: 'create' | 'modify' | 'delete'): string {
switch (type) { switch (type) {
case 'create': case 'create':
+2 -42
View File
@@ -1,5 +1,6 @@
import * as plugins from './mod.plugins.js'; import * as plugins from './mod.plugins.js';
import { logger } from '../gitzone.logging.js'; import { logger } from '../gitzone.logging.js';
import { getModuleIcon } from './interfaces.format.js';
export interface IModuleStats { export interface IModuleStats {
name: string; name: string;
@@ -23,8 +24,6 @@ export interface IFormatStats {
totalModified: number; totalModified: number;
totalDeleted: number; totalDeleted: number;
totalErrors: number; totalErrors: number;
cacheHits: number;
cacheMisses: number;
}; };
} }
@@ -43,8 +42,6 @@ export class FormatStats {
totalModified: 0, totalModified: 0,
totalDeleted: 0, totalDeleted: 0,
totalErrors: 0, totalErrors: 0,
cacheHits: 0,
cacheMisses: 0,
}, },
}; };
} }
@@ -107,14 +104,6 @@ export class FormatStats {
} }
} }
recordCacheHit(): void {
this.stats.overallStats.cacheHits++;
}
recordCacheMiss(): void {
this.stats.overallStats.cacheMisses++;
}
finish(): void { finish(): void {
this.stats.endTime = Date.now(); this.stats.endTime = Date.now();
this.stats.totalExecutionTime = this.stats.endTime - this.stats.startTime; this.stats.totalExecutionTime = this.stats.endTime - this.stats.startTime;
@@ -135,20 +124,6 @@ export class FormatStats {
console.log(` • Deleted: ${this.stats.overallStats.totalDeleted}`); console.log(` • Deleted: ${this.stats.overallStats.totalDeleted}`);
console.log(` Errors: ${this.stats.overallStats.totalErrors}`); console.log(` Errors: ${this.stats.overallStats.totalErrors}`);
if (
this.stats.overallStats.cacheHits > 0 ||
this.stats.overallStats.cacheMisses > 0
) {
const cacheHitRate =
(this.stats.overallStats.cacheHits /
(this.stats.overallStats.cacheHits +
this.stats.overallStats.cacheMisses)) *
100;
console.log(` Cache Hit Rate: ${cacheHitRate.toFixed(1)}%`);
console.log(` • Hits: ${this.stats.overallStats.cacheHits}`);
console.log(` • Misses: ${this.stats.overallStats.cacheMisses}`);
}
// Module stats // Module stats
console.log('\nModule Breakdown:'); console.log('\nModule Breakdown:');
console.log('─'.repeat(50)); console.log('─'.repeat(50));
@@ -159,7 +134,7 @@ export class FormatStats {
for (const moduleStats of sortedModules) { for (const moduleStats of sortedModules) {
console.log( console.log(
`\n${this.getModuleIcon(moduleStats.name)} ${moduleStats.name}:`, `\n${getModuleIcon(moduleStats.name)} ${moduleStats.name}:`,
); );
console.log( console.log(
` Execution Time: ${this.formatDuration(moduleStats.executionTime)}`, ` Execution Time: ${this.formatDuration(moduleStats.executionTime)}`,
@@ -211,19 +186,4 @@ export class FormatStats {
} }
} }
private getModuleIcon(module: string): string {
const icons: Record<string, string> = {
packagejson: '📦',
license: '📝',
tsconfig: '🔧',
cleanup: '🚮',
gitignore: '🔒',
prettier: '✨',
readme: '📖',
templates: '📄',
npmextra: '⚙️',
copy: '📋',
};
return icons[module] || '📁';
}
} }
-340
View File
@@ -1,340 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import type { IFormatOperation } from './interfaces.format.js';
/**
 * Creates per-operation backups of files before a format run mutates them,
 * and restores (rolls back) those backups on failure.
 * State lives in `.nogit/gitzone-backups` under the project cwd, with a
 * manifest.json describing every operation.
 *
 * Bug fix: getBackupPath previously built the backup filename from the
 * literal `$(unknown).backup` (a mangled template literal) instead of the
 * computed basename, so every file in a directory collided on the same
 * backup name. It now uses `${filename}.backup`.
 */
export class RollbackManager {
  private backupDir: string;
  private manifestPath: string;

  constructor() {
    this.backupDir = plugins.path.join(paths.cwd, '.nogit', 'gitzone-backups');
    this.manifestPath = plugins.path.join(this.backupDir, 'manifest.json');
  }

  /** Register a new pending operation and persist it to the manifest. */
  async createOperation(): Promise<IFormatOperation> {
    await this.ensureBackupDir();

    const operation: IFormatOperation = {
      id: this.generateOperationId(),
      timestamp: Date.now(),
      files: [],
      status: 'pending',
    };

    await this.updateManifest(operation);
    return operation;
  }

  /**
   * Snapshot a file's content, checksum, and permissions into the
   * operation's backup directory. Files that do not exist yet are skipped
   * (nothing to restore if they are later created).
   * @throws when the operation id is unknown
   */
  async backupFile(filepath: string, operationId: string): Promise<void> {
    const operation = await this.getOperation(operationId);
    if (!operation) {
      throw new Error(`Operation ${operationId} not found`);
    }

    const absolutePath = plugins.path.isAbsolute(filepath)
      ? filepath
      : plugins.path.join(paths.cwd, filepath);

    // File doesn't exist yet (will be created), so we skip backup
    const exists = await plugins.smartfs.file(absolutePath).exists();
    if (!exists) {
      return;
    }

    // Read file content and metadata
    const content = (await plugins.smartfs
      .file(absolutePath)
      .encoding('utf8')
      .read()) as string;
    const stats = await plugins.smartfs.file(absolutePath).stat();
    const checksum = this.calculateChecksum(content);

    // Write the backup copy, creating its directory first
    const backupPath = this.getBackupPath(operationId, filepath);
    await plugins.smartfs
      .directory(plugins.path.dirname(backupPath))
      .recursive()
      .create();
    await plugins.smartfs.file(backupPath).encoding('utf8').write(content);

    // Record the file in the operation and persist
    operation.files.push({
      path: filepath,
      originalContent: content,
      checksum,
      permissions: stats.mode.toString(8),
    });

    await this.updateManifest(operation);
  }

  /**
   * Restore every file recorded in an operation, in reverse backup order.
   * Each backup's checksum is verified before it is trusted.
   * A missing operation is skipped with a warning (it may already have
   * been cleaned); rolling back twice raises.
   */
  async rollback(operationId: string): Promise<void> {
    const operation = await this.getOperation(operationId);
    if (!operation) {
      // Operation doesn't exist, might have already been rolled back or never created
      console.warn(`Operation ${operationId} not found for rollback, skipping`);
      return;
    }

    if (operation.status === 'rolled-back') {
      throw new Error(`Operation ${operationId} has already been rolled back`);
    }

    // Restore files in reverse order
    for (let i = operation.files.length - 1; i >= 0; i--) {
      const file = operation.files[i];
      const absolutePath = plugins.path.isAbsolute(file.path)
        ? file.path
        : plugins.path.join(paths.cwd, file.path);

      // Verify backup integrity before restoring
      const backupPath = this.getBackupPath(operationId, file.path);
      const backupContent = await plugins.smartfs
        .file(backupPath)
        .encoding('utf8')
        .read();
      const backupChecksum = this.calculateChecksum(backupContent);

      if (backupChecksum !== file.checksum) {
        throw new Error(`Backup integrity check failed for ${file.path}`);
      }

      // Restore file content
      await plugins.smartfs
        .file(absolutePath)
        .encoding('utf8')
        .write(file.originalContent);

      // NOTE(review): original permissions are recorded in file.permissions
      // but not restored here — restoration may not work on all platforms.
    }

    operation.status = 'rolled-back';
    await this.updateManifest(operation);
  }

  /**
   * Mark an operation as completed (eligible for retention cleanup).
   * @throws when the operation id is unknown
   */
  async markComplete(operationId: string): Promise<void> {
    const operation = await this.getOperation(operationId);
    if (!operation) {
      throw new Error(`Operation ${operationId} not found`);
    }

    operation.status = 'completed';
    await this.updateManifest(operation);
  }

  /**
   * Delete backups for completed operations older than the retention window
   * and drop them from the manifest.
   */
  async cleanOldBackups(retentionDays: number): Promise<void> {
    const manifest = await this.getManifest();
    const cutoffTime = Date.now() - retentionDays * 24 * 60 * 60 * 1000;

    const operationsToDelete = manifest.operations.filter(
      (op) => op.timestamp < cutoffTime && op.status === 'completed',
    );

    for (const operation of operationsToDelete) {
      // Remove backup files
      const operationDir = plugins.path.join(
        this.backupDir,
        'operations',
        operation.id,
      );
      await plugins.smartfs.directory(operationDir).recursive().delete();

      // Remove from manifest
      manifest.operations = manifest.operations.filter(
        (op) => op.id !== operation.id,
      );
    }

    await this.saveManifest(manifest);
  }

  /**
   * Verify that every backup file of an operation exists and matches its
   * recorded checksum. Returns false for unknown operations.
   */
  async verifyBackup(operationId: string): Promise<boolean> {
    const operation = await this.getOperation(operationId);
    if (!operation) {
      return false;
    }

    for (const file of operation.files) {
      const backupPath = this.getBackupPath(operationId, file.path);
      const exists = await plugins.smartfs.file(backupPath).exists();
      if (!exists) {
        return false;
      }

      const content = await plugins.smartfs
        .file(backupPath)
        .encoding('utf8')
        .read();
      const checksum = this.calculateChecksum(content);
      if (checksum !== file.checksum) {
        return false;
      }
    }

    return true;
  }

  /** List all operations currently recorded in the manifest. */
  async listBackups(): Promise<IFormatOperation[]> {
    const manifest = await this.getManifest();
    return manifest.operations;
  }

  /** Create the backup root and its operations/ subdirectory. */
  private async ensureBackupDir(): Promise<void> {
    await plugins.smartfs.directory(this.backupDir).recursive().create();
    await plugins.smartfs
      .directory(plugins.path.join(this.backupDir, 'operations'))
      .recursive()
      .create();
  }

  /** Timestamp-plus-random id, filesystem-safe (colons/dots replaced). */
  private generateOperationId(): string {
    const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
    const random = Math.random().toString(36).substring(2, 8);
    return `${timestamp}-${random}`;
  }

  /**
   * Compute the backup location for a file within an operation.
   * Directory separators in the original path are flattened into `__` so
   * the backup tree stays one level deep per operation.
   * Fix: uses `${filename}.backup` (was the literal `$(unknown).backup`,
   * which made all files in a directory collide on one backup name).
   */
  private getBackupPath(operationId: string, filepath: string): string {
    const filename = plugins.path.basename(filepath);
    const dir = plugins.path.dirname(filepath);
    const safeDir = dir.replace(/[/\\]/g, '__');
    return plugins.path.join(
      this.backupDir,
      'operations',
      operationId,
      'files',
      safeDir,
      `${filename}.backup`,
    );
  }

  /** sha256 hex digest of the given content. */
  private calculateChecksum(content: string | Buffer): string {
    return plugins.crypto.createHash('sha256').update(content).digest('hex');
  }

  /**
   * Load the rollback manifest, falling back to an empty one when missing,
   * unparsable, or invalid; a corrupted file is deleted best-effort.
   */
  private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
    const defaultManifest = { operations: [] };

    const exists = await plugins.smartfs.file(this.manifestPath).exists();
    if (!exists) {
      return defaultManifest;
    }

    try {
      const content = (await plugins.smartfs
        .file(this.manifestPath)
        .encoding('utf8')
        .read()) as string;
      const manifest = JSON.parse(content);

      // Validate the manifest structure
      if (this.isValidManifest(manifest)) {
        return manifest;
      }
      console.warn(
        'Invalid rollback manifest structure, returning default manifest',
      );
      return defaultManifest;
    } catch (error) {
      console.warn(
        `Failed to read rollback manifest: ${error.message}, returning default manifest`,
      );
      // Try to delete the corrupted file
      try {
        await plugins.smartfs.file(this.manifestPath).delete();
      } catch (removeError) {
        // Ignore removal errors
      }
      return defaultManifest;
    }
  }

  /**
   * Validate and persist the manifest as pretty-printed JSON.
   * @throws when the manifest fails structural validation
   */
  private async saveManifest(manifest: {
    operations: IFormatOperation[];
  }): Promise<void> {
    if (!this.isValidManifest(manifest)) {
      throw new Error('Invalid rollback manifest structure, cannot save');
    }

    await this.ensureBackupDir();

    const jsonContent = JSON.stringify(manifest, null, 2);
    await plugins.smartfs
      .file(this.manifestPath)
      .encoding('utf8')
      .write(jsonContent);
  }

  /** Look up one operation by id, or null when absent. */
  private async getOperation(
    operationId: string,
  ): Promise<IFormatOperation | null> {
    const manifest = await this.getManifest();
    return manifest.operations.find((op) => op.id === operationId) || null;
  }

  /** Insert or replace an operation record and persist the manifest. */
  private async updateManifest(operation: IFormatOperation): Promise<void> {
    const manifest = await this.getManifest();
    const existingIndex = manifest.operations.findIndex(
      (op) => op.id === operation.id,
    );

    if (existingIndex !== -1) {
      manifest.operations[existingIndex] = operation;
    } else {
      manifest.operations.push(operation);
    }

    await this.saveManifest(manifest);
  }

  /**
   * Structural type guard for manifests read from disk: checks top-level
   * shape, every operation, and every file entry within each operation.
   */
  private isValidManifest(
    manifest: any,
  ): manifest is { operations: IFormatOperation[] } {
    if (!manifest || typeof manifest !== 'object') {
      return false;
    }
    if (!Array.isArray(manifest.operations)) {
      return false;
    }
    for (const operation of manifest.operations) {
      if (
        !operation ||
        typeof operation !== 'object' ||
        typeof operation.id !== 'string' ||
        typeof operation.timestamp !== 'number' ||
        typeof operation.status !== 'string' ||
        !Array.isArray(operation.files)
      ) {
        return false;
      }
      for (const file of operation.files) {
        if (
          !file ||
          typeof file !== 'object' ||
          typeof file.path !== 'string' ||
          typeof file.checksum !== 'string'
        ) {
          return false;
        }
      }
    }
    return true;
  }
}
-26
View File
@@ -1,26 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import { Project } from '../classes.project.js';
const filesToDelete = [
'defaults.yml',
'yarn.lock',
'package-lock.json',
'tslint.json',
];
/**
 * Removes legacy/unwanted files (defaults.yml, lockfiles of other package
 * managers, tslint.json) from the project root.
 */
export const run = async (projectArg: Project) => {
  for (const relativeFilePath of filesToDelete) {
    // Resolve against the project cwd once, so the existence check and the
    // deletion target the same file (previously exists() used the bare
    // relative path while delete() used the cwd-joined path).
    const absoluteFilePath = plugins.path.join(paths.cwd, relativeFilePath);
    const fileExists = await plugins.smartfs.file(absoluteFilePath).exists();
    if (fileExists) {
      logger.log('info', `Found ${relativeFilePath}! Removing it!`);
      await plugins.smartfs.file(absoluteFilePath).delete();
    } else {
      logger.log('info', `Project is free of ${relativeFilePath}`);
    }
  }
};
-93
View File
@@ -1,93 +0,0 @@
import type { Project } from '../classes.project.js';
import * as plugins from './mod.plugins.js';
import { logger } from '../gitzone.logging.js';
/**
 * Copies files according to `gitzone.format.copy.patterns` from npmextra.json.
 * Each pattern has `from` (glob), `to` (target file, or directory when it
 * ends with '/'), and optional `preservePath` to keep the source directory
 * structure below the glob root. Errors per pattern are logged, not thrown.
 */
export const run = async (projectArg: Project) => {
  const gitzoneConfig = await projectArg.gitzoneConfig;
  // Get copy configuration from npmextra.json
  const npmextraConfig = new plugins.npmextra.Npmextra();
  const copyConfig = npmextraConfig.dataFor<any>('gitzone.format.copy', {
    patterns: [],
  });
  if (!copyConfig.patterns || copyConfig.patterns.length === 0) {
    logger.log('info', 'No copy patterns configured in npmextra.json');
    return;
  }
  for (const pattern of copyConfig.patterns) {
    if (!pattern.from || !pattern.to) {
      logger.log('warn', 'Invalid copy pattern - missing "from" or "to" field');
      continue;
    }
    try {
      // Handle glob patterns
      // NOTE(review): assumes smartfs `.filter()` accepts a glob string and
      // matches relative paths — confirm against the smartfs API.
      const entries = await plugins.smartfs
        .directory('.')
        .recursive()
        .filter(pattern.from)
        .list();
      const files = entries.map((entry) => entry.path);
      for (const file of files) {
        const sourcePath = file;
        let destPath = pattern.to;
        // If destination is a directory, preserve filename
        if (pattern.to.endsWith('/')) {
          const filename = plugins.path.basename(file);
          destPath = plugins.path.join(pattern.to, filename);
        }
        // Handle template variables in destination path
        // preservePath recomputes destPath relative to the glob root and
        // overrides the directory branch above when both apply.
        if (pattern.preservePath) {
          const relativePath = plugins.path.relative(
            plugins.path.dirname(pattern.from.replace(/\*/g, '')),
            file,
          );
          destPath = plugins.path.join(pattern.to, relativePath);
        }
        // Ensure destination directory exists
        await plugins.smartfs
          .directory(plugins.path.dirname(destPath))
          .recursive()
          .create();
        // Copy file
        await plugins.smartfs.file(sourcePath).copy(destPath);
        logger.log('info', `Copied ${sourcePath} to ${destPath}`);
      }
    } catch (error) {
      logger.log(
        'error',
        `Failed to copy pattern ${pattern.from}: ${error.message}`,
      );
    }
  }
};
/**
* Example npmextra.json configuration:
* {
* "gitzone": {
* "format": {
* "copy": {
* "patterns": [
* {
* "from": "src/assets/*",
* "to": "dist/assets/",
* "preservePath": true
* },
* {
* "from": "config/*.json",
* "to": "dist/"
* }
* ]
* }
* }
* }
* }
*/
-54
View File
@@ -1,54 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { Project } from '../classes.project.js';
import { logger } from '../gitzone.logging.js';
const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
/**
 * Rewrites .gitignore from the bundled template while preserving everything
 * the user placed after the custom-section marker.
 */
export const run = async (projectArg: Project) => {
  // Markers that may introduce the user-maintained tail of the file.
  const customMarkers = ['#------# custom', '# custom'];
  let customContent = '';
  const gitignoreExists = await plugins.smartfs.file(gitignorePath).exists();
  if (gitignoreExists) {
    const existingGitIgnoreString = (await plugins.smartfs
      .file(gitignorePath)
      .encoding('utf8')
      .read()) as string;
    // First marker that splits the file wins; keep what follows it.
    for (const marker of customMarkers) {
      const splitResult = existingGitIgnoreString.split(marker);
      if (splitResult.length > 1) {
        customContent = splitResult[1].trim();
        break;
      }
    }
  }
  // Overwrite with the canonical template.
  const templateModule = await import('../mod_template/index.js');
  const ciTemplate = await templateModule.getTemplate('gitignore');
  await ciTemplate.writeToDisk(paths.cwd);
  if (customContent) {
    const newGitignoreContent = (await plugins.smartfs
      .file(gitignorePath)
      .encoding('utf8')
      .read()) as string;
    // The template already ends with "#------# custom"; append the body.
    const finalContent =
      newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
    await plugins.smartfs
      .file(gitignorePath)
      .encoding('utf8')
      .write(finalContent);
    logger.log('info', 'Updated .gitignore while preserving custom section!');
  } else {
    logger.log('info', 'Added a .gitignore!');
  }
};
-32
View File
@@ -1,32 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { Project } from '../classes.project.js';
import { logger } from '../gitzone.logging.js';
const incompatibleLicenses: string[] = ['AGPL', 'GPL', 'SSPL'];
/**
 * Checks installed node_modules for licenses incompatible with this project
 * (AGPL/GPL/SSPL family). Warns and skips when node_modules is absent.
 */
export const run = async (projectArg: Project) => {
  const hasNodeModules = await plugins.smartfs
    .directory(plugins.path.join(paths.cwd, 'node_modules'))
    .exists();
  if (!hasNodeModules) {
    logger.log('warn', 'No node_modules found. Skipping license check');
    return;
  }
  const licenseChecker = await plugins.smartlegal.createLicenseChecker();
  const licenseCheckResult = await licenseChecker.excludeLicenseWithinPath(
    paths.cwd,
    incompatibleLicenses,
  );
  if (licenseCheckResult.failingModules.length > 0) {
    logger.log('error', 'Error -> licenses failed. Here is why:');
    for (const failedModule of licenseCheckResult.failingModules) {
      console.log(
        `${failedModule.name} fails with license ${failedModule.license}`,
      );
    }
  } else {
    logger.log('info', 'Success -> licenses passed!');
  }
};
-142
View File
@@ -1,142 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import * as gulpFunction from '@push.rocks/gulp-function';
import { Project } from '../classes.project.js';
/**
 * Migrates npmextra.json from old top-level namespace keys to their new
 * package-scoped replacements. Returns true when at least one key moved.
 */
const migrateNamespaceKeys = (npmextraJson: any): boolean => {
  // Ordered old->new mapping; a key is only migrated when the destination
  // is not already populated.
  const keyMap: Array<[string, string]> = [
    ['gitzone', '@git.zone/cli'],
    ['tsdoc', '@git.zone/tsdoc'],
    ['npmdocker', '@git.zone/tsdocker'],
    ['npmci', '@ship.zone/szci'],
    ['szci', '@ship.zone/szci'],
  ];
  let migrated = false;
  for (const [oldKey, newKey] of keyMap) {
    if (!npmextraJson[oldKey] || npmextraJson[newKey]) {
      continue;
    }
    npmextraJson[newKey] = npmextraJson[oldKey];
    delete npmextraJson[oldKey];
    migrated = true;
    console.log(`Migrated npmextra.json: ${oldKey} -> ${newKey}`);
  }
  return migrated;
};
/**
 * Migrates npmAccessLevel from @ship.zone/szci to
 * @git.zone/cli.release.accessLevel (one-time migration for projects using
 * the old location). Returns true when anything changed.
 */
const migrateAccessLevel = (npmextraJson: any): boolean => {
  const szciConfig = npmextraJson['@ship.zone/szci'];
  if (!szciConfig?.npmAccessLevel) {
    return false;
  }
  // Destination already holds a value: only drop the stale source key.
  if (npmextraJson['@git.zone/cli']?.release?.accessLevel) {
    delete szciConfig.npmAccessLevel;
    return true;
  }
  // Create the @git.zone/cli.release path on demand and move the value.
  const cliConfig = (npmextraJson['@git.zone/cli'] ??= {});
  const releaseConfig = (cliConfig.release ??= {});
  releaseConfig.accessLevel = szciConfig.npmAccessLevel;
  delete szciConfig.npmAccessLevel;
  console.log(`Migrated npmAccessLevel to @git.zone/cli.release.accessLevel`);
  return true;
};
/**
 * Runs the npmextra.json formatting step: applies the automatic namespace
 * and accessLevel migrations, then interactively asks for any missing
 * repository metadata under '@git.zone/cli' before rewriting the file.
 * Stream errors are logged, not rethrown.
 */
export const run = async (projectArg: Project) => {
  const formatSmartstream = new plugins.smartstream.StreamWrapper([
    plugins.smartgulp.src([`npmextra.json`]),
    gulpFunction.forEach(async (fileArg: plugins.smartfile.SmartFile) => {
      const fileString = fileArg.contents.toString();
      const npmextraJson = JSON.parse(fileString);
      // Migrate old namespace keys to new package-scoped keys
      migrateNamespaceKeys(npmextraJson);
      // Migrate npmAccessLevel from szci to @git.zone/cli.release.accessLevel
      migrateAccessLevel(npmextraJson);
      if (!npmextraJson['@git.zone/cli']) {
        npmextraJson['@git.zone/cli'] = {};
      }
      // Dotted paths under '@git.zone/cli' that must be present.
      const expectedRepoInformation: string[] = [
        'projectType',
        'module.githost',
        'module.gitscope',
        'module.gitrepo',
        'module.description',
        'module.npmPackagename',
        'module.license',
      ];
      const interactInstance = new plugins.smartinteract.SmartInteract();
      // Queue one interactive question per missing field.
      for (const expectedRepoInformationItem of expectedRepoInformation) {
        if (
          !plugins.smartobject.smartGet(
            npmextraJson['@git.zone/cli'],
            expectedRepoInformationItem,
          )
        ) {
          interactInstance.addQuestions([
            {
              message: `What is the value of ${expectedRepoInformationItem}`,
              name: expectedRepoInformationItem,
              type: 'input',
              default: 'undefined variable',
            },
          ]);
        }
      }
      const answerbucket = await interactInstance.runQueue();
      // Write any provided answers back into the config object.
      for (const expectedRepoInformationItem of expectedRepoInformation) {
        const cliProvidedValue = answerbucket.getAnswerFor(
          expectedRepoInformationItem,
        );
        if (cliProvidedValue) {
          plugins.smartobject.smartAdd(
            npmextraJson['@git.zone/cli'],
            expectedRepoInformationItem,
            cliProvidedValue,
          );
        }
      }
      // delete obsolete
      // tbd
      if (!npmextraJson['@ship.zone/szci']) {
        npmextraJson['@ship.zone/szci'] = {};
      }
      fileArg.setContentsFromString(JSON.stringify(npmextraJson, null, 2));
    }),
    plugins.smartgulp.replace(),
  ]);
  await formatSmartstream.run().catch((error) => {
    console.log(error);
  });
};
-196
View File
@@ -1,196 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import * as gulpFunction from '@push.rocks/gulp-function';
import { Project } from '../classes.project.js';
import { logger } from '../gitzone.logging.js';
/**
 * Ensures a dependency entry in a package.json object.
 *
 * @param packageJsonObjectArg parsed package.json (mutated in place)
 * @param position which section(s) to touch
 * @param constraint 'exclude' removes the package, 'include' adds it when
 *        absent (keeping an existing pin), 'latest' pins to the newest
 *        version published on npm (falling back on registry errors)
 * @param dependencyArg package name, optionally with a version suffix —
 *        supports scoped names: "pkg", "pkg@1.2.3", "@scope/pkg",
 *        "@scope/pkg@1.2.3"
 */
const ensureDependency = async (
  packageJsonObjectArg: any,
  position: 'dep' | 'devDep' | 'everywhere',
  constraint: 'exclude' | 'include' | 'latest',
  dependencyArg: string,
) => {
  // Split name/version on the LAST '@' beyond index 0 so scoped package
  // names (which start with '@') are not mangled. Previously
  // '@scope/pkg' was split into name 'scope/pkg' with an undefined version.
  const separatorIndex = dependencyArg.lastIndexOf('@');
  const [packageName, version] =
    separatorIndex > 0
      ? [
          dependencyArg.slice(0, separatorIndex),
          dependencyArg.slice(separatorIndex + 1) || 'latest',
        ]
      : [dependencyArg, 'latest'];
  const targetSections: string[] = [];
  switch (position) {
    case 'dep':
      targetSections.push('dependencies');
      break;
    case 'devDep':
      targetSections.push('devDependencies');
      break;
    case 'everywhere':
      targetSections.push('dependencies', 'devDependencies');
      break;
  }
  for (const section of targetSections) {
    if (!packageJsonObjectArg[section]) {
      packageJsonObjectArg[section] = {};
    }
    switch (constraint) {
      case 'exclude':
        delete packageJsonObjectArg[section][packageName];
        break;
      case 'include':
        if (!packageJsonObjectArg[section][packageName]) {
          packageJsonObjectArg[section][packageName] =
            version === 'latest' ? '^1.0.0' : version;
        }
        break;
      case 'latest':
        // Fetch latest version from npm; keep the existing pin (or a
        // default) when the registry is unreachable.
        try {
          const registry = new plugins.smartnpm.NpmRegistry();
          const packageInfo = await registry.getPackageInfo(packageName);
          const latestVersion = packageInfo['dist-tags'].latest;
          packageJsonObjectArg[section][packageName] = `^${latestVersion}`;
        } catch (error) {
          logger.log(
            'warn',
            `Could not fetch latest version for ${packageName}, using existing or default`,
          );
          if (!packageJsonObjectArg[section][packageName]) {
            packageJsonObjectArg[section][packageName] =
              version === 'latest' ? '^1.0.0' : version;
          }
        }
        break;
    }
  }
};
/**
 * Normalizes package.json: repository metadata derived from the gitzone
 * config, module type, private/license defaults, required scripts, the
 * published files list, dev dependency hygiene, and pnpm overrides.
 * Stream errors are logged, not rethrown.
 */
export const run = async (projectArg: Project) => {
  const formatStreamWrapper = new plugins.smartstream.StreamWrapper([
    plugins.smartgulp.src([`package.json`]),
    gulpFunction.forEach(async (fileArg: plugins.smartfile.SmartFile) => {
      const npmextraConfig = new plugins.npmextra.Npmextra(paths.cwd);
      const gitzoneData: any = npmextraConfig.dataFor('@git.zone/cli', {});
      const fileString = fileArg.contents.toString();
      const packageJson = JSON.parse(fileString);
      // metadata — all three URLs share the same githost/scope/repo base
      // (previously written via an obscure comma-operator expression)
      const repoBase = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}`;
      packageJson.repository = {
        type: 'git',
        url: `${repoBase}.git`,
      };
      packageJson.bugs = {
        url: `${repoBase}/issues`,
      };
      packageJson.homepage = `${repoBase}#readme`;
      // Check for module type
      if (!packageJson.type) {
        logger.log('info', `setting packageJson.type to "module"`);
        packageJson.type = 'module';
      }
      // Check for private or public
      if (packageJson.private !== undefined) {
        logger.log(
          'info',
          'Success -> found private/public info in package.json!',
        );
      } else {
        logger.log(
          'error',
          'found no private boolean! Setting it to private for now!',
        );
        packageJson.private = true;
      }
      // Check for license
      if (packageJson.license) {
        logger.log('info', 'Success -> found license in package.json!');
      } else {
        logger.log(
          'error',
          'found no license! Setting it to UNLICENSED for now!',
        );
        packageJson.license = 'UNLICENSED';
      }
      // Check for build script; guard against a missing scripts object
      // (previously this crashed on package.json files without "scripts").
      packageJson.scripts = packageJson.scripts || {};
      if (packageJson.scripts.build) {
        logger.log('info', 'Success -> found build script in package.json!');
      } else {
        logger.log(
          'error',
          'found no build script! Putting a placeholder there for now!',
        );
        packageJson.scripts.build = `echo "Not needed for now"`;
      }
      // Check for buildDocs script
      if (!packageJson.scripts.buildDocs) {
        logger.log(
          'info',
          'found no buildDocs script! Putting tsdoc script there now.',
        );
        packageJson.scripts.buildDocs = `tsdoc`;
      }
      // check for files
      packageJson.files = [
        'ts/**/*',
        'ts_web/**/*',
        'dist/**/*',
        'dist_*/**/*',
        'dist_ts/**/*',
        'dist_ts_web/**/*',
        'assets/**/*',
        'cli.js',
        'npmextra.json',
        'readme.md',
      ];
      // check for dependencies
      // Note: @push.rocks/tapbundle is deprecated - use @git.zone/tstest/tapbundle instead
      await ensureDependency(
        packageJson,
        'devDep',
        'exclude',
        '@push.rocks/tapbundle',
      );
      await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tstest');
      await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tsbuild');
      // set overrides from the bundled assets file
      const overridesContent = (await plugins.smartfs
        .file(plugins.path.join(paths.assetsDir, 'overrides.json'))
        .encoding('utf8')
        .read()) as string;
      const overrides = JSON.parse(overridesContent);
      packageJson.pnpm = packageJson.pnpm || {};
      packageJson.pnpm.overrides = overrides;
      fileArg.setContentsFromString(JSON.stringify(packageJson, null, 2));
    }),
    plugins.smartgulp.replace(),
  ]);
  await formatStreamWrapper.run().catch((error) => {
    console.log(error);
  });
};
-66
View File
@@ -1,66 +0,0 @@
import * as plugins from './mod.plugins.js';
import prettier from 'prettier';
import { Project } from '../classes.project.js';
import { logger } from '../gitzone.logging.js';
// Prettier options applied to TypeScript sources.
const prettierDefaultTypeScriptConfig: prettier.Options = {
  printWidth: 100,
  parser: 'typescript',
  singleQuote: true,
};
// Prettier options applied to markdown documents.
const prettierDefaultMarkdownConfig: prettier.Options = {
  singleQuote: true,
  printWidth: 100,
  parser: 'markdown',
};
// Globs of files the formatter pipeline runs over.
const filesToFormat = [
  `ts/**/*.ts`,
  `test/**/*.ts`,
  `readme.md`,
  `docs/**/*.md`,
];
const choosePrettierConfig = (fileArg: plugins.smartfile.SmartFile) => {
switch (fileArg.parsedPath.ext) {
case '.ts':
return prettierDefaultTypeScriptConfig;
case '.md':
return prettierDefaultMarkdownConfig;
default:
return {};
}
};
// Object-mode stream step: checks each file with prettier and only
// re-emits files that needed reformatting — files that already pass are
// dropped (cb with no file), so the downstream replace() leaves them alone.
const prettierTypeScriptPipestop = plugins.through2.obj(
  async (fileArg: plugins.smartfile.SmartFile, enc, cb) => {
    const fileString = fileArg.contentBuffer.toString();
    const chosenConfig = choosePrettierConfig(fileArg);
    const filePasses = await prettier.check(fileString, chosenConfig);
    if (filePasses) {
      logger.log('info', `OK! -> ${fileArg.path} passes!`);
      // No output: file already conforms.
      cb(null);
    } else {
      logger.log('info', `${fileArg.path} is being reformated!`);
      const formatedFileString = await prettier.format(
        fileString,
        chosenConfig,
      );
      fileArg.setContentsFromString(formatedFileString);
      cb(null, fileArg);
    }
  },
);
/** Runs the prettier check/format pipeline over the configured globs. */
export const run = async (projectArg: Project) => {
  const pipeline = new plugins.smartstream.StreamWrapper([
    plugins.smartgulp.src(filesToFormat),
    prettierTypeScriptPipestop,
    plugins.smartgulp.replace(),
  ]);
  // Stream failures are logged rather than rethrown.
  await pipeline.run().catch((error) => {
    console.log(error);
  });
};
-29
View File
@@ -1,29 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
/**
 * Seeds readme.md and readme.hints.md with starter content when they do not
 * yet exist; existing files are left untouched.
 */
export const run = async () => {
  const seedFiles = [
    {
      path: plugins.path.join(paths.cwd, 'readme.md'),
      label: 'readme.md',
      content: '# Project Readme\n\nThis is the initial readme file.',
    },
    {
      path: plugins.path.join(paths.cwd, 'readme.hints.md'),
      label: 'readme.hints.md',
      content:
        '# Project Readme Hints\n\nThis is the initial readme hints file.',
    },
  ];
  for (const seed of seedFiles) {
    const exists = await plugins.smartfs.file(seed.path).exists();
    if (exists) {
      console.log(`${seed.label} already exists`);
    } else {
      await plugins.smartfs
        .file(seed.path)
        .encoding('utf8')
        .write(seed.content);
      console.log(`Initialized ${seed.label}`);
    }
  }
};
-79
View File
@@ -1,79 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import { Project } from '../classes.project.js';
/**
 * Updates project scaffolding from the bundled templates: vscode settings,
 * CI config, Dockerfile and cli shims (by project type), and html assets.
 */
export const run = async (project: Project) => {
  const templateModule = await import('../mod_template/index.js');
  // update vscode
  const vscodeTemplate = await templateModule.getTemplate('vscode');
  await vscodeTemplate.writeToDisk(paths.cwd);
  logger.log('info', `Updated vscode template!`);
  // update gitlab ci and Dockerfile
  switch (project.gitzoneConfig.data.projectType) {
    case 'npm':
    case 'wcc': {
      const isPublic =
        project.gitzoneConfig.data.npmciOptions.npmAccessLevel === 'public';
      const ciTemplateDefault = await templateModule.getTemplate(
        isPublic ? 'ci_default' : 'ci_default_private',
      );
      // Previously fire-and-forget; await so write failures surface here.
      await ciTemplateDefault.writeToDisk(paths.cwd);
      logger.log('info', 'Updated .gitlabci.yml!');
      break;
    }
    case 'service':
    case 'website': {
      const ciTemplateDocker = await templateModule.getTemplate('ci_docker');
      await ciTemplateDocker.writeToDisk(paths.cwd);
      logger.log('info', 'Updated CI/CD config files!');
      // lets care about docker
      const dockerTemplate =
        await templateModule.getTemplate('dockerfile_service');
      // Previously fire-and-forget; await so write failures surface here.
      await dockerTemplate.writeToDisk(paths.cwd);
      logger.log('info', 'Updated Dockerfile!');
      // lets care about cli
      const cliTemplate = await templateModule.getTemplate('cli');
      await cliTemplate.writeToDisk(paths.cwd);
      logger.log('info', 'Updated cli.ts.js and cli.js!');
      break;
    }
    default:
      break;
  }
  // update html
  if (project.gitzoneConfig.data.projectType === 'website') {
    const websiteUpdateTemplate =
      await templateModule.getTemplate('website_update');
    const variables = {
      assetbrokerUrl: project.gitzoneConfig.data.module.assetbrokerUrl,
      legalUrl: project.gitzoneConfig.data.module.legalUrl,
    };
    console.log(
      'updating website template with variables\n',
      JSON.stringify(variables, null, 2),
    );
    websiteUpdateTemplate.supplyVariables(variables);
    await websiteUpdateTemplate.writeToDisk(paths.cwd);
    logger.log('info', `Updated html for website!`);
  } else if (project.gitzoneConfig.data.projectType === 'service') {
    const websiteUpdateTemplate =
      await templateModule.getTemplate('service_update');
    await websiteUpdateTemplate.writeToDisk(paths.cwd);
    logger.log('info', `Updated html for element template!`);
  } else if (project.gitzoneConfig.data.projectType === 'wcc') {
    const wccUpdateTemplate = await templateModule.getTemplate('wcc_update');
    await wccUpdateTemplate.writeToDisk(paths.cwd);
    logger.log('info', `Updated html for wcc template!`);
  }
};
-31
View File
@@ -1,31 +0,0 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import { Project } from '../classes.project.js';
/**
 * Formats tsconfig.json: resets baseUrl to '.' and rebuilds compilerOptions
 * paths so every tspublish submodule name maps onto its local index.js.
 */
export const run = async (projectArg: Project) => {
  logger.log('info', 'Formatting tsconfig.json...');
  const factory = plugins.smartfile.SmartFileFactory.nodeFs();
  const tsconfigSmartfile = await factory.fromFilePath(
    plugins.path.join(paths.cwd, 'tsconfig.json'),
  );
  const tsconfigObject = JSON.parse(tsconfigSmartfile.parseContentAsString());
  const compilerOptions = tsconfigObject.compilerOptions || {};
  tsconfigObject.compilerOptions = compilerOptions;
  compilerOptions.baseUrl = '.';
  compilerOptions.paths = {};
  const tsPublishMod = await import('@git.zone/tspublish');
  const tsPublishInstance = new tsPublishMod.TsPublish();
  const publishModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
  for (const [publishModule, publishConfig] of Object.entries(
    publishModules,
  )) {
    compilerOptions.paths[`${(publishConfig as any).name}`] = [
      `./${publishModule}/index.js`,
    ];
  }
  await tsconfigSmartfile.editContentAsString(async () =>
    JSON.stringify(tsconfigObject, null, 2),
  );
  await tsconfigSmartfile.write();
};
@@ -1,7 +1,6 @@
import { BaseFormatter } from '../classes.baseformatter.js'; import { BaseFormatter } from '../classes.baseformatter.js';
import type { IPlannedChange } from '../interfaces.format.js'; import type { IPlannedChange } from '../interfaces.format.js';
import * as plugins from '../mod.plugins.js'; import * as plugins from '../mod.plugins.js';
import * as cleanupFormatter from '../format.cleanup.js';
export class CleanupFormatter extends BaseFormatter { export class CleanupFormatter extends BaseFormatter {
get name(): string { get name(): string {
+4 -8
View File
@@ -17,15 +17,15 @@ export class CopyFormatter extends BaseFormatter {
async analyze(): Promise<IPlannedChange[]> { async analyze(): Promise<IPlannedChange[]> {
const changes: IPlannedChange[] = []; const changes: IPlannedChange[] = [];
// Get copy configuration from npmextra.json // Get copy configuration from .smartconfig.json
const npmextraConfig = new plugins.npmextra.Npmextra(); const smartconfigInstance = new plugins.smartconfig.Smartconfig();
const copyConfig = npmextraConfig.dataFor<{ patterns: ICopyPattern[] }>( const copyConfig = smartconfigInstance.dataFor<{ patterns: ICopyPattern[] }>(
'gitzone.format.copy', 'gitzone.format.copy',
{ patterns: [] }, { patterns: [] },
); );
if (!copyConfig.patterns || copyConfig.patterns.length === 0) { if (!copyConfig.patterns || copyConfig.patterns.length === 0) {
logVerbose('No copy patterns configured in npmextra.json'); logVerbose('No copy patterns configured in .smartconfig.json');
return changes; return changes;
} }
@@ -103,10 +103,6 @@ export class CopyFormatter extends BaseFormatter {
async applyChange(change: IPlannedChange): Promise<void> { async applyChange(change: IPlannedChange): Promise<void> {
if (!change.content) return; if (!change.content) return;
// Ensure destination directory exists
const destDir = plugins.path.dirname(change.path);
await plugins.smartfs.directory(destDir).recursive().create();
if (change.type === 'create') { if (change.type === 'create') {
await this.createFile(change.path, change.content); await this.createFile(change.path, change.content);
} else { } else {
+25 -29
View File
@@ -1,42 +1,39 @@
import { BaseFormatter } from '../classes.baseformatter.js'; import { BaseFormatter } from '../classes.baseformatter.js';
import type { IPlannedChange } from '../interfaces.format.js'; import type { IPlannedChange } from '../interfaces.format.js';
import * as plugins from '../mod.plugins.js'; import * as plugins from '../mod.plugins.js';
import * as paths from '../../paths.js';
import { logger } from '../../gitzone.logging.js'; import { logger } from '../../gitzone.logging.js';
// Standard gitignore template content (without front-matter)
const GITIGNORE_TEMPLATE = `.nogit/
# artifacts
coverage/
public/
# installs
node_modules/
# caches
.yarn/
.cache/
.rpt2_cache
# builds
dist/
dist_*/
# AI
.claude/
.serena/
#------# custom`;
export class GitignoreFormatter extends BaseFormatter { export class GitignoreFormatter extends BaseFormatter {
get name(): string { get name(): string {
return 'gitignore'; return 'gitignore';
} }
/**
* Read the standard gitignore template from the asset file,
* stripping the YAML frontmatter.
*/
private async getStandardTemplate(): Promise<string> {
const templatePath = plugins.path.join(paths.templatesDir, 'gitignore', '_gitignore');
const raw = (await plugins.smartfs
.file(templatePath)
.encoding('utf8')
.read()) as string;
// Strip YAML frontmatter (---\n...\n---)
const frontmatterEnd = raw.indexOf('---', 3);
if (frontmatterEnd !== -1) {
return raw.slice(frontmatterEnd + 3).trimStart();
}
return raw;
}
async analyze(): Promise<IPlannedChange[]> { async analyze(): Promise<IPlannedChange[]> {
const changes: IPlannedChange[] = []; const changes: IPlannedChange[] = [];
const gitignorePath = '.gitignore'; const gitignorePath = '.gitignore';
const standardTemplate = await this.getStandardTemplate();
// Check if file exists and extract custom content // Check if file exists and extract custom content
let customContent = ''; let customContent = '';
const exists = await plugins.smartfs.file(gitignorePath).exists(); const exists = await plugins.smartfs.file(gitignorePath).exists();
@@ -59,11 +56,11 @@ export class GitignoreFormatter extends BaseFormatter {
} }
// Compute new content // Compute new content
let newContent = GITIGNORE_TEMPLATE; let newContent = standardTemplate;
if (customContent) { if (customContent) {
newContent = GITIGNORE_TEMPLATE + '\n' + customContent + '\n'; newContent = standardTemplate + '\n' + customContent + '\n';
} else { } else {
newContent = GITIGNORE_TEMPLATE + '\n'; newContent = standardTemplate + '\n';
} }
// Read current content to compare // Read current content to compare
@@ -75,7 +72,6 @@ export class GitignoreFormatter extends BaseFormatter {
.read()) as string; .read()) as string;
} }
// Determine change type
if (!exists) { if (!exists) {
changes.push({ changes.push({
type: 'create', type: 'create',
@@ -1,165 +0,0 @@
import { BaseFormatter } from '../classes.baseformatter.js';
import type { IPlannedChange } from '../interfaces.format.js';
import * as plugins from '../mod.plugins.js';
import { logger, logVerbose } from '../../gitzone.logging.js';
/**
 * Migrates npmextra.json from old top-level namespace keys to their new
 * package-scoped replacements. Returns true when at least one key moved.
 */
const migrateNamespaceKeys = (npmextraJson: any): boolean => {
  // Ordered old->new mapping; a key is only migrated when the destination
  // is not already populated.
  const keyMap: Array<[string, string]> = [
    ['gitzone', '@git.zone/cli'],
    ['tsdoc', '@git.zone/tsdoc'],
    ['npmdocker', '@git.zone/tsdocker'],
    ['npmci', '@ship.zone/szci'],
    ['szci', '@ship.zone/szci'],
  ];
  let migrated = false;
  for (const [oldKey, newKey] of keyMap) {
    if (!npmextraJson[oldKey] || npmextraJson[newKey]) {
      continue;
    }
    npmextraJson[newKey] = npmextraJson[oldKey];
    delete npmextraJson[oldKey];
    migrated = true;
  }
  return migrated;
};
/**
 * Migrates npmAccessLevel from @ship.zone/szci to
 * @git.zone/cli.release.accessLevel. Returns true when anything changed.
 */
const migrateAccessLevel = (npmextraJson: any): boolean => {
  const szciConfig = npmextraJson['@ship.zone/szci'];
  if (!szciConfig?.npmAccessLevel) {
    return false;
  }
  // Destination already holds a value: only drop the stale source key.
  if (npmextraJson['@git.zone/cli']?.release?.accessLevel) {
    delete szciConfig.npmAccessLevel;
    return true;
  }
  // Create the @git.zone/cli.release path on demand and move the value.
  const cliConfig = (npmextraJson['@git.zone/cli'] ??= {});
  const releaseConfig = (cliConfig.release ??= {});
  releaseConfig.accessLevel = szciConfig.npmAccessLevel;
  delete szciConfig.npmAccessLevel;
  return true;
};
/**
 * Formatter that migrates and normalizes npmextra.json.
 * analyze() computes only the automatic, non-interactive changes;
 * applyChange() may additionally prompt for missing repository metadata
 * before writing the file.
 */
export class NpmextraFormatter extends BaseFormatter {
  get name(): string {
    return 'npmextra';
  }
  /**
   * Returns a single 'modify' change for npmextra.json when the automatic
   * migrations alter its content; returns no changes when the file is
   * absent or already up to date.
   */
  async analyze(): Promise<IPlannedChange[]> {
    const changes: IPlannedChange[] = [];
    const npmextraPath = 'npmextra.json';
    // Check if file exists
    const exists = await plugins.smartfs.file(npmextraPath).exists();
    if (!exists) {
      logVerbose('npmextra.json does not exist, skipping');
      return changes;
    }
    // Read current content
    const currentContent = (await plugins.smartfs
      .file(npmextraPath)
      .encoding('utf8')
      .read()) as string;
    // Parse and compute new content
    const npmextraJson = JSON.parse(currentContent);
    // Apply migrations (these are automatic, non-interactive)
    migrateNamespaceKeys(npmextraJson);
    migrateAccessLevel(npmextraJson);
    // Ensure namespaces exist
    if (!npmextraJson['@git.zone/cli']) {
      npmextraJson['@git.zone/cli'] = {};
    }
    if (!npmextraJson['@ship.zone/szci']) {
      npmextraJson['@ship.zone/szci'] = {};
    }
    const newContent = JSON.stringify(npmextraJson, null, 2);
    // Only add change if content differs
    if (newContent !== currentContent) {
      changes.push({
        type: 'modify',
        path: npmextraPath,
        module: this.name,
        description: 'Migrate and format npmextra.json',
        content: newContent,
      });
    }
    return changes;
  }
  /**
   * Applies a planned 'modify' change: re-parses the planned content,
   * interactively asks for any missing fields under '@git.zone/cli',
   * merges the answers, and writes the result to disk.
   */
  async applyChange(change: IPlannedChange): Promise<void> {
    if (change.type !== 'modify' || !change.content) return;
    // Parse the content to check for missing required fields
    const npmextraJson = JSON.parse(change.content);
    // Check for missing required module information
    // (dotted paths under '@git.zone/cli')
    const expectedRepoInformation: string[] = [
      'projectType',
      'module.githost',
      'module.gitscope',
      'module.gitrepo',
      'module.description',
      'module.npmPackagename',
      'module.license',
    ];
    const interactInstance = new plugins.smartinteract.SmartInteract();
    // Queue one interactive question per missing field.
    for (const expectedRepoInformationItem of expectedRepoInformation) {
      if (
        !plugins.smartobject.smartGet(
          npmextraJson['@git.zone/cli'],
          expectedRepoInformationItem,
        )
      ) {
        interactInstance.addQuestions([
          {
            message: `What is the value of ${expectedRepoInformationItem}`,
            name: expectedRepoInformationItem,
            type: 'input',
            default: 'undefined variable',
          },
        ]);
      }
    }
    const answerbucket = await interactInstance.runQueue();
    // Merge any provided answers back into the config object.
    for (const expectedRepoInformationItem of expectedRepoInformation) {
      const cliProvidedValue = answerbucket.getAnswerFor(
        expectedRepoInformationItem,
      );
      if (cliProvidedValue) {
        plugins.smartobject.smartAdd(
          npmextraJson['@git.zone/cli'],
          expectedRepoInformationItem,
          cliProvidedValue,
        );
      }
    }
    // Write the final content
    const finalContent = JSON.stringify(npmextraJson, null, 2);
    await this.modifyFile(change.path, finalContent);
    logger.log('info', 'Updated npmextra.json');
  }
}
@@ -4,68 +4,6 @@ import * as plugins from '../mod.plugins.js';
import * as paths from '../../paths.js'; import * as paths from '../../paths.js';
import { logger, logVerbose } from '../../gitzone.logging.js'; import { logger, logVerbose } from '../../gitzone.logging.js';
/**
* Ensures a certain dependency exists or is excluded
*/
const ensureDependency = async (
packageJsonObject: any,
position: 'dep' | 'devDep' | 'everywhere',
constraint: 'exclude' | 'include' | 'latest',
dependencyArg: string,
): Promise<void> => {
const [packageName, version] = dependencyArg.includes('@')
? dependencyArg.split('@').filter(Boolean)
: [dependencyArg, 'latest'];
const targetSections: string[] = [];
switch (position) {
case 'dep':
targetSections.push('dependencies');
break;
case 'devDep':
targetSections.push('devDependencies');
break;
case 'everywhere':
targetSections.push('dependencies', 'devDependencies');
break;
}
for (const section of targetSections) {
if (!packageJsonObject[section]) {
packageJsonObject[section] = {};
}
switch (constraint) {
case 'exclude':
delete packageJsonObject[section][packageName];
break;
case 'include':
if (!packageJsonObject[section][packageName]) {
packageJsonObject[section][packageName] =
version === 'latest' ? '^1.0.0' : version;
}
break;
case 'latest':
try {
const registry = new plugins.smartnpm.NpmRegistry();
const packageInfo = await registry.getPackageInfo(packageName);
const latestVersion = packageInfo['dist-tags'].latest;
packageJsonObject[section][packageName] = `^${latestVersion}`;
} catch (error) {
logVerbose(
`Could not fetch latest version for ${packageName}, using existing or default`,
);
if (!packageJsonObject[section][packageName]) {
packageJsonObject[section][packageName] =
version === 'latest' ? '^1.0.0' : version;
}
}
break;
}
}
};
export class PackageJsonFormatter extends BaseFormatter { export class PackageJsonFormatter extends BaseFormatter {
get name(): string { get name(): string {
return 'packagejson'; return 'packagejson';
@@ -91,9 +29,9 @@ export class PackageJsonFormatter extends BaseFormatter {
// Parse and compute new content // Parse and compute new content
const packageJson = JSON.parse(currentContent); const packageJson = JSON.parse(currentContent);
// Get gitzone config from npmextra // Get gitzone config from smartconfig
const npmextraConfig = new plugins.npmextra.Npmextra(paths.cwd); const smartconfigInstance = new plugins.smartconfig.Smartconfig(paths.cwd);
const gitzoneData: any = npmextraConfig.dataFor('@git.zone/cli', {}); const gitzoneData: any = smartconfigInstance.dataFor('@git.zone/cli', {});
// Set metadata from gitzone config // Set metadata from gitzone config
if (gitzoneData.module) { if (gitzoneData.module) {
@@ -132,11 +70,6 @@ export class PackageJsonFormatter extends BaseFormatter {
packageJson.scripts.build = `echo "Not needed for now"`; packageJson.scripts.build = `echo "Not needed for now"`;
} }
// Ensure buildDocs script exists
if (!packageJson.scripts.buildDocs) {
packageJson.scripts.buildDocs = `tsdoc`;
}
// Set files array // Set files array
packageJson.files = [ packageJson.files = [
'ts/**/*', 'ts/**/*',
@@ -147,25 +80,10 @@ export class PackageJsonFormatter extends BaseFormatter {
'dist_ts_web/**/*', 'dist_ts_web/**/*',
'assets/**/*', 'assets/**/*',
'cli.js', 'cli.js',
'npmextra.json', '.smartconfig.json',
'readme.md', 'readme.md',
]; ];
// Handle dependencies
await ensureDependency(
packageJson,
'devDep',
'exclude',
'@push.rocks/tapbundle',
);
await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tstest');
await ensureDependency(
packageJson,
'devDep',
'latest',
'@git.zone/tsbuild',
);
// Set pnpm overrides from assets // Set pnpm overrides from assets
try { try {
const overridesContent = (await plugins.smartfs const overridesContent = (await plugins.smartfs
+3 -13
View File
@@ -21,7 +21,7 @@ export class PrettierFormatter extends BaseFormatter {
const rootConfigFiles = [ const rootConfigFiles = [
'package.json', 'package.json',
'tsconfig.json', 'tsconfig.json',
'npmextra.json', '.smartconfig.json',
'.prettierrc', '.prettierrc',
'.prettierrc.json', '.prettierrc.json',
'.prettierrc.js', '.prettierrc.js',
@@ -79,12 +79,9 @@ export class PrettierFormatter extends BaseFormatter {
// Remove duplicates // Remove duplicates
const uniqueFiles = [...new Set(allFiles)]; const uniqueFiles = [...new Set(allFiles)];
// Get all files that match the pattern
const files = uniqueFiles;
// Ensure we only process actual files (not directories) // Ensure we only process actual files (not directories)
const validFiles: string[] = []; const validFiles: string[] = [];
for (const file of files) { for (const file of uniqueFiles) {
try { try {
const stats = await plugins.smartfs.file(file).stat(); const stats = await plugins.smartfs.file(file).stat();
if (!stats.isDirectory) { if (!stats.isDirectory) {
@@ -96,14 +93,7 @@ export class PrettierFormatter extends BaseFormatter {
} }
} }
// Check which files need formatting
for (const file of validFiles) { for (const file of validFiles) {
// Skip files that haven't changed
if (!(await this.shouldProcessFile(file))) {
logVerbose(`Skipping ${file} - no changes detected`);
continue;
}
changes.push({ changes.push({
type: 'modify', type: 'modify',
path: file, path: file,
@@ -232,7 +222,7 @@ export class PrettierFormatter extends BaseFormatter {
private async getPrettierConfig(): Promise<any> { private async getPrettierConfig(): Promise<any> {
// Try to load prettier config from the project // Try to load prettier config from the project
const prettierConfig = new plugins.npmextra.Npmextra(); const prettierConfig = new plugins.smartconfig.Smartconfig();
return prettierConfig.dataFor('prettier', { return prettierConfig.dataFor('prettier', {
// Default prettier config // Default prettier config
singleQuote: true, singleQuote: true,
@@ -0,0 +1,126 @@
import { BaseFormatter } from "../classes.baseformatter.js";
import type { IPlannedChange } from "../interfaces.format.js";
import * as plugins from "../mod.plugins.js";
import { logger, logVerbose } from "../../gitzone.logging.js";
import { migrateSmartconfigData } from "../../helpers.smartconfigmigrations.js";
const CONFIG_FILE = ".smartconfig.json";
export class SmartconfigFormatter extends BaseFormatter {
  /** Identifier used by the planner and by module skip/only filters. */
  get name(): string {
    return "smartconfig";
  }

  /**
   * Plan changes for `.smartconfig.json`: run data migrations, ensure the
   * expected tool namespaces exist, and emit a single "modify" change when
   * the normalized content differs from what is on disk.
   *
   * The rename of legacy files (npmextra.json / smartconfig.json) to
   * `.smartconfig.json` is handled by the orchestrator before analysis, so
   * this formatter only ever operates on `.smartconfig.json`.
   */
  async analyze(): Promise<IPlannedChange[]> {
    const plannedChanges: IPlannedChange[] = [];

    if (!(await plugins.smartfs.file(CONFIG_FILE).exists())) {
      logVerbose(".smartconfig.json does not exist, skipping");
      return plannedChanges;
    }

    const diskContent = (await plugins.smartfs
      .file(CONFIG_FILE)
      .encoding("utf8")
      .read()) as string;
    const configData = JSON.parse(diskContent);

    // Apply in-place data migrations to the parsed config.
    migrateSmartconfigData(configData);

    // Guarantee both tool namespaces are present.
    for (const namespace of ["@git.zone/cli", "@ship.zone/szci"]) {
      if (!configData[namespace]) {
        configData[namespace] = {};
      }
    }

    const normalizedContent = JSON.stringify(configData, null, 2);
    if (normalizedContent !== diskContent) {
      plannedChanges.push({
        type: "modify",
        path: CONFIG_FILE,
        module: this.name,
        description: "Migrate and format .smartconfig.json",
        content: normalizedContent,
      });
    }
    return plannedChanges;
  }

  /**
   * Apply a planned modification: prompt (when interactive) for any missing
   * required module fields, merge the answers in, and write the file.
   *
   * @throws when required fields are missing and the context is
   *   non-interactive, since prompting is impossible.
   */
  async applyChange(change: IPlannedChange): Promise<void> {
    if (change.type !== "modify" || !change.content) return;

    const configData = JSON.parse(change.content);

    // Fields that must exist under the "@git.zone/cli" namespace.
    const requiredFields: string[] = [
      "projectType",
      "module.githost",
      "module.gitscope",
      "module.gitrepo",
      "module.description",
      "module.npmPackagename",
      "module.license",
    ];

    const interaction = new plugins.smartinteract.SmartInteract();
    const hasField = (fieldPath: string) =>
      plugins.smartobject.smartGet(configData["@git.zone/cli"], fieldPath);

    const missingFields = requiredFields.filter(
      (fieldPath) => !hasField(fieldPath),
    );
    if (missingFields.length > 0 && !this.context.isInteractive()) {
      throw new Error(
        `Missing required .smartconfig.json fields: ${missingFields.join(", ")}`,
      );
    }

    // Queue one prompt per missing field.
    for (const fieldPath of requiredFields) {
      if (!hasField(fieldPath)) {
        interaction.addQuestions([
          {
            message: `What is the value of ${fieldPath}`,
            name: fieldPath,
            type: "input",
            default: "undefined variable",
          },
        ]);
      }
    }

    const answers = await interaction.runQueue();
    for (const fieldPath of requiredFields) {
      const providedValue = answers.getAnswerFor(fieldPath);
      if (providedValue) {
        plugins.smartobject.smartAdd(
          configData["@git.zone/cli"],
          fieldPath,
          providedValue,
        );
      }
    }

    const finalContent = JSON.stringify(configData, null, 2);
    await this.modifyFile(change.path, finalContent);
    logger.log("info", "Updated .smartconfig.json");
  }
}
+56 -44
View File
@@ -9,6 +9,32 @@ export class TemplatesFormatter extends BaseFormatter {
return 'templates'; return 'templates';
} }
/**
* Render a template directory through smartscaf and return a map of path → content.
*/
private async renderTemplate(templateName: string): Promise<Map<string, string>> {
const templateDir = plugins.path.join(paths.templatesDir, templateName);
const scafTemplate = new plugins.smartscaf.ScafTemplate(templateDir);
await scafTemplate.readTemplateFromDir();
const gitzoneData = this.project.gitzoneConfig?.data;
if (gitzoneData) {
await scafTemplate.supplyVariables({
module: gitzoneData.module,
projectType: gitzoneData.projectType,
});
}
const renderedFiles = await scafTemplate.renderToMemory();
const fileMap = new Map<string, string>();
for (const file of renderedFiles) {
fileMap.set(file.path, file.contents.toString());
}
return fileMap;
}
async analyze(): Promise<IPlannedChange[]> { async analyze(): Promise<IPlannedChange[]> {
const changes: IPlannedChange[] = []; const changes: IPlannedChange[] = [];
const project = this.project; const project = this.project;
@@ -25,7 +51,8 @@ export class TemplatesFormatter extends BaseFormatter {
switch (projectType) { switch (projectType) {
case 'npm': case 'npm':
case 'wcc': case 'wcc':
const accessLevel = project.gitzoneConfig?.data?.npmciOptions?.npmAccessLevel; const accessLevel = (project.gitzoneConfig?.data as any)?.release?.accessLevel
|| project.gitzoneConfig?.data?.npmciOptions?.npmAccessLevel;
const ciTemplate = accessLevel === 'public' ? 'ci_default' : 'ci_default_private'; const ciTemplate = accessLevel === 'public' ? 'ci_default' : 'ci_default_private';
const ciChanges = await this.analyzeTemplate(ciTemplate, [ const ciChanges = await this.analyzeTemplate(ciTemplate, [
{ templatePath: '.gitea/workflows/default_nottags.yaml', destPath: '.gitea/workflows/default_nottags.yaml' }, { templatePath: '.gitea/workflows/default_nottags.yaml', destPath: '.gitea/workflows/default_nottags.yaml' },
@@ -62,9 +89,6 @@ export class TemplatesFormatter extends BaseFormatter {
{ templatePath: 'html/index.html', destPath: 'html/index.html' }, { templatePath: 'html/index.html', destPath: 'html/index.html' },
]); ]);
changes.push(...websiteChanges); changes.push(...websiteChanges);
} else if (projectType === 'service') {
const serviceChanges = await this.analyzeTemplate('service_update', []);
changes.push(...serviceChanges);
} else if (projectType === 'wcc') { } else if (projectType === 'wcc') {
const wccChanges = await this.analyzeTemplate('wcc_update', [ const wccChanges = await this.analyzeTemplate('wcc_update', [
{ templatePath: 'html/index.html', destPath: 'html/index.html' }, { templatePath: 'html/index.html', destPath: 'html/index.html' },
@@ -83,53 +107,47 @@ export class TemplatesFormatter extends BaseFormatter {
const changes: IPlannedChange[] = []; const changes: IPlannedChange[] = [];
const templateDir = plugins.path.join(paths.templatesDir, templateName); const templateDir = plugins.path.join(paths.templatesDir, templateName);
// Check if template exists
const templateExists = await plugins.smartfs.directory(templateDir).exists(); const templateExists = await plugins.smartfs.directory(templateDir).exists();
if (!templateExists) { if (!templateExists) {
logVerbose(`Template ${templateName} not found`); logVerbose(`Template ${templateName} not found`);
return changes; return changes;
} }
for (const file of files) { let renderedFiles: Map<string, string>;
const templateFilePath = plugins.path.join(templateDir, file.templatePath); try {
const destFilePath = file.destPath; renderedFiles = await this.renderTemplate(templateName);
} catch (error) {
logVerbose(`Failed to render template ${templateName}: ${error.message}`);
return changes;
}
// Check if template file exists for (const file of files) {
const fileExists = await plugins.smartfs.file(templateFilePath).exists(); // Look up by templatePath first, then destPath (frontmatter may rename files)
if (!fileExists) { const processedContent = renderedFiles.get(file.templatePath)
logVerbose(`Template file ${templateFilePath} not found`); || renderedFiles.get(file.destPath);
if (!processedContent) {
logVerbose(`Template file ${file.templatePath} not found in rendered output`);
continue; continue;
} }
try { const destExists = await plugins.smartfs.file(file.destPath).exists();
// Read template content let currentContent = '';
const templateContent = (await plugins.smartfs if (destExists) {
.file(templateFilePath) currentContent = (await plugins.smartfs
.file(file.destPath)
.encoding('utf8') .encoding('utf8')
.read()) as string; .read()) as string;
}
// Check if destination file exists if (processedContent !== currentContent) {
const destExists = await plugins.smartfs.file(destFilePath).exists(); changes.push({
let currentContent = ''; type: destExists ? 'modify' : 'create',
if (destExists) { path: file.destPath,
currentContent = (await plugins.smartfs module: this.name,
.file(destFilePath) description: `Apply template ${templateName}/${file.templatePath}`,
.encoding('utf8') content: processedContent,
.read()) as string; });
}
// Only add change if content differs
if (templateContent !== currentContent) {
changes.push({
type: destExists ? 'modify' : 'create',
path: destFilePath,
module: this.name,
description: `Apply template ${templateName}/${file.templatePath}`,
content: templateContent,
});
}
} catch (error) {
logVerbose(`Failed to read template ${templateFilePath}: ${error.message}`);
} }
} }
@@ -139,12 +157,6 @@ export class TemplatesFormatter extends BaseFormatter {
async applyChange(change: IPlannedChange): Promise<void> { async applyChange(change: IPlannedChange): Promise<void> {
if (!change.content) return; if (!change.content) return;
// Ensure destination directory exists
const destDir = plugins.path.dirname(change.path);
if (destDir && destDir !== '.') {
await plugins.smartfs.directory(destDir).recursive().create();
}
if (change.type === 'create') { if (change.type === 'create') {
await this.createFile(change.path, change.content); await this.createFile(change.path, change.content);
} else { } else {
@@ -30,9 +30,10 @@ export class TsconfigFormatter extends BaseFormatter {
const tsconfigObject = JSON.parse(currentContent); const tsconfigObject = JSON.parse(currentContent);
tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {}; tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {};
tsconfigObject.compilerOptions.baseUrl = '.'; tsconfigObject.compilerOptions.baseUrl = '.';
tsconfigObject.compilerOptions.paths = {}; const existingPaths = tsconfigObject.compilerOptions.paths || {};
// Get module paths from tspublish // Get module paths from tspublish, merging with existing custom paths
const tspublishPaths: Record<string, string[]> = {};
try { try {
const tsPublishMod = await import('@git.zone/tspublish'); const tsPublishMod = await import('@git.zone/tspublish');
const tsPublishInstance = new tsPublishMod.TsPublish(); const tsPublishInstance = new tsPublishMod.TsPublish();
@@ -40,7 +41,7 @@ export class TsconfigFormatter extends BaseFormatter {
for (const publishModule of Object.keys(publishModules)) { for (const publishModule of Object.keys(publishModules)) {
const publishConfig = publishModules[publishModule]; const publishConfig = publishModules[publishModule];
tsconfigObject.compilerOptions.paths[`${publishConfig.name}`] = [ tspublishPaths[`${publishConfig.name}`] = [
`./${publishModule}/index.js`, `./${publishModule}/index.js`,
]; ];
} }
@@ -48,6 +49,8 @@ export class TsconfigFormatter extends BaseFormatter {
logVerbose(`Could not get tspublish modules: ${error.message}`); logVerbose(`Could not get tspublish modules: ${error.message}`);
} }
tsconfigObject.compilerOptions.paths = { ...existingPaths, ...tspublishPaths };
const newContent = JSON.stringify(tsconfigObject, null, 2); const newContent = JSON.stringify(tsconfigObject, null, 2);
// Only add change if content differs // Only add change if content differs
+315 -165
View File
@@ -1,120 +1,239 @@
import * as plugins from './mod.plugins.js'; import * as plugins from "./mod.plugins.js";
import { Project } from '../classes.project.js'; import { Project } from "../classes.project.js";
import { FormatContext } from './classes.formatcontext.js'; import { FormatContext } from "./classes.formatcontext.js";
import { FormatPlanner } from './classes.formatplanner.js'; import { FormatPlanner } from "./classes.formatplanner.js";
import { BaseFormatter } from './classes.baseformatter.js'; import { BaseFormatter } from "./classes.baseformatter.js";
import { logger, setVerboseMode } from '../gitzone.logging.js'; import { logger, setVerboseMode } from "../gitzone.logging.js";
import type { ICliMode } from "../helpers.climode.js";
import {
getCliMode,
printJson,
runWithSuppressedOutput,
} from "../helpers.climode.js";
import { getCliConfigValue } from "../helpers.smartconfig.js";
// Import wrapper classes for formatters import { CleanupFormatter } from "./formatters/cleanup.formatter.js";
import { CleanupFormatter } from './formatters/cleanup.formatter.js'; import { SmartconfigFormatter } from "./formatters/smartconfig.formatter.js";
import { NpmextraFormatter } from './formatters/npmextra.formatter.js'; import { LicenseFormatter } from "./formatters/license.formatter.js";
import { LicenseFormatter } from './formatters/license.formatter.js'; import { PackageJsonFormatter } from "./formatters/packagejson.formatter.js";
import { PackageJsonFormatter } from './formatters/packagejson.formatter.js'; import { TemplatesFormatter } from "./formatters/templates.formatter.js";
import { TemplatesFormatter } from './formatters/templates.formatter.js'; import { GitignoreFormatter } from "./formatters/gitignore.formatter.js";
import { GitignoreFormatter } from './formatters/gitignore.formatter.js'; import { TsconfigFormatter } from "./formatters/tsconfig.formatter.js";
import { TsconfigFormatter } from './formatters/tsconfig.formatter.js'; import { PrettierFormatter } from "./formatters/prettier.formatter.js";
import { PrettierFormatter } from './formatters/prettier.formatter.js'; import { ReadmeFormatter } from "./formatters/readme.formatter.js";
import { ReadmeFormatter } from './formatters/readme.formatter.js'; import { CopyFormatter } from "./formatters/copy.formatter.js";
import { CopyFormatter } from './formatters/copy.formatter.js';
/**
 * Migrate a legacy config file (smartconfig.json or npmextra.json) to the
 * canonical `.smartconfig.json` name before any formatter reads config.
 *
 * @param allowWrite - when false (dry-run), legacy files are detected but
 *   nothing is renamed on disk.
 */
async function migrateConfigFile(allowWrite: boolean): Promise<void> {
  const target = ".smartconfig.json";

  // Nothing to do once the canonical file is already in place.
  if (await plugins.smartfs.file(target).exists()) {
    return;
  }

  // Legacy names are checked in priority order; the first match wins.
  for (const legacyName of ["smartconfig.json", "npmextra.json"]) {
    const legacyExists = await plugins.smartfs.file(legacyName).exists();
    if (!legacyExists) {
      continue;
    }
    if (!allowWrite) {
      // Dry-run: leave the legacy file untouched.
      return;
    }
    const legacyContent = (await plugins.smartfs
      .file(legacyName)
      .encoding("utf8")
      .read()) as string;
    await plugins.smartfs.file(`./${target}`).encoding("utf8").write(legacyContent);
    await plugins.smartfs.file(legacyName).delete();
    logger.log("info", `Migrated ${legacyName} to ${target}`);
    return;
  }
}
// Shared formatter class map used by both run() and runFormatter().
// NOTE: insertion order matters — Object.entries/values over this map
// determines the order formatters are instantiated and handed to the
// planner, so do not reorder entries casually.
const formatterMap: Record<
  string,
  new (ctx: FormatContext, proj: Project) => BaseFormatter
> = {
  cleanup: CleanupFormatter,
  smartconfig: SmartconfigFormatter,
  license: LicenseFormatter,
  packagejson: PackageJsonFormatter,
  templates: TemplatesFormatter,
  gitignore: GitignoreFormatter,
  tsconfig: TsconfigFormatter,
  prettier: PrettierFormatter,
  readme: ReadmeFormatter,
  copy: CopyFormatter,
};
// Formatters that don't require projectType to be set.
// runFormatter() consults this list to decide whether Project.fromCwd()
// must enforce a configured projectType before running.
const formattersNotRequiringProjectType = [
  "smartconfig",
  "prettier",
  "cleanup",
  "packagejson",
];
/**
 * Load the `format` section of the CLI config and merge it over defaults.
 *
 * @returns the merged format configuration; `modules.skip` and
 *   `modules.only` are always present arrays.
 */
const getFormatConfig = async () => {
  const rawFormatConfig = await getCliConfigValue<Record<string, any>>(
    "format",
    {},
  );
  // Spread the raw config FIRST and rebuild `modules` on top of it.
  // Spreading rawFormatConfig last (as before) let a partial user-supplied
  // `modules` object (e.g. only `skip`) replace the merged one, dropping the
  // `only`/`skip` defaults that downstream code dereferences unconditionally.
  return {
    interactive: true,
    showDiffs: false,
    autoApprove: false,
    showStats: true,
    ...rawFormatConfig,
    modules: {
      skip: [],
      only: [],
      ...(rawFormatConfig.modules || {}),
    },
  };
};
const createActiveFormatters = async (options: {
interactive: boolean;
jsonOutput: boolean;
}) => {
const project = await Project.fromCwd({ requireProjectType: false });
const context = new FormatContext(options);
const planner = new FormatPlanner();
const formatConfig = await getFormatConfig();
const formatters = Object.entries(formatterMap).map(
([, FormatterClass]) => new FormatterClass(context, project),
);
const activeFormatters = formatters.filter((formatter) => {
if (formatConfig.modules.only.length > 0) {
return formatConfig.modules.only.includes(formatter.name);
}
if (formatConfig.modules.skip.includes(formatter.name)) {
return false;
}
return true;
});
return {
context,
planner,
formatConfig,
activeFormatters,
};
};
const buildFormatPlan = async (options: {
fromPlan?: string;
interactive: boolean;
jsonOutput: boolean;
}) => {
const { context, planner, formatConfig, activeFormatters } =
await createActiveFormatters({
interactive: options.interactive,
jsonOutput: options.jsonOutput,
});
const plan = options.fromPlan
? JSON.parse(
(await plugins.smartfs
.file(options.fromPlan)
.encoding("utf8")
.read()) as string,
)
: await planner.planFormat(activeFormatters);
return {
context,
planner,
formatConfig,
activeFormatters,
plan,
};
};
/**
 * Reduce a plan to its JSON-safe shape: summary, warnings, and per-change
 * metadata (the potentially large `content` payload is deliberately omitted).
 */
const serializePlan = (plan: any) => {
  const { summary, warnings, changes } = plan;
  const serializedChanges = changes.map(
    ({ type, path, module, description }: any) => ({
      type,
      path,
      module,
      description,
    }),
  );
  return { summary, warnings, changes: serializedChanges };
};
export let run = async ( export let run = async (
options: { options: {
write?: boolean; // Explicitly write changes (default: false, dry-mode) write?: boolean;
dryRun?: boolean; // Deprecated, kept for compatibility dryRun?: boolean; // Deprecated, kept for compatibility
yes?: boolean; yes?: boolean;
planOnly?: boolean; planOnly?: boolean;
savePlan?: string; savePlan?: string;
fromPlan?: string; fromPlan?: string;
detailed?: boolean; detailed?: boolean;
interactive?: boolean; interactive?: boolean;
parallel?: boolean;
verbose?: boolean; verbose?: boolean;
diff?: boolean; // Show file diffs diff?: boolean;
[key: string]: any;
} = {}, } = {},
): Promise<any> => { ): Promise<any> => {
// Set verbose mode if requested const mode = await getCliMode(options as any);
const subcommand = (options as any)?._?.[1];
if (mode.help || subcommand === "help") {
showHelp(mode);
return;
}
if (options.verbose) { if (options.verbose) {
setVerboseMode(true); setVerboseMode(true);
} }
// Determine if we should write changes const shouldWrite = options.write ?? options.dryRun === false;
// Default is dry-mode (no writing) unless --write/-w is specified const treatAsPlan = subcommand === "plan";
const shouldWrite = options.write ?? (options.dryRun === false);
const project = await Project.fromCwd({ requireProjectType: false }); if (mode.json && shouldWrite) {
const context = new FormatContext(); printJson({
// Cache system removed - no longer needed ok: false,
const planner = new FormatPlanner(); error:
"JSON output is only supported for read-only format planning. Use `gitzone format plan --json` or omit `--json` when applying changes.",
});
return;
}
// Get configuration from npmextra // Migrate config file before anything reads it
const npmextraConfig = new plugins.npmextra.Npmextra(); await migrateConfigFile(shouldWrite);
const formatConfig = npmextraConfig.dataFor<any>('@git.zone/cli.format', {
interactive: true,
showDiffs: false,
autoApprove: false,
planTimeout: 30000,
rollback: {
enabled: true,
autoRollbackOnError: true,
backupRetentionDays: 7,
maxBackupSize: '100MB',
excludePatterns: ['node_modules/**', '.git/**'],
},
modules: {
skip: [],
only: [],
order: [],
},
parallel: true,
cache: {
enabled: true,
clean: true, // Clean invalid entries from cache
},
});
// Cache cleaning removed - no longer using cache system const formatConfig = await getFormatConfig();
const interactive =
// Override config with command options options.interactive ?? (mode.interactive && formatConfig.interactive);
const interactive = options.interactive ?? formatConfig.interactive;
const autoApprove = options.yes ?? formatConfig.autoApprove; const autoApprove = options.yes ?? formatConfig.autoApprove;
const parallel = options.parallel ?? formatConfig.parallel;
try { try {
// Initialize formatters const planBuilder = async () => {
const formatters = [ return await buildFormatPlan({
new CleanupFormatter(context, project), fromPlan: options.fromPlan,
new NpmextraFormatter(context, project), interactive,
new LicenseFormatter(context, project), jsonOutput: mode.json,
new PackageJsonFormatter(context, project), });
new TemplatesFormatter(context, project), };
new GitignoreFormatter(context, project),
new TsconfigFormatter(context, project),
new PrettierFormatter(context, project),
new ReadmeFormatter(context, project),
new CopyFormatter(context, project),
];
// Filter formatters based on configuration if (!mode.json) {
const activeFormatters = formatters.filter((formatter) => { logger.log("info", "Analyzing project for format operations...");
if (formatConfig.modules.only.length > 0) { }
return formatConfig.modules.only.includes(formatter.name); const { context, planner, activeFormatters, plan } = mode.json
} ? await runWithSuppressedOutput(planBuilder)
if (formatConfig.modules.skip.includes(formatter.name)) { : await planBuilder();
return false;
}
return true;
});
// Plan phase if (mode.json) {
logger.log('info', 'Analyzing project for format operations...'); printJson(serializePlan(plan));
let plan = options.fromPlan return;
? JSON.parse( }
(await plugins.smartfs
.file(options.fromPlan)
.encoding('utf8')
.read()) as string,
)
: await planner.planFormat(activeFormatters);
// Display plan // Display plan
await planner.displayPlan(plan, options.detailed); await planner.displayPlan(plan, options.detailed);
@@ -123,34 +242,35 @@ export let run = async (
if (options.savePlan) { if (options.savePlan) {
await plugins.smartfs await plugins.smartfs
.file(options.savePlan) .file(options.savePlan)
.encoding('utf8') .encoding("utf8")
.write(JSON.stringify(plan, null, 2)); .write(JSON.stringify(plan, null, 2));
logger.log('info', `Plan saved to ${options.savePlan}`); logger.log("info", `Plan saved to ${options.savePlan}`);
} }
// Exit if plan-only mode if (options.planOnly || treatAsPlan) {
if (options.planOnly) {
return; return;
} }
// Show diffs if requested (works in both dry-run and write modes) // Show diffs if explicitly requested or before interactive write confirmation
if (options.diff) { const showDiffs =
logger.log('info', 'Showing file diffs:'); options.diff || (shouldWrite && interactive && !autoApprove);
console.log(''); if (showDiffs) {
logger.log("info", "Showing file diffs:");
console.log("");
for (const formatter of activeFormatters) { for (const formatter of activeFormatters) {
const checkResult = await formatter.check(); const checkResult = await formatter.check();
if (checkResult.hasDiff) { if (checkResult.hasDiff) {
logger.log('info', `[${formatter.name}]`); logger.log("info", `[${formatter.name}]`);
formatter.displayAllDiffs(checkResult); formatter.displayAllDiffs(checkResult);
console.log(''); console.log("");
} }
} }
} }
// Dry-run mode (default behavior) // Dry-run mode (default behavior)
if (!shouldWrite) { if (!shouldWrite) {
logger.log('info', 'Dry-run mode - use --write (-w) to apply changes'); logger.log("info", "Dry-run mode - use --write (-w) to apply changes");
return; return;
} }
@@ -158,73 +278,45 @@ export let run = async (
if (interactive && !autoApprove) { if (interactive && !autoApprove) {
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'confirm', type: "confirm",
name: 'proceed', name: "proceed",
message: 'Proceed with formatting?', message: "Proceed with formatting?",
default: true, default: true,
}); });
if (!(response as any).value) { if (!(response as any).value) {
logger.log('info', 'Format operation cancelled by user'); logger.log("info", "Format operation cancelled by user");
return; return;
} }
} }
// Execute phase // Execute phase
logger.log( logger.log("info", "Executing format operations...");
'info', await planner.executePlan(plan, activeFormatters, context);
`Executing format operations${parallel ? ' in parallel' : ' sequentially'}...`,
);
await planner.executePlan(plan, activeFormatters, context, parallel);
// Finish statistics tracking
context.getFormatStats().finish(); context.getFormatStats().finish();
// Display statistics const showStats = formatConfig.showStats ?? true;
const showStats = npmextraConfig.dataFor('gitzone.format.showStats', true);
if (showStats) { if (showStats) {
context.getFormatStats().displayStats(); context.getFormatStats().displayStats();
} }
// Save stats if requested
if (options.detailed) { if (options.detailed) {
const statsPath = `.nogit/format-stats-${Date.now()}.json`; const statsPath = `.nogit/format-stats-${Date.now()}.json`;
await context.getFormatStats().saveReport(statsPath); await context.getFormatStats().saveReport(statsPath);
} }
logger.log('success', 'Format operations completed successfully!'); logger.log("success", "Format operations completed successfully!");
} catch (error) { } catch (error) {
logger.log('error', `Format operation failed: ${error.message}`); const errorMessage = error instanceof Error ? error.message : String(error);
logger.log("error", `Format operation failed: ${errorMessage}`);
// Rollback system has been removed for stability
throw error; throw error;
} }
}; };
// Export CLI command handlers import type { ICheckResult } from "./interfaces.format.js";
export const handleRollback = async (operationId?: string): Promise<void> => {
logger.log('info', 'Rollback system has been disabled for stability');
};
export const handleListBackups = async (): Promise<void> => {
logger.log('info', 'Backup system has been disabled for stability');
};
export const handleCleanBackups = async (): Promise<void> => {
logger.log(
'info',
'Backup cleaning has been disabled - backup system removed',
);
};
// Import the ICheckResult type for external use
import type { ICheckResult } from './interfaces.format.js';
export type { ICheckResult }; export type { ICheckResult };
// Formatters that don't require projectType to be set
const formattersNotRequiringProjectType = ['npmextra', 'prettier', 'cleanup', 'packagejson'];
/** /**
* Run a single formatter by name (for use by other modules) * Run a single formatter by name (for use by other modules)
*/ */
@@ -232,28 +324,14 @@ export const runFormatter = async (
formatterName: string, formatterName: string,
options: { options: {
silent?: boolean; silent?: boolean;
checkOnly?: boolean; // Only check for diffs, don't apply checkOnly?: boolean;
showDiff?: boolean; // Show the diff output showDiff?: boolean;
} = {} } = {},
): Promise<ICheckResult | void> => { ): Promise<ICheckResult | void> => {
// Determine if this formatter requires projectType const requireProjectType =
const requireProjectType = !formattersNotRequiringProjectType.includes(formatterName); !formattersNotRequiringProjectType.includes(formatterName);
const project = await Project.fromCwd({ requireProjectType }); const project = await Project.fromCwd({ requireProjectType });
const context = new FormatContext(); const context = new FormatContext({ interactive: true, jsonOutput: false });
// Map formatter names to classes
const formatterMap: Record<string, new (ctx: FormatContext, proj: Project) => BaseFormatter> = {
cleanup: CleanupFormatter,
npmextra: NpmextraFormatter,
license: LicenseFormatter,
packagejson: PackageJsonFormatter,
templates: TemplatesFormatter,
gitignore: GitignoreFormatter,
tsconfig: TsconfigFormatter,
prettier: PrettierFormatter,
readme: ReadmeFormatter,
copy: CopyFormatter,
};
const FormatterClass = formatterMap[formatterName]; const FormatterClass = formatterMap[formatterName];
if (!FormatterClass) { if (!FormatterClass) {
@@ -262,7 +340,6 @@ export const runFormatter = async (
const formatter = new FormatterClass(context, project); const formatter = new FormatterClass(context, project);
// Check-only mode: just check for diffs and optionally display them
if (options.checkOnly) { if (options.checkOnly) {
const result = await formatter.check(); const result = await formatter.check();
if (result.hasDiff && options.showDiff) { if (result.hasDiff && options.showDiff) {
@@ -271,7 +348,6 @@ export const runFormatter = async (
return result; return result;
} }
// Normal mode: analyze and apply changes
const changes = await formatter.analyze(); const changes = await formatter.analyze();
for (const change of changes) { for (const change of changes) {
@@ -279,6 +355,80 @@ export const runFormatter = async (
} }
if (!options.silent) { if (!options.silent) {
logger.log('success', `Formatter '${formatterName}' completed`); logger.log("success", `Formatter '${formatterName}' completed`);
} }
}; };
/**
 * Prints usage information for `gitzone format`.
 *
 * Two output modes:
 * - `mode.json` set: emits a machine-readable help object via `printJson`
 *   (command, usage, description, flags, examples) and returns.
 * - otherwise: prints the equivalent human-readable help text to stdout.
 *
 * @param mode - Optional CLI mode; only the `json` property is consulted here.
 */
export function showHelp(mode?: ICliMode): void {
  // Machine-readable help for --json consumers.
  if (mode?.json) {
    printJson({
      command: "format",
      usage: "gitzone format [plan] [options]",
      description:
        "Plans formatting changes by default and applies them only with --write.",
      flags: [
        { flag: "--write, -w", description: "Apply planned changes" },
        {
          flag: "--yes",
          description: "Skip the interactive confirmation before writing",
        },
        {
          flag: "--plan-only",
          description: "Show the plan without applying changes",
        },
        {
          flag: "--save-plan <file>",
          description: "Write the format plan to a file",
        },
        {
          flag: "--from-plan <file>",
          description: "Load a previously saved plan",
        },
        {
          flag: "--detailed",
          description: "Show detailed diffs and save stats",
        },
        { flag: "--verbose", description: "Enable verbose logging" },
        {
          flag: "--diff",
          description: "Show per-file diffs before applying changes",
        },
        { flag: "--json", description: "Emit a read-only format plan as JSON" },
      ],
      examples: [
        "gitzone format",
        "gitzone format plan --json",
        "gitzone format --write --yes",
      ],
    });
    return;
  }
  // Human-readable help text; flag descriptions are column-aligned.
  console.log("");
  console.log("Usage: gitzone format [plan] [options]");
  console.log("");
  console.log(
    "Plans formatting changes by default and applies them only with --write.",
  );
  console.log("");
  console.log("Flags:");
  console.log("  --write, -w          Apply planned changes");
  console.log(
    "  --yes                Skip the interactive confirmation before writing",
  );
  console.log("  --plan-only          Show the plan without applying changes");
  console.log("  --save-plan <file>   Write the format plan to a file");
  console.log("  --from-plan <file>   Load a previously saved plan");
  console.log("  --detailed           Show detailed diffs and save stats");
  console.log("  --verbose            Enable verbose logging");
  console.log(
    "  --diff               Show per-file diffs before applying changes",
  );
  console.log("  --json               Emit a read-only format plan as JSON");
  console.log("");
  console.log("Examples:");
  console.log("  gitzone format");
  console.log("  gitzone format plan --json");
  console.log("  gitzone format --write --yes");
  console.log("");
}
+16 -19
View File
@@ -1,31 +1,15 @@
export type IFormatOperation = {
id: string;
timestamp: number;
files: Array<{
path: string;
originalContent: string;
checksum: string;
permissions: string;
}>;
status: 'pending' | 'in-progress' | 'completed' | 'failed' | 'rolled-back';
error?: Error;
};
export type IFormatPlan = { export type IFormatPlan = {
summary: { summary: {
totalFiles: number; totalFiles: number;
filesAdded: number; filesAdded: number;
filesModified: number; filesModified: number;
filesRemoved: number; filesRemoved: number;
estimatedTime: number;
}; };
changes: Array<{ changes: Array<{
type: 'create' | 'modify' | 'delete'; type: 'create' | 'modify' | 'delete';
path: string; path: string;
module: string; module: string;
description: string; description: string;
diff?: string;
size?: number;
}>; }>;
warnings: Array<{ warnings: Array<{
level: 'info' | 'warning' | 'error'; level: 'info' | 'warning' | 'error';
@@ -40,9 +24,6 @@ export type IPlannedChange = {
module: string; module: string;
description: string; description: string;
content?: string; // New content for create/modify operations content?: string; // New content for create/modify operations
originalContent?: string; // Original content for comparison
diff?: string;
size?: number;
}; };
export interface ICheckResult { export interface ICheckResult {
@@ -54,3 +35,19 @@ export interface ICheckResult {
after?: string; after?: string;
}>; }>;
} }
/**
 * Maps a formatter module name to its display icon.
 * Unknown module names fall back to the generic folder icon.
 */
export function getModuleIcon(module: string): string {
  switch (module) {
    case 'packagejson':
      return '📦';
    case 'license':
      return '📝';
    case 'tsconfig':
      return '🔧';
    case 'cleanup':
      return '🚮';
    case 'gitignore':
      return '🔒';
    case 'prettier':
      return '✨';
    case 'readme':
      return '📖';
    case 'templates':
      return '📄';
    case 'smartconfig':
      return '⚙️';
    case 'copy':
      return '📋';
    default:
      return '📁';
  }
}
+4 -14
View File
@@ -1,31 +1,21 @@
export * from '../plugins.js'; export * from '../plugins.js';
import * as crypto from 'crypto';
import * as path from 'path'; import * as path from 'path';
import * as lik from '@push.rocks/lik';
import * as smartfile from '@push.rocks/smartfile'; import * as smartfile from '@push.rocks/smartfile';
import * as smartgulp from '@push.rocks/smartgulp';
import * as smartinteract from '@push.rocks/smartinteract'; import * as smartinteract from '@push.rocks/smartinteract';
import * as smartlegal from '@push.rocks/smartlegal'; import * as smartlegal from '@push.rocks/smartlegal';
import * as smartobject from '@push.rocks/smartobject'; import * as smartobject from '@push.rocks/smartobject';
import * as smartnpm from '@push.rocks/smartnpm'; import * as smartconfig from '@push.rocks/smartconfig';
import * as smartstream from '@push.rocks/smartstream';
import * as through2 from 'through2';
import * as npmextra from '@push.rocks/npmextra';
import * as smartdiff from '@push.rocks/smartdiff'; import * as smartdiff from '@push.rocks/smartdiff';
import * as smartscaf from '@push.rocks/smartscaf';
export { export {
crypto,
path, path,
lik,
smartfile, smartfile,
smartgulp,
smartinteract, smartinteract,
smartlegal, smartlegal,
smartobject, smartobject,
smartnpm, smartconfig,
smartstream,
through2,
npmextra,
smartdiff, smartdiff,
smartscaf,
}; };
+393
View File
@@ -0,0 +1,393 @@
import * as plugins from "./mod.plugins.js";
import * as paths from "../paths.js";
import { logger } from "../gitzone.logging.js";
import type { ICliMode } from "../helpers.climode.js";
import { getCliMode, printJson } from "../helpers.climode.js";
import {
inferVersionTypeFromPending,
movePendingToVersion,
readPendingChangelog,
} from "../helpers.changelog.js";
import {
resolveReleaseWorkflow,
type IResolvedReleaseWorkflow,
} from "../helpers.workflow.js";
import * as commitHelpers from "../mod_commit/mod.helpers.js";
// Outcome of publishing to one release target (git remote, npm registry, or docker image).
type TTargetStatus = "success" | "already-published" | "skipped" | "failed";
// One entry in the per-target release summary printed at the end of `run`.
interface ITargetResult {
  target: string; // registry URL, "remote/branch", "remote/tags", or docker image name
  status: TTargetStatus;
  message?: string; // failure detail or skip reason; left undefined on success
}
/**
 * Entry point for `gitzone release`.
 *
 * Orchestrates the full release workflow: resolves the configured workflow,
 * validates pending changelog entries, bumps the version, rewrites the
 * changelog, creates the release commit and tag, then publishes to the
 * enabled targets (git, npm, docker) and prints a summary.
 * Exits with code 1 when a preflight check or any target fails.
 */
export const run = async (argvArg: any) => {
  const mode = await getCliMode(argvArg);
  const subcommand = argvArg._?.[1];
  if (mode.help || subcommand === "help") {
    showHelp(mode);
    return;
  }
  // Release mutates the repository; structured JSON output is only offered
  // for read-only flows, so refuse early in JSON mode.
  if (mode.json) {
    printJson({
      ok: false,
      error: "JSON output is not supported for mutating release workflows yet. Use `gitzone release --plan` for a human-readable plan.",
    });
    return;
  }
  const workflow = await resolveReleaseWorkflow(argvArg);
  printReleasePlan(workflow);
  // `--plan` mode: show the resolved workflow and stop before any mutation.
  if (workflow.confirmation === "plan") {
    return;
  }
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: "bash",
    sourceFilePaths: [],
  });
  const pending = await readPendingChangelog(
    plugins.path.join(paths.cwd, workflow.changelogFile),
    workflow.changelogPendingSection,
  );
  // Refuse empty releases unless explicitly allowed (--allow-empty).
  if (pending.isEmpty && !argvArg["allow-empty"] && !argvArg.allowEmpty) {
    logger.log("error", "No pending changelog entries. Nothing to release.");
    process.exit(1);
  }
  const versionType = resolveVersionType(argvArg, pending.block);
  const projectType = await commitHelpers.detectProjectType();
  const currentVersion = await commitHelpers.readCurrentVersion(projectType);
  // Planned version is computed up front so the confirmation prompt can show it.
  const plannedVersion = commitHelpers.calculateNewVersion(currentVersion, versionType);
  if (workflow.confirmation === "prompt") {
    if (!mode.interactive) {
      throw new Error("Release confirmation requires an interactive terminal. Use `-y` or set release.confirmation to `auto`.");
    }
    const confirmed = await plugins.smartinteract.SmartInteract.getCliConfirmation(
      `Release v${plannedVersion} (${versionType}) now?`,
      true,
    );
    if (!confirmed) {
      logger.log("info", "Release cancelled.");
      return;
    }
  }
  let newVersion = plannedVersion;
  const gitResults: ITargetResult[] = [];
  const npmResults: ITargetResult[] = [];
  const dockerResults: ITargetResult[] = [];
  if (workflow.requireCleanTree) {
    await verifyCleanTree(smartshellInstance, "Working tree is not clean. Commit or stash changes before releasing.");
  }
  if (workflow.runTests) {
    await runCommandStep(smartshellInstance, "Running tests", workflow.testCommand);
  }
  // Core release steps: bump version, rewrite changelog, commit, tag.
  newVersion = await runVersionStep(projectType, versionType);
  await runChangelogStep(workflow, newVersion);
  await runReleaseCommitStep(smartshellInstance, newVersion);
  await runTagStep(smartshellInstance, newVersion);
  if (workflow.runBuild) {
    await runCommandStep(smartshellInstance, "Running release build", workflow.buildCommand);
    // A release build must not dirty the tree, or the tagged commit would
    // not match the published artifacts.
    await verifyCleanTree(smartshellInstance, "Build produced uncommitted changes. Aborting release.");
  }
  if (workflow.targets.includes("git")) {
    gitResults.push(...(await runGitTarget(smartshellInstance, workflow)));
  }
  if (workflow.targets.includes("npm")) {
    npmResults.push(...(await runNpmTarget(smartshellInstance, workflow)));
  }
  if (workflow.targets.includes("docker")) {
    dockerResults.push(...(await runDockerTarget(smartshellInstance, workflow, newVersion)));
  }
  printReleaseSummary(newVersion, gitResults, npmResults, dockerResults);
  // Any failed target makes the whole release command fail.
  if ([...gitResults, ...npmResults, ...dockerResults].some((result) => result.status === "failed")) {
    process.exit(1);
  }
};
/**
 * Determines the semver bump level for this release.
 * Explicit --major/--minor/--patch flags win (in that priority order);
 * otherwise the level is inferred from the pending changelog block.
 */
function resolveVersionType(argvArg: any, pendingBlock: string): commitHelpers.VersionType {
  const explicitLevels: commitHelpers.VersionType[] = ["major", "minor", "patch"];
  for (const level of explicitLevels) {
    if (argvArg[level]) {
      return level;
    }
  }
  return inferVersionTypeFromPending(pendingBlock);
}
/**
 * Runs one labelled preflight command (tests, build) via the shell.
 * Logs success on exit code 0; otherwise logs an error and aborts the
 * whole release process with exit code 1.
 */
async function runCommandStep(
  smartshellInstance: plugins.smartshell.Smartshell,
  label: string,
  command: string,
): Promise<void> {
  console.log(`\n${label}`);
  const execution = await smartshellInstance.exec(command);
  if (execution.exitCode === 0) {
    logger.log("success", `${label} passed.`);
    return;
  }
  logger.log("error", `${label} failed. Aborting release.`);
  process.exit(1);
}
/**
 * Asserts the git working tree has no uncommitted changes.
 * On a dirty tree: logs `errorMessage`, echoes `git status --porcelain`
 * output, and aborts the process with exit code 1.
 */
async function verifyCleanTree(
  smartshellInstance: plugins.smartshell.Smartshell,
  errorMessage: string,
): Promise<void> {
  const porcelain = await smartshellInstance.exec("git status --porcelain");
  // Porcelain output is empty exactly when the tree is clean.
  if (porcelain.stdout.trim() === "") {
    return;
  }
  logger.log("error", errorMessage);
  console.log(porcelain.stdout);
  process.exit(1);
}
/**
 * Bumps the project to the next semver value.
 * Re-reads the current version, computes the bumped one, refreshes the
 * generated commit-info files, and rewrites the project version files.
 *
 * @returns the new version string (e.g. "2.16.1").
 */
async function runVersionStep(
  projectType: commitHelpers.ProjectType,
  versionType: commitHelpers.VersionType,
): Promise<string> {
  const previousVersion = await commitHelpers.readCurrentVersion(projectType);
  const bumpedVersion = commitHelpers.calculateNewVersion(previousVersion, versionType);
  logger.log("info", `Bumping version: ${previousVersion} -> ${bumpedVersion}`);
  // Refresh generated commit-info files before the manifests are touched.
  const commitInfo = new plugins.commitinfo.CommitInfo(paths.cwd, versionType);
  await commitInfo.writeIntoPotentialDirs();
  await commitHelpers.updateProjectVersionFiles(projectType, bumpedVersion);
  return bumpedVersion;
}
/**
 * Moves the pending changelog section into a dated version heading for
 * `newVersion` in the workflow's changelog file.
 */
async function runChangelogStep(
  workflow: IResolvedReleaseWorkflow,
  newVersion: string,
): Promise<void> {
  // YYYY-MM-DD stamp (UTC) for the new version heading.
  const releaseDate = new Date().toISOString().slice(0, 10);
  const changelogPath = plugins.path.join(paths.cwd, workflow.changelogFile);
  await movePendingToVersion(
    changelogPath,
    workflow.changelogPendingSection,
    workflow.changelogVersionHeading,
    newVersion,
    releaseDate,
  );
}
/**
 * Stages all changes and creates the release commit "vX.Y.Z".
 * Aborts the process with exit code 1 when the commit fails.
 */
async function runReleaseCommitStep(
  smartshellInstance: plugins.smartshell.Smartshell,
  newVersion: string,
): Promise<void> {
  await smartshellInstance.exec("git add -A");
  const quotedMessage = shellQuote(`v${newVersion}`);
  const commitResult = await smartshellInstance.exec(`git commit -m ${quotedMessage}`);
  if (commitResult.exitCode === 0) {
    return;
  }
  logger.log("error", "Release commit failed.");
  process.exit(1);
}
/**
 * Creates the annotated release tag "vX.Y.Z".
 * Aborts the process with exit code 1 when tagging fails.
 *
 * Fix: the tag name was interpolated into the shell command unquoted while
 * the -m message was shellQuoted. Quote both tokens so an unexpected
 * character in the version string cannot break or inject into the command,
 * matching the quoting discipline of runReleaseCommitStep.
 */
async function runTagStep(
  smartshellInstance: plugins.smartshell.Smartshell,
  newVersion: string,
): Promise<void> {
  const quotedTag = shellQuote(`v${newVersion}`);
  const result = await smartshellInstance.exec(`git tag ${quotedTag} -m ${quotedTag}`);
  if (result.exitCode !== 0) {
    logger.log("error", "Release tag failed.");
    process.exit(1);
  }
}
/**
 * Pushes the release to the git target: the current branch and/or all tags,
 * depending on the workflow's pushBranch/pushTags settings.
 *
 * @returns one result entry per executed push command.
 */
async function runGitTarget(
  smartshellInstance: plugins.smartshell.Smartshell,
  workflow: IResolvedReleaseWorkflow,
): Promise<ITargetResult[]> {
  const branchQuery = await smartshellInstance.exec("git branch --show-current");
  // Detached HEAD yields empty output; fall back to "master".
  const branch = branchQuery.stdout.trim() || "master";
  const pushPlan: Array<{ target: string; command: string }> = [
    ...(workflow.pushBranch
      ? [
          {
            target: `${workflow.gitRemote}/${branch}`,
            command: `git push ${workflow.gitRemote} ${branch}`,
          },
        ]
      : []),
    ...(workflow.pushTags
      ? [
          {
            target: `${workflow.gitRemote}/tags`,
            command: `git push ${workflow.gitRemote} --tags`,
          },
        ]
      : []),
  ];
  const results: ITargetResult[] = [];
  for (const { target, command } of pushPlan) {
    const pushOutcome = await smartshellInstance.exec(command);
    const succeeded = pushOutcome.exitCode === 0;
    results.push({
      target,
      status: succeeded ? "success" : "failed",
      message: succeeded ? undefined : "push failed",
    });
  }
  return results;
}
/**
 * Publishes the package to every configured npm registry via pnpm.
 * "Already published" responses count as success when the workflow's
 * npmAlreadyPublished policy is "success".
 *
 * @returns one result entry per registry (or a single skipped/failed entry
 *          when the target is disabled or unconfigured).
 */
async function runNpmTarget(
  smartshellInstance: plugins.smartshell.Smartshell,
  workflow: IResolvedReleaseWorkflow,
): Promise<ITargetResult[]> {
  if (!workflow.npmEnabled) {
    return [{ target: "npm", status: "skipped", message: "disabled" }];
  }
  if (workflow.npmRegistries.length === 0) {
    return [{ target: "npm", status: "failed", message: "no registries configured" }];
  }
  const results: ITargetResult[] = [];
  for (const registry of workflow.npmRegistries) {
    const publishCommand = `pnpm publish --registry=${registry} --access=${workflow.npmAccessLevel}`;
    const publishOutcome = await smartshellInstance.exec(publishCommand);
    // Collect all output channels; pnpm error details may land on any of them.
    const combinedText = `${publishOutcome.stdout || ""}\n${(publishOutcome as any).stderr || ""}\n${(publishOutcome as any).combinedOutput || ""}`;
    if (publishOutcome.exitCode === 0) {
      results.push({ target: registry, status: "success" });
      continue;
    }
    if (isAlreadyPublishedOutput(combinedText) && workflow.npmAlreadyPublished === "success") {
      results.push({ target: registry, status: "already-published" });
      continue;
    }
    results.push({ target: registry, status: "failed", message: firstMeaningfulLine(combinedText) });
  }
  return results;
}
/**
 * Builds and pushes every configured docker image, substituting
 * "{{version}}" in each image template with the released version.
 *
 * @returns one result entry per image (or a single skipped/failed entry when
 *          the target is disabled or unconfigured). A failed build skips the
 *          push for that image but continues with the remaining images.
 */
async function runDockerTarget(
  smartshellInstance: plugins.smartshell.Smartshell,
  workflow: IResolvedReleaseWorkflow,
  newVersion: string,
): Promise<ITargetResult[]> {
  if (!workflow.dockerEnabled) {
    return [{ target: "docker", status: "skipped", message: "disabled" }];
  }
  if (workflow.dockerImages.length === 0) {
    return [{ target: "docker", status: "failed", message: "no images configured" }];
  }
  const results: ITargetResult[] = [];
  for (const template of workflow.dockerImages) {
    const resolvedImage = template.replaceAll("{{version}}", newVersion);
    const buildOutcome = await smartshellInstance.exec(`docker build -t ${shellQuote(resolvedImage)} .`);
    if (buildOutcome.exitCode !== 0) {
      results.push({ target: resolvedImage, status: "failed", message: "docker build failed" });
      continue;
    }
    const pushOutcome = await smartshellInstance.exec(`docker push ${shellQuote(resolvedImage)}`);
    const pushed = pushOutcome.exitCode === 0;
    results.push({
      target: resolvedImage,
      status: pushed ? "success" : "failed",
      message: pushed ? undefined : "docker push failed",
    });
  }
  return results;
}
/**
 * Detects npm-registry output indicating the version already exists
 * (case-insensitive match on the known error phrases).
 */
function isAlreadyPublishedOutput(output: string): boolean {
  const alreadyPublishedPattern = /previously published versions|cannot publish over|already exists/i;
  return alreadyPublishedPattern.test(output);
}
/**
 * Returns the first non-blank (trimmed) line of `output`, or the generic
 * "command failed" fallback when there is none.
 */
function firstMeaningfulLine(output: string): string {
  for (const rawLine of output.split("\n")) {
    const trimmed = rawLine.trim();
    if (trimmed.length > 0) {
      return trimmed;
    }
  }
  return "command failed";
}
/**
 * Wraps `value` in POSIX single quotes, escaping embedded single quotes
 * with the standard '\'' sequence so the result is safe to splice into a
 * bash command line.
 */
function shellQuote(value: string): string {
  const escaped = value.split("'").join("'\\''");
  return `'${escaped}'`;
}
/**
 * Prints the resolved release workflow (confirmation mode, step plan,
 * targets, changelog location, and per-target configuration) to stdout.
 */
function printReleasePlan(workflow: IResolvedReleaseWorkflow): void {
  const targetList = workflow.targets.length > 0 ? workflow.targets.join(", ") : "none";
  console.log("");
  console.log("gitzone release - resolved workflow");
  console.log(`confirmation: ${workflow.confirmation}`);
  console.log(`plan: ${workflow.plan.join(" -> ")}`);
  console.log(`targets: ${targetList}`);
  console.log(`changelog: ${workflow.changelogFile}#${workflow.changelogPendingSection}`);
  // Per-target detail only when the target is actually selected.
  if (workflow.targets.includes("npm")) {
    const registryList = workflow.npmRegistries.length > 0 ? workflow.npmRegistries.join(", ") : "none";
    console.log(`npm registries: ${registryList}`);
  }
  if (workflow.targets.includes("docker")) {
    const imageList = workflow.dockerImages.length > 0 ? workflow.dockerImages.join(", ") : "none";
    console.log(`docker images: ${imageList}`);
  }
  console.log("");
}
/**
 * Prints the final per-target release summary for `newVersion`.
 * Each non-empty result list is printed as a labelled section with one
 * indented line per target: "  <target> <status> (<message>)".
 *
 * Improvement: the three section loops were byte-identical copies; they are
 * collapsed into one local helper so the line format lives in one place.
 * Output is unchanged.
 */
function printReleaseSummary(
  newVersion: string,
  gitResults: ITargetResult[],
  npmResults: ITargetResult[],
  dockerResults: ITargetResult[],
): void {
  // Prints one labelled section; empty sections are omitted entirely.
  const printSection = (label: string, results: ITargetResult[]): void => {
    if (results.length === 0) {
      return;
    }
    console.log(`${label}:`);
    for (const result of results) {
      console.log(`  ${result.target} ${result.status}${result.message ? ` (${result.message})` : ""}`);
    }
  };
  console.log("");
  console.log(`Release v${newVersion}`);
  console.log("");
  printSection("git", gitResults);
  printSection("npm", npmResults);
  printSection("docker", dockerResults);
}
/**
 * Prints usage information for `gitzone release`, either as structured JSON
 * (for --json consumers) or as human-readable text.
 *
 * Fix: the JSON flag list omitted the --major|--minor|--patch override that
 * the text help documents; both outputs now list the same flags.
 */
export function showHelp(mode?: ICliMode): void {
  if (mode?.json) {
    printJson({
      command: "release",
      usage: "gitzone release [options]",
      description: "Creates a versioned release from pending changelog entries and publishes configured artifacts.",
      flags: [
        { flag: "-y, --yes", description: "Run without interactive confirmation" },
        { flag: "-t, --test", description: "Enable release preflight tests" },
        { flag: "-b, --build", description: "Enable release preflight build" },
        { flag: "-p, --push", description: "Enable the git release target" },
        { flag: "--target <names>", description: "Release only selected targets: git,npm,docker" },
        { flag: "--npm", description: "Enable the npm release target" },
        { flag: "--docker", description: "Enable the Docker release target" },
        { flag: "--no-publish", description: "Run release core and git target only" },
        { flag: "--major|--minor|--patch", description: "Override inferred semver level" },
        { flag: "--plan", description: "Show resolved workflow without mutating files" },
      ],
    });
    return;
  }
  console.log("");
  console.log("Usage: gitzone release [options]");
  console.log("");
  console.log("Creates a versioned release from changelog Pending entries.");
  console.log("");
  console.log("Flags:");
  console.log("  -y, --yes               Run without interactive confirmation");
  console.log("  -t, --test              Enable release preflight tests");
  console.log("  -b, --build             Enable release preflight build");
  console.log("  -p, --push              Enable the git release target");
  console.log("  --target <names>        Release only selected targets: git,npm,docker");
  console.log("  --npm                   Enable the npm release target");
  console.log("  --docker                Enable the Docker release target");
  console.log("  --no-publish            Run release core and git target only");
  console.log("  --major|--minor|--patch Override inferred semver level");
  console.log("  --plan                  Show resolved workflow without mutating files");
  console.log("");
}
+5
View File
@@ -0,0 +1,5 @@
export * from "../plugins.js";
import * as commitinfo from "@push.rocks/commitinfo";
export { commitinfo };
+2 -2
View File
@@ -26,11 +26,11 @@ export interface IGlobalRegistryData {
export class GlobalRegistry { export class GlobalRegistry {
private static instance: GlobalRegistry | null = null; private static instance: GlobalRegistry | null = null;
private kvStore: plugins.npmextra.KeyValueStore<IGlobalRegistryData>; private kvStore: plugins.smartconfig.KeyValueStore<IGlobalRegistryData>;
private docker: DockerContainer; private docker: DockerContainer;
private constructor() { private constructor() {
this.kvStore = new plugins.npmextra.KeyValueStore({ this.kvStore = new plugins.smartconfig.KeyValueStore({
typeArg: 'userHomeDir', typeArg: 'userHomeDir',
identityArg: 'gitzone-services', identityArg: 'gitzone-services',
}); });
+20 -20
View File
@@ -31,7 +31,7 @@ export class ServiceManager {
await this.config.loadOrCreate(); await this.config.loadOrCreate();
logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`); logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);
// Load service selection from npmextra.json // Load service selection from .smartconfig.json
await this.loadServiceConfiguration(); await this.loadServiceConfiguration();
// Validate and update ports if needed // Validate and update ports if needed
@@ -39,11 +39,11 @@ export class ServiceManager {
} }
/** /**
* Load service configuration from npmextra.json * Load service configuration from .smartconfig.json
*/ */
private async loadServiceConfiguration(): Promise<void> { private async loadServiceConfiguration(): Promise<void> {
const npmextraConfig = new plugins.npmextra.Npmextra(process.cwd()); const smartconfigInstance = new plugins.smartconfig.Smartconfig(process.cwd());
const gitzoneConfig = npmextraConfig.dataFor<any>('@git.zone/cli', {}); const gitzoneConfig = smartconfigInstance.dataFor<any>('@git.zone/cli', {});
// Check if services array exists // Check if services array exists
if (!gitzoneConfig.services || !Array.isArray(gitzoneConfig.services) || gitzoneConfig.services.length === 0) { if (!gitzoneConfig.services || !Array.isArray(gitzoneConfig.services) || gitzoneConfig.services.length === 0) {
@@ -63,7 +63,7 @@ export class ServiceManager {
this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch']; this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
// Save to npmextra.json // Save to .smartconfig.json
await this.saveServiceConfiguration(this.enabledServices); await this.saveServiceConfiguration(this.enabledServices);
} else { } else {
this.enabledServices = gitzoneConfig.services; this.enabledServices = gitzoneConfig.services;
@@ -72,31 +72,31 @@ export class ServiceManager {
} }
/** /**
* Save service configuration to npmextra.json * Save service configuration to .smartconfig.json
*/ */
private async saveServiceConfiguration(services: string[]): Promise<void> { private async saveServiceConfiguration(services: string[]): Promise<void> {
const npmextraPath = plugins.path.join(process.cwd(), 'npmextra.json'); const smartconfigPath = plugins.path.join(process.cwd(), '.smartconfig.json');
let npmextraData: any = {}; let smartconfigData: any = {};
// Read existing npmextra.json if it exists // Read existing .smartconfig.json if it exists
if (await plugins.smartfs.file(npmextraPath).exists()) { if (await plugins.smartfs.file(smartconfigPath).exists()) {
const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read(); const content = await plugins.smartfs.file(smartconfigPath).encoding('utf8').read();
npmextraData = JSON.parse(content as string); smartconfigData = JSON.parse(content as string);
} }
// Update @git.zone/cli.services // Update @git.zone/cli.services
if (!npmextraData['@git.zone/cli']) { if (!smartconfigData['@git.zone/cli']) {
npmextraData['@git.zone/cli'] = {}; smartconfigData['@git.zone/cli'] = {};
} }
npmextraData['@git.zone/cli'].services = services; smartconfigData['@git.zone/cli'].services = services;
// Write back to npmextra.json // Write back to .smartconfig.json
await plugins.smartfs await plugins.smartfs
.file(npmextraPath) .file(smartconfigPath)
.encoding('utf8') .encoding('utf8')
.write(JSON.stringify(npmextraData, null, 2)); .write(JSON.stringify(smartconfigData, null, 2));
logger.log('ok', `✅ Saved service configuration to npmextra.json`); logger.log('ok', `✅ Saved service configuration to .smartconfig.json`);
logger.log('info', `🔧 Enabled services: ${services.join(', ')}`); logger.log('info', `🔧 Enabled services: ${services.join(', ')}`);
} }
@@ -904,7 +904,7 @@ export class ServiceManager {
this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch']; this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
// Save to npmextra.json // Save to .smartconfig.json
await this.saveServiceConfiguration(this.enabledServices); await this.saveServiceConfiguration(this.enabledServices);
logger.log('ok', '✅ Service configuration updated'); logger.log('ok', '✅ Service configuration updated');
+543 -176
View File
@@ -1,12 +1,26 @@
import * as plugins from './mod.plugins.js'; import * as plugins from "./mod.plugins.js";
import * as helpers from './helpers.js'; import * as helpers from "./helpers.js";
import { ServiceManager } from './classes.servicemanager.js'; import { ServiceManager } from "./classes.servicemanager.js";
import { GlobalRegistry } from './classes.globalregistry.js'; import { GlobalRegistry } from "./classes.globalregistry.js";
import { logger } from '../gitzone.logging.js'; import { logger } from "../gitzone.logging.js";
import type { ICliMode } from "../helpers.climode.js";
import { getCliMode, printJson } from "../helpers.climode.js";
import {
getCliConfigValueFromData,
readSmartconfigFile,
setCliConfigValueInData,
writeSmartconfigFile,
} from "../helpers.smartconfig.js";
export const run = async (argvArg: any) => { export const run = async (argvArg: any) => {
const mode = await getCliMode(argvArg);
const isGlobal = argvArg.g || argvArg.global; const isGlobal = argvArg.g || argvArg.global;
const command = argvArg._[1] || 'help'; const command = argvArg._[1] || "help";
if (mode.help || command === "help") {
showHelp(mode);
return;
}
// Handle global commands first // Handle global commands first
if (isGlobal) { if (isGlobal) {
@@ -14,264 +28,597 @@ export const run = async (argvArg: any) => {
return; return;
} }
// Local project commands const service = argvArg._[2] || "all";
const serviceManager = new ServiceManager();
await serviceManager.init();
const service = argvArg._[2] || 'all';
switch (command) { switch (command) {
case 'start': case "config":
await handleStart(serviceManager, service); if (service === "services" || argvArg._[2] === "services") {
break; const serviceManager = new ServiceManager();
await serviceManager.init();
case 'stop':
await handleStop(serviceManager, service);
break;
case 'restart':
await handleRestart(serviceManager, service);
break;
case 'status':
await serviceManager.showStatus();
break;
case 'config':
if (service === 'services' || argvArg._[2] === 'services') {
await handleConfigureServices(serviceManager); await handleConfigureServices(serviceManager);
} else { } else {
await serviceManager.showConfig(); await handleShowConfig(mode);
} }
break; break;
case 'compass': case "set":
await serviceManager.showCompassConnection(); await handleSetServices(argvArg._[2], mode);
break; break;
case 'logs': case "enable":
const lines = parseInt(argvArg._[3]) || 20; await handleEnableServices(argvArg._.slice(2), mode);
await serviceManager.showLogs(service, lines);
break; break;
case 'remove': case "disable":
await handleRemove(serviceManager); await handleDisableServices(argvArg._.slice(2), mode);
break; break;
case 'clean': case "start":
await handleClean(serviceManager); case "stop":
break; case "restart":
case "status":
case "compass":
case "logs":
case "remove":
case "clean":
case "reconfigure": {
const serviceManager = new ServiceManager();
await serviceManager.init();
case 'reconfigure': switch (command) {
await serviceManager.reconfigure(); case "start":
break; await handleStart(serviceManager, service);
break;
case 'help': case "stop":
await handleStop(serviceManager, service);
break;
case "restart":
await handleRestart(serviceManager, service);
break;
case "status":
await serviceManager.showStatus();
break;
case "compass":
await serviceManager.showCompassConnection();
break;
case "logs": {
const lines = parseInt(argvArg._[3]) || 20;
await serviceManager.showLogs(service, lines);
break;
}
case "remove":
await handleRemove(serviceManager);
break;
case "clean":
await handleClean(serviceManager);
break;
case "reconfigure":
await serviceManager.reconfigure();
break;
}
break;
}
default: default:
showHelp(); showHelp(mode);
break; break;
} }
}; };
// Canonical service identifiers accepted by the services subcommands.
const allowedServices = ["mongodb", "minio", "elasticsearch"];
// Alias table mapping user-supplied shorthands to canonical service names.
const serviceNameAliases: Record<string, string> = {
  mongo: "mongodb",
  mongodb: "mongodb",
  minio: "minio",
  s3: "minio",
  elastic: "elasticsearch",
  elasticsearch: "elasticsearch",
  es: "elasticsearch",
};
/**
 * Resolves shorthand service names (mongo, s3, es, ...) to their canonical
 * form. Unrecognized names are returned unchanged so that validation can
 * report them to the user.
 */
const normalizeServiceName = (service: string): string =>
  serviceNameAliases[service] ?? service;
/**
 * Reads the current services configuration:
 * - the enabled-services list from the smartconfig file (empty array when
 *   missing or malformed), and
 * - the generated runtime environment from `.nogit/env.json` when that file
 *   exists (null otherwise).
 */
async function readServicesConfig(): Promise<{
  enabledServices: string[];
  environment: Record<string, any> | null;
}> {
  const smartconfigData = await readSmartconfigFile();
  const configuredServices = getCliConfigValueFromData(
    smartconfigData,
    "services",
  );
  const envPath = plugins.path.join(process.cwd(), ".nogit", "env.json");
  let environment: Record<string, any> | null = null;
  if (await plugins.smartfs.file(envPath).exists()) {
    const rawEnv = (await plugins.smartfs
      .file(envPath)
      .encoding("utf8")
      .read()) as string;
    environment = JSON.parse(rawEnv);
  }
  return {
    enabledServices: Array.isArray(configuredServices) ? configuredServices : [],
    environment,
  };
}
/**
 * Persists the enabled-services list back to the smartconfig file,
 * preserving all other configuration values.
 */
async function updateEnabledServices(services: string[]): Promise<void> {
  const configData = await readSmartconfigFile();
  setCliConfigValueInData(configData, "services", services);
  await writeSmartconfigFile(configData);
}
/**
 * Shows the current services configuration.
 * In JSON mode, emits the raw config object; otherwise prints enabled
 * services plus per-service connection details read from `.nogit/env.json`.
 * When no env file exists yet, only a hint to start a service is printed.
 */
async function handleShowConfig(mode: ICliMode) {
  const configData = await readServicesConfig();
  if (mode.json) {
    printJson(configData);
    return;
  }
  helpers.printHeader("Current Services Configuration");
  logger.log(
    "info",
    `Enabled Services: ${configData.enabledServices.length > 0 ? configData.enabledServices.join(", ") : "none configured"}`,
  );
  console.log();
  // Without a generated env file there are no runtime details to show.
  if (!configData.environment) {
    logger.log(
      "note",
      "No .nogit/env.json found yet. Start a service once to create runtime defaults.",
    );
    return;
  }
  const env = configData.environment;
  // NOTE(review): all three service sections are printed regardless of which
  // services are enabled; env keys not present print as "undefined".
  logger.log("note", "MongoDB:");
  logger.log("info", `  Host: ${env.MONGODB_HOST}:${env.MONGODB_PORT}`);
  logger.log("info", `  Database: ${env.MONGODB_NAME}`);
  logger.log("info", `  User: ${env.MONGODB_USER}`);
  logger.log("info", `  Container: ${env.PROJECT_NAME}-mongodb`);
  logger.log(
    "info",
    `  Data: ${plugins.path.join(process.cwd(), ".nogit", "mongodata")}`,
  );
  logger.log("info", `  Connection: ${env.MONGODB_URL}`);
  console.log();
  logger.log("note", "S3/MinIO:");
  logger.log("info", `  Host: ${env.S3_HOST}`);
  logger.log("info", `  API Port: ${env.S3_PORT}`);
  logger.log("info", `  Console Port: ${env.S3_CONSOLE_PORT}`);
  logger.log("info", `  Bucket: ${env.S3_BUCKET}`);
  logger.log("info", `  Container: ${env.PROJECT_NAME}-minio`);
  logger.log(
    "info",
    `  Data: ${plugins.path.join(process.cwd(), ".nogit", "miniodata")}`,
  );
  logger.log("info", `  Endpoint: ${env.S3_ENDPOINT}`);
  console.log();
  logger.log("note", "Elasticsearch:");
  logger.log(
    "info",
    `  Host: ${env.ELASTICSEARCH_HOST}:${env.ELASTICSEARCH_PORT}`,
  );
  logger.log("info", `  User: ${env.ELASTICSEARCH_USER}`);
  logger.log("info", `  Container: ${env.PROJECT_NAME}-elasticsearch`);
  logger.log(
    "info",
    `  Data: ${plugins.path.join(process.cwd(), ".nogit", "esdata")}`,
  );
  logger.log("info", `  Connection: ${env.ELASTICSEARCH_URL}`);
}
async function handleSetServices(rawValue: string | undefined, mode: ICliMode) {
if (!rawValue) {
throw new Error("Specify a comma-separated list of services");
}
const requestedServices = rawValue
.split(",")
.map((service) => normalizeServiceName(service.trim()))
.filter(Boolean);
validateRequestedServices(requestedServices);
await updateEnabledServices(requestedServices);
if (mode.json) {
printJson({ ok: true, action: "set", enabledServices: requestedServices });
return;
}
logger.log("ok", `Enabled services set to: ${requestedServices.join(", ")}`);
}
async function handleEnableServices(
requestedServices: string[],
mode: ICliMode,
) {
const normalizedServices = requestedServices.map((service) =>
normalizeServiceName(service),
);
validateRequestedServices(normalizedServices);
const configData = await readServicesConfig();
const nextServices = Array.from(
new Set([...configData.enabledServices, ...normalizedServices]),
);
await updateEnabledServices(nextServices);
if (mode.json) {
printJson({ ok: true, action: "enable", enabledServices: nextServices });
return;
}
logger.log("ok", `Enabled services: ${nextServices.join(", ")}`);
}
async function handleDisableServices(
requestedServices: string[],
mode: ICliMode,
) {
const normalizedServices = requestedServices.map((service) =>
normalizeServiceName(service),
);
validateRequestedServices(normalizedServices);
const configData = await readServicesConfig();
const nextServices = configData.enabledServices.filter(
(service) => !normalizedServices.includes(service),
);
await updateEnabledServices(nextServices);
if (mode.json) {
printJson({ ok: true, action: "disable", enabledServices: nextServices });
return;
}
logger.log("ok", `Enabled services: ${nextServices.join(", ")}`);
}
/**
 * Ensure the list is non-empty and that every entry is a known service name.
 * @throws Error when the list is empty or contains unknown services.
 */
function validateRequestedServices(services: string[]): void {
  if (services.length === 0) {
    throw new Error("Specify at least one service");
  }
  const unknown: string[] = [];
  for (const service of services) {
    if (!allowedServices.includes(service)) {
      unknown.push(service);
    }
  }
  if (unknown.length > 0) {
    throw new Error(`Unknown service(s): ${unknown.join(", ")}`);
  }
}
async function handleStart(serviceManager: ServiceManager, service: string) { async function handleStart(serviceManager: ServiceManager, service: string) {
helpers.printHeader('Starting Services'); helpers.printHeader("Starting Services");
switch (service) { switch (service) {
case 'mongo': case "mongo":
case 'mongodb': case "mongodb":
await serviceManager.startMongoDB(); await serviceManager.startMongoDB();
break; break;
case 'minio': case "minio":
case 's3': case "s3":
await serviceManager.startMinIO(); await serviceManager.startMinIO();
break; break;
case 'elasticsearch': case "elasticsearch":
case 'es': case "es":
await serviceManager.startElasticsearch(); await serviceManager.startElasticsearch();
break; break;
case 'all': case "all":
case '': case "":
await serviceManager.startAll(); await serviceManager.startAll();
break; break;
default: default:
logger.log('error', `Unknown service: ${service}`); logger.log("error", `Unknown service: ${service}`);
logger.log('note', 'Use: mongo, s3, elasticsearch, or all'); logger.log("note", "Use: mongo, s3, elasticsearch, or all");
break; break;
} }
} }
async function handleStop(serviceManager: ServiceManager, service: string) { async function handleStop(serviceManager: ServiceManager, service: string) {
helpers.printHeader('Stopping Services'); helpers.printHeader("Stopping Services");
switch (service) { switch (service) {
case 'mongo': case "mongo":
case 'mongodb': case "mongodb":
await serviceManager.stopMongoDB(); await serviceManager.stopMongoDB();
break; break;
case 'minio': case "minio":
case 's3': case "s3":
await serviceManager.stopMinIO(); await serviceManager.stopMinIO();
break; break;
case 'elasticsearch': case "elasticsearch":
case 'es': case "es":
await serviceManager.stopElasticsearch(); await serviceManager.stopElasticsearch();
break; break;
case 'all': case "all":
case '': case "":
await serviceManager.stopAll(); await serviceManager.stopAll();
break; break;
default: default:
logger.log('error', `Unknown service: ${service}`); logger.log("error", `Unknown service: ${service}`);
logger.log('note', 'Use: mongo, s3, elasticsearch, or all'); logger.log("note", "Use: mongo, s3, elasticsearch, or all");
break; break;
} }
} }
async function handleRestart(serviceManager: ServiceManager, service: string) { async function handleRestart(serviceManager: ServiceManager, service: string) {
helpers.printHeader('Restarting Services'); helpers.printHeader("Restarting Services");
switch (service) { switch (service) {
case 'mongo': case "mongo":
case 'mongodb': case "mongodb":
await serviceManager.stopMongoDB(); await serviceManager.stopMongoDB();
await plugins.smartdelay.delayFor(2000); await plugins.smartdelay.delayFor(2000);
await serviceManager.startMongoDB(); await serviceManager.startMongoDB();
break; break;
case 'minio': case "minio":
case 's3': case "s3":
await serviceManager.stopMinIO(); await serviceManager.stopMinIO();
await plugins.smartdelay.delayFor(2000); await plugins.smartdelay.delayFor(2000);
await serviceManager.startMinIO(); await serviceManager.startMinIO();
break; break;
case 'elasticsearch': case "elasticsearch":
case 'es': case "es":
await serviceManager.stopElasticsearch(); await serviceManager.stopElasticsearch();
await plugins.smartdelay.delayFor(2000); await plugins.smartdelay.delayFor(2000);
await serviceManager.startElasticsearch(); await serviceManager.startElasticsearch();
break; break;
case 'all': case "all":
case '': case "":
await serviceManager.stopAll(); await serviceManager.stopAll();
await plugins.smartdelay.delayFor(2000); await plugins.smartdelay.delayFor(2000);
await serviceManager.startAll(); await serviceManager.startAll();
break; break;
default: default:
logger.log('error', `Unknown service: ${service}`); logger.log("error", `Unknown service: ${service}`);
break; break;
} }
} }
async function handleRemove(serviceManager: ServiceManager) { async function handleRemove(serviceManager: ServiceManager) {
helpers.printHeader('Removing Containers'); helpers.printHeader("Removing Containers");
logger.log('note', '⚠️ This will remove containers but preserve data'); logger.log("note", "⚠️ This will remove containers but preserve data");
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation('Continue?', false); const shouldContinue =
await plugins.smartinteract.SmartInteract.getCliConfirmation(
"Continue?",
false,
);
if (shouldContinue) { if (shouldContinue) {
await serviceManager.removeContainers(); await serviceManager.removeContainers();
} else { } else {
logger.log('note', 'Cancelled'); logger.log("note", "Cancelled");
} }
} }
async function handleClean(serviceManager: ServiceManager) { async function handleClean(serviceManager: ServiceManager) {
helpers.printHeader('Clean All'); helpers.printHeader("Clean All");
logger.log('error', '⚠️ WARNING: This will remove all containers and data!'); logger.log("error", "⚠️ WARNING: This will remove all containers and data!");
logger.log('error', 'This action cannot be undone!'); logger.log("error", "This action cannot be undone!");
const smartinteraction = new plugins.smartinteract.SmartInteract(); const smartinteraction = new plugins.smartinteract.SmartInteract();
const confirmAnswer = await smartinteraction.askQuestion({ const confirmAnswer = await smartinteraction.askQuestion({
name: 'confirm', name: "confirm",
type: 'input', type: "input",
message: 'Type "yes" to confirm:', message: 'Type "yes" to confirm:',
default: 'no' default: "no",
}); });
if (confirmAnswer.value === 'yes') { if (confirmAnswer.value === "yes") {
await serviceManager.removeContainers(); await serviceManager.removeContainers();
console.log(); console.log();
await serviceManager.cleanData(); await serviceManager.cleanData();
logger.log('ok', 'All cleaned ✓'); logger.log("ok", "All cleaned ✓");
} else { } else {
logger.log('note', 'Cancelled'); logger.log("note", "Cancelled");
} }
} }
/**
 * Launch the interactive service-selection flow of the ServiceManager.
 * NOTE(review): reconstructed from a two-column diff scrape; right-hand
 * (current) column taken as authoritative.
 */
async function handleConfigureServices(serviceManager: ServiceManager) {
  helpers.printHeader("Configure Services");
  await serviceManager.configureServices();
}
function showHelp() { export function showHelp(mode?: ICliMode) {
helpers.printHeader('GitZone Services Manager'); if (mode?.json) {
printJson({
command: "services",
usage: "gitzone services <command> [options]",
commands: [
{
name: "config",
description:
"Show configured services and any existing runtime env.json data",
},
{
name: "set <csv>",
description: "Set the enabled service list without prompts",
},
{
name: "enable <service...>",
description: "Enable one or more services without prompts",
},
{
name: "disable <service...>",
description: "Disable one or more services without prompts",
},
{ name: "start [service]", description: "Start services" },
{ name: "stop [service]", description: "Stop services" },
{ name: "status", description: "Show service status" },
],
examples: [
"gitzone services config --json",
"gitzone services set mongodb,minio",
"gitzone services enable elasticsearch",
],
});
return;
}
logger.log('ok', 'Usage: gitzone services [command] [options]'); helpers.printHeader("GitZone Services Manager");
logger.log("ok", "Usage: gitzone services [command] [options]");
console.log(); console.log();
logger.log('note', 'Commands:'); logger.log("note", "Commands:");
logger.log('info', ' start [service] Start services (mongo|s3|elasticsearch|all)'); logger.log(
logger.log('info', ' stop [service] Stop services (mongo|s3|elasticsearch|all)'); "info",
logger.log('info', ' restart [service] Restart services (mongo|s3|elasticsearch|all)'); " start [service] Start services (mongo|s3|elasticsearch|all)",
logger.log('info', ' status Show service status'); );
logger.log('info', ' config Show current configuration'); logger.log(
logger.log('info', ' config services Configure which services are enabled'); "info",
logger.log('info', ' compass Show MongoDB Compass connection string'); " stop [service] Stop services (mongo|s3|elasticsearch|all)",
logger.log('info', ' logs [service] Show logs (mongo|s3|elasticsearch|all) [lines]'); );
logger.log('info', ' reconfigure Reassign ports and restart services'); logger.log(
logger.log('info', ' remove Remove all containers'); "info",
logger.log('info', ' clean Remove all containers and data ⚠️'); " restart [service] Restart services (mongo|s3|elasticsearch|all)",
logger.log('info', ' help Show this help message'); );
logger.log("info", " status Show service status");
logger.log("info", " config Show current configuration");
logger.log(
"info",
" config services Configure which services are enabled",
);
logger.log(
"info",
" set <csv> Set enabled services without prompts",
);
logger.log("info", " enable <svc...> Enable one or more services");
logger.log("info", " disable <svc...> Disable one or more services");
logger.log(
"info",
" compass Show MongoDB Compass connection string",
);
logger.log(
"info",
" logs [service] Show logs (mongo|s3|elasticsearch|all) [lines]",
);
logger.log("info", " reconfigure Reassign ports and restart services");
logger.log("info", " remove Remove all containers");
logger.log("info", " clean Remove all containers and data ⚠️");
logger.log("info", " help Show this help message");
console.log(); console.log();
logger.log('note', 'Available Services:'); logger.log("note", "Available Services:");
logger.log('info', ' • MongoDB (mongo) - Document database'); logger.log("info", " • MongoDB (mongo) - Document database");
logger.log('info', ' • MinIO (s3) - S3-compatible object storage'); logger.log("info", " • MinIO (s3) - S3-compatible object storage");
logger.log('info', ' • Elasticsearch (elasticsearch) - Search and analytics engine'); logger.log(
"info",
" • Elasticsearch (elasticsearch) - Search and analytics engine",
);
console.log(); console.log();
logger.log('note', 'Features:'); logger.log("note", "Features:");
logger.log('info', ' • Auto-creates .nogit/env.json with smart defaults'); logger.log("info", " • Auto-creates .nogit/env.json with smart defaults");
logger.log('info', ' • Random ports (20000-30000) for MongoDB/MinIO to avoid conflicts'); logger.log(
logger.log('info', ' • Elasticsearch uses standard port 9200'); "info",
logger.log('info', ' • Project-specific containers for multi-project support'); " • Random ports (20000-30000) for MongoDB/MinIO to avoid conflicts",
logger.log('info', ' • Preserves custom configuration values'); );
logger.log('info', 'MongoDB Compass connection support'); logger.log("info", "Elasticsearch uses standard port 9200");
logger.log(
"info",
" • Project-specific containers for multi-project support",
);
logger.log("info", " • Preserves custom configuration values");
logger.log("info", " • MongoDB Compass connection support");
console.log(); console.log();
logger.log('note', 'Examples:'); logger.log("note", "Examples:");
logger.log('info', ' gitzone services start # Start all services'); logger.log(
logger.log('info', ' gitzone services start mongo # Start only MongoDB'); "info",
logger.log('info', ' gitzone services start elasticsearch # Start only Elasticsearch'); " gitzone services start # Start all services",
logger.log('info', ' gitzone services stop # Stop all services'); );
logger.log('info', ' gitzone services status # Check service status'); logger.log(
logger.log('info', ' gitzone services config # Show configuration'); "info",
logger.log('info', ' gitzone services compass # Get MongoDB Compass connection'); " gitzone services start mongo # Start only MongoDB",
logger.log('info', ' gitzone services logs elasticsearch # Show Elasticsearch logs'); );
logger.log(
"info",
" gitzone services start elasticsearch # Start only Elasticsearch",
);
logger.log(
"info",
" gitzone services stop # Stop all services",
);
logger.log(
"info",
" gitzone services status # Check service status",
);
logger.log(
"info",
" gitzone services config # Show configuration",
);
logger.log(
"info",
" gitzone services config --json # Show configuration as JSON",
);
logger.log(
"info",
" gitzone services set mongodb,minio # Configure services without prompts",
);
logger.log(
"info",
" gitzone services compass # Get MongoDB Compass connection",
);
logger.log(
"info",
" gitzone services logs elasticsearch # Show Elasticsearch logs",
);
console.log(); console.log();
logger.log('note', 'Global Commands (-g/--global):'); logger.log("note", "Global Commands (-g/--global):");
logger.log('info', ' list -g List all registered projects'); logger.log("info", " list -g List all registered projects");
logger.log('info', ' status -g Show status across all projects'); logger.log("info", " status -g Show status across all projects");
logger.log('info', ' stop -g Stop all containers across all projects'); logger.log(
logger.log('info', ' cleanup -g Remove stale registry entries'); "info",
" stop -g Stop all containers across all projects",
);
logger.log("info", " cleanup -g Remove stale registry entries");
console.log(); console.log();
logger.log('note', 'Global Examples:'); logger.log("note", "Global Examples:");
logger.log('info', ' gitzone services list -g # List all registered projects'); logger.log(
logger.log('info', ' gitzone services status -g # Show global container status'); "info",
logger.log('info', ' gitzone services stop -g # Stop all (prompts for confirmation)'); " gitzone services list -g # List all registered projects",
);
logger.log(
"info",
" gitzone services status -g # Show global container status",
);
logger.log(
"info",
" gitzone services stop -g # Stop all (prompts for confirmation)",
);
} }
// ==================== Global Command Handlers ==================== // ==================== Global Command Handlers ====================
@@ -280,23 +627,23 @@ async function handleGlobalCommand(command: string) {
const globalRegistry = GlobalRegistry.getInstance(); const globalRegistry = GlobalRegistry.getInstance();
switch (command) { switch (command) {
case 'list': case "list":
await handleGlobalList(globalRegistry); await handleGlobalList(globalRegistry);
break; break;
case 'status': case "status":
await handleGlobalStatus(globalRegistry); await handleGlobalStatus(globalRegistry);
break; break;
case 'stop': case "stop":
await handleGlobalStop(globalRegistry); await handleGlobalStop(globalRegistry);
break; break;
case 'cleanup': case "cleanup":
await handleGlobalCleanup(globalRegistry); await handleGlobalCleanup(globalRegistry);
break; break;
case 'help': case "help":
default: default:
showHelp(); showHelp();
break; break;
@@ -304,13 +651,13 @@ async function handleGlobalCommand(command: string) {
} }
async function handleGlobalList(globalRegistry: GlobalRegistry) { async function handleGlobalList(globalRegistry: GlobalRegistry) {
helpers.printHeader('Registered Projects (Global)'); helpers.printHeader("Registered Projects (Global)");
const projects = await globalRegistry.getAllProjects(); const projects = await globalRegistry.getAllProjects();
const projectPaths = Object.keys(projects); const projectPaths = Object.keys(projects);
if (projectPaths.length === 0) { if (projectPaths.length === 0) {
logger.log('note', 'No projects registered'); logger.log("note", "No projects registered");
return; return;
} }
@@ -319,20 +666,20 @@ async function handleGlobalList(globalRegistry: GlobalRegistry) {
const lastActive = new Date(project.lastActive).toLocaleString(); const lastActive = new Date(project.lastActive).toLocaleString();
console.log(); console.log();
logger.log('ok', `📁 ${project.projectName}`); logger.log("ok", `📁 ${project.projectName}`);
logger.log('info', ` Path: ${project.projectPath}`); logger.log("info", ` Path: ${project.projectPath}`);
logger.log('info', ` Services: ${project.enabledServices.join(', ')}`); logger.log("info", ` Services: ${project.enabledServices.join(", ")}`);
logger.log('info', ` Last Active: ${lastActive}`); logger.log("info", ` Last Active: ${lastActive}`);
} }
} }
async function handleGlobalStatus(globalRegistry: GlobalRegistry) { async function handleGlobalStatus(globalRegistry: GlobalRegistry) {
helpers.printHeader('Global Service Status'); helpers.printHeader("Global Service Status");
const statuses = await globalRegistry.getGlobalStatus(); const statuses = await globalRegistry.getGlobalStatus();
if (statuses.length === 0) { if (statuses.length === 0) {
logger.log('note', 'No projects registered'); logger.log("note", "No projects registered");
return; return;
} }
@@ -341,28 +688,39 @@ async function handleGlobalStatus(globalRegistry: GlobalRegistry) {
for (const project of statuses) { for (const project of statuses) {
console.log(); console.log();
logger.log('ok', `📁 ${project.projectName}`); logger.log("ok", `📁 ${project.projectName}`);
logger.log('info', ` Path: ${project.projectPath}`); logger.log("info", ` Path: ${project.projectPath}`);
if (project.containers.length === 0) { if (project.containers.length === 0) {
logger.log('note', ' No containers configured'); logger.log("note", " No containers configured");
continue; continue;
} }
for (const container of project.containers) { for (const container of project.containers) {
totalContainers++; totalContainers++;
const statusIcon = container.status === 'running' ? '🟢' : container.status === 'exited' ? '🟡' : '⚪'; const statusIcon =
if (container.status === 'running') runningCount++; container.status === "running"
logger.log('info', ` ${statusIcon} ${container.name}: ${container.status}`); ? "🟢"
: container.status === "exited"
? "🟡"
: "⚪";
if (container.status === "running") runningCount++;
logger.log(
"info",
` ${statusIcon} ${container.name}: ${container.status}`,
);
} }
} }
console.log(); console.log();
logger.log('note', `Summary: ${runningCount}/${totalContainers} containers running across ${statuses.length} project(s)`); logger.log(
"note",
`Summary: ${runningCount}/${totalContainers} containers running across ${statuses.length} project(s)`,
);
} }
async function handleGlobalStop(globalRegistry: GlobalRegistry) { async function handleGlobalStop(globalRegistry: GlobalRegistry) {
helpers.printHeader('Stop All Containers (Global)'); helpers.printHeader("Stop All Containers (Global)");
const statuses = await globalRegistry.getGlobalStatus(); const statuses = await globalRegistry.getGlobalStatus();
@@ -370,64 +728,73 @@ async function handleGlobalStop(globalRegistry: GlobalRegistry) {
let runningCount = 0; let runningCount = 0;
for (const project of statuses) { for (const project of statuses) {
for (const container of project.containers) { for (const container of project.containers) {
if (container.status === 'running') runningCount++; if (container.status === "running") runningCount++;
} }
} }
if (runningCount === 0) { if (runningCount === 0) {
logger.log('note', 'No running containers found'); logger.log("note", "No running containers found");
return; return;
} }
logger.log('note', `Found ${runningCount} running container(s) across ${statuses.length} project(s)`); logger.log(
"note",
`Found ${runningCount} running container(s) across ${statuses.length} project(s)`,
);
console.log(); console.log();
// Show what will be stopped // Show what will be stopped
for (const project of statuses) { for (const project of statuses) {
const runningContainers = project.containers.filter(c => c.status === 'running'); const runningContainers = project.containers.filter(
(c) => c.status === "running",
);
if (runningContainers.length > 0) { if (runningContainers.length > 0) {
logger.log('info', `${project.projectName}:`); logger.log("info", `${project.projectName}:`);
for (const container of runningContainers) { for (const container of runningContainers) {
logger.log('info', `${container.name}`); logger.log("info", `${container.name}`);
} }
} }
} }
console.log(); console.log();
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation( const shouldContinue =
'Stop all containers?', await plugins.smartinteract.SmartInteract.getCliConfirmation(
false "Stop all containers?",
); false,
);
if (!shouldContinue) { if (!shouldContinue) {
logger.log('note', 'Cancelled'); logger.log("note", "Cancelled");
return; return;
} }
logger.log('note', 'Stopping all containers...'); logger.log("note", "Stopping all containers...");
const result = await globalRegistry.stopAll(); const result = await globalRegistry.stopAll();
if (result.stopped.length > 0) { if (result.stopped.length > 0) {
logger.log('ok', `Stopped: ${result.stopped.join(', ')}`); logger.log("ok", `Stopped: ${result.stopped.join(", ")}`);
} }
if (result.failed.length > 0) { if (result.failed.length > 0) {
logger.log('error', `Failed to stop: ${result.failed.join(', ')}`); logger.log("error", `Failed to stop: ${result.failed.join(", ")}`);
} }
} }
/**
 * Remove stale entries from the cross-project global registry and report
 * what was removed.
 * NOTE(review): reconstructed from a two-column diff scrape; right-hand
 * (current) column taken as authoritative.
 */
async function handleGlobalCleanup(globalRegistry: GlobalRegistry) {
  helpers.printHeader("Cleanup Registry (Global)");
  logger.log("note", "Checking for stale registry entries...");
  const removed = await globalRegistry.cleanup();
  if (removed.length === 0) {
    logger.log("ok", "No stale entries found");
    return;
  }
  logger.log(
    "ok",
    `Removed ${removed.length} stale entr${removed.length === 1 ? "y" : "ies"}:`,
  );
  for (const path of removed) {
    logger.log("info", `${path}`);
  }
}
+217 -58
View File
@@ -1,91 +1,250 @@
/* ----------------------------------------------- /* -----------------------------------------------
* executes as standard task * executes as standard task
* ----------------------------------------------- */ * ----------------------------------------------- */
import * as plugins from './mod.plugins.js'; import * as plugins from "./mod.plugins.js";
import * as paths from '../paths.js'; import * as paths from "../paths.js";
import type { ICliMode } from "../helpers.climode.js";
import { getCliMode, printJson } from "../helpers.climode.js";
import { logger } from '../gitzone.logging.js'; import { logger } from "../gitzone.logging.js";
export let run = async () => { type ICommandHelpSummary = {
console.log(''); name: string;
console.log('╭─────────────────────────────────────────────────────────────╮'); description: string;
console.log('│ gitzone - Development Workflow CLI │'); };
console.log('╰─────────────────────────────────────────────────────────────╯');
console.log(''); const commandSummaries: ICommandHelpSummary[] = [
{
name: "commit",
description:
"Analyze changes and create semantic source commits",
},
{ name: "release", description: "Create versioned releases from pending changelog entries" },
{ name: "format", description: "Plan or apply project formatting changes" },
{ name: "config", description: "Read and change .smartconfig.json settings" },
{ name: "services", description: "Manage or configure development services" },
{ name: "tools", description: "Manage the global @git.zone toolchain" },
{ name: "template", description: "Create a project from a template" },
{ name: "open", description: "Open project assets and CI pages" },
{ name: "docker", description: "Run Docker-related maintenance tasks" },
{
name: "deprecate",
description: "Deprecate npm packages across registries",
},
{ name: "meta", description: "Run meta-repository commands" },
{ name: "start", description: "Prepare a project for local work" },
{ name: "helpers", description: "Run helper utilities" },
];
export let run = async (argvArg: any = {}) => {
const mode = await getCliMode(argvArg);
const requestedCommandHelp =
argvArg._?.[0] === "help" ? argvArg._?.[1] : undefined;
if (mode.help || requestedCommandHelp) {
await showHelp(mode, requestedCommandHelp);
return;
}
if (!mode.interactive) {
await showHelp(mode);
return;
}
console.log("");
console.log(
"╭─────────────────────────────────────────────────────────────╮",
);
console.log(
"│ gitzone - Development Workflow CLI │",
);
console.log(
"╰─────────────────────────────────────────────────────────────╯",
);
console.log("");
const interactInstance = new plugins.smartinteract.SmartInteract(); const interactInstance = new plugins.smartinteract.SmartInteract();
const response = await interactInstance.askQuestion({ const response = await interactInstance.askQuestion({
type: 'list', type: "list",
name: 'action', name: "action",
message: 'What would you like to do?', message: "What would you like to do?",
default: 'commit', default: "commit",
choices: [ choices: [
{ name: 'Commit changes (semantic versioning)', value: 'commit' }, { name: "Commit changes", value: "commit" },
{ name: 'Format project files', value: 'format' }, { name: "Release pending changes", value: "release" },
{ name: 'Configure release settings', value: 'config' }, { name: "Format project files", value: "format" },
{ name: 'Create from template', value: 'template' }, { name: "Configure release settings", value: "config" },
{ name: 'Manage dev services (MongoDB, S3)', value: 'services' }, { name: "Create from template", value: "template" },
{ name: 'Open project assets', value: 'open' }, { name: "Manage dev services (MongoDB, S3)", value: "services" },
{ name: 'Show help', value: 'help' }, { name: "Manage global @git.zone tools", value: "tools" },
{ name: "Open project assets", value: "open" },
{ name: "Show help", value: "help" },
], ],
}); });
const action = (response as any).value; const action = (response as any).value;
switch (action) { switch (action) {
case 'commit': { case "commit": {
const modCommit = await import('../mod_commit/index.js'); const modCommit = await import("../mod_commit/index.js");
await modCommit.run({ _: ['commit'] }); await modCommit.run({ _: ["commit"] });
break; break;
} }
case 'format': { case "release": {
const modFormat = await import('../mod_format/index.js'); const modRelease = await import("../mod_release/index.js");
await modRelease.run({ _: ["release"] });
break;
}
case "format": {
const modFormat = await import("../mod_format/index.js");
await modFormat.run({ interactive: true }); await modFormat.run({ interactive: true });
break; break;
} }
case 'config': { case "config": {
const modConfig = await import('../mod_config/index.js'); const modConfig = await import("../mod_config/index.js");
await modConfig.run({ _: ['config'] }); await modConfig.run({ _: ["config"] });
break; break;
} }
case 'template': { case "template": {
const modTemplate = await import('../mod_template/index.js'); const modTemplate = await import("../mod_template/index.js");
await modTemplate.run({ _: ['template'] }); await modTemplate.run({ _: ["template"] });
break; break;
} }
case 'services': { case "services": {
const modServices = await import('../mod_services/index.js'); const modServices = await import("../mod_services/index.js");
await modServices.run({ _: ['services'] }); await modServices.run({ _: ["services"] });
break; break;
} }
case 'open': { case "tools": {
const modOpen = await import('../mod_open/index.js'); const modTools = await import("../mod_tools/index.js");
await modOpen.run({ _: ['open'] }); await modTools.run({ _: ["tools"] });
break; break;
} }
case 'help': case "open": {
showHelp(); const modOpen = await import("../mod_open/index.js");
await modOpen.run({ _: ["open"] });
break;
}
case "help":
await showHelp(mode);
break; break;
} }
}; };
function showHelp(): void { export async function showHelp(
console.log(''); mode: ICliMode,
console.log('Usage: gitzone <command> [options]'); commandName?: string,
console.log(''); ): Promise<void> {
console.log('Commands:'); if (commandName) {
console.log(' commit Create a semantic commit with versioning'); const handled = await showCommandHelp(commandName, mode);
console.log(' format Format and standardize project files'); if (handled) {
console.log(' config Manage release registry configuration'); return;
console.log(' template Create a new project from template'); }
console.log(' services Manage dev services (MongoDB, S3/MinIO)'); }
console.log(' open Open project assets (GitLab, npm, etc.)');
console.log(' docker Docker-related operations'); if (mode.json) {
console.log(' deprecate Deprecate a package on npm'); printJson({
console.log(' meta Run meta commands'); name: "gitzone",
console.log(' start Start working on a project'); usage: "gitzone <command> [options]",
console.log(' helpers Run helper utilities'); commands: commandSummaries,
console.log(''); globalFlags: [
console.log('Run gitzone <command> --help for more information on a command.'); { flag: "--help, -h", description: "Show help output" },
console.log(''); {
flag: "--json",
description: "Emit machine-readable JSON when supported",
},
{
flag: "--plain",
description: "Use plain text output when supported",
},
{
flag: "--agent",
description: "Prefer non-interactive machine-friendly output",
},
{
flag: "--no-interactive",
description: "Disable prompts and interactive menus",
},
{
flag: "--no-check-updates",
description: "Skip the update check banner",
},
],
});
return;
}
console.log("");
console.log("Usage: gitzone <command> [options]");
console.log("");
console.log("Commands:");
for (const commandSummary of commandSummaries) {
console.log(
` ${commandSummary.name.padEnd(11)} ${commandSummary.description}`,
);
}
console.log("");
console.log("Global flags:");
console.log(" --help, -h Show help output");
console.log(
" --json Emit machine-readable JSON when supported",
);
console.log(" --plain Use plain text output when supported");
console.log(
" --agent Prefer non-interactive machine-friendly output",
);
console.log(" --no-interactive Disable prompts and interactive menus");
console.log(" --no-check-updates Skip the update check banner");
console.log("");
console.log("Examples:");
console.log(" gitzone help commit");
console.log(" gitzone config show --json");
console.log(" gitzone commit recommend --json");
console.log(" gitzone release --plan");
console.log(" gitzone format plan --json");
console.log(" gitzone services set mongodb,minio");
console.log(" gitzone tools update");
console.log("");
console.log("Run gitzone <command> --help for command-specific usage.");
console.log("");
}
async function showCommandHelp(
commandName: string,
mode: ICliMode,
): Promise<boolean> {
switch (commandName) {
case "commit": {
const modCommit = await import("../mod_commit/index.js");
modCommit.showHelp(mode);
return true;
}
case "release": {
const modRelease = await import("../mod_release/index.js");
modRelease.showHelp(mode);
return true;
}
case "config": {
const modConfig = await import("../mod_config/index.js");
modConfig.showHelp(mode);
return true;
}
case "format": {
const modFormat = await import("../mod_format/index.js");
modFormat.showHelp(mode);
return true;
}
case "services": {
const modServices = await import("../mod_services/index.js");
modServices.showHelp(mode);
return true;
}
case "tools": {
const modTools = await import("../mod_tools/index.js");
modTools.showHelp(mode);
return true;
}
default:
return false;
}
} }
+176
View File
@@ -0,0 +1,176 @@
import * as plugins from "./mod.plugins.js";
/** A globally installed package as reported by `pnpm list -g`. */
export interface IInstalledPackage {
  name: string;
  version: string;
}
/** Update status for a single installed package. */
export interface IPackageUpdateInfo {
  name: string;
  currentVersion: string;
  // "unknown" when no registry responded with a latest dist-tag.
  latestVersion: string;
  needsUpdate: boolean;
}
/** Availability and version status of the package manager (pnpm) itself. */
export interface IPackageManagerInfo {
  available: boolean;
  currentVersion: string;
  // null when the registry lookup failed.
  latestVersion: string | null;
  needsUpdate: boolean;
}
/**
 * Helper around pnpm for inspecting and updating globally installed
 * packages: detects pnpm, lists global @git.zone packages, resolves latest
 * versions from configured registries, and installs updates.
 */
export class PackageManagerUtil {
  // Shell runner used for every pnpm invocation.
  private shell = new plugins.smartshell.Smartshell({
    executor: "bash",
  });

  /** Returns true when a working pnpm binary is available on the PATH. */
  public async detectPnpm(): Promise<boolean> {
    try {
      const versionProbe = await this.shell.execSilent("pnpm --version 2>/dev/null");
      if (versionProbe.exitCode !== 0) {
        return false;
      }
      return Boolean(versionProbe.stdout.trim());
    } catch {
      return false;
    }
  }

  /**
   * Reports whether pnpm is installed and whether a newer release exists on
   * the npm registry.
   */
  public async getPnpmVersionInfo(): Promise<IPackageManagerInfo> {
    if (!(await this.detectPnpm())) {
      return {
        available: false,
        currentVersion: "unknown",
        latestVersion: null,
        needsUpdate: false,
      };
    }
    const installed = await this.getCurrentPnpmVersion();
    // pnpm itself is only published to the public npm registry.
    const published = await this.getLatestVersion("pnpm", ["https://registry.npmjs.org"]);
    let needsUpdate = false;
    if (published) {
      needsUpdate = this.isNewerVersion(installed, published);
    }
    return {
      available: true,
      currentVersion: installed,
      latestVersion: published,
      needsUpdate,
    };
  }

  /**
   * Lists globally installed packages under the @git.zone scope.
   * Returns an empty list when pnpm output is empty or unparseable.
   */
  public async getInstalledPackages(): Promise<IInstalledPackage[]> {
    const found: IInstalledPackage[] = [];
    try {
      // `|| true` keeps the exit code clean even when the listing fails.
      const listResult = await this.shell.execSilent("pnpm list -g --depth=0 --json 2>/dev/null || true");
      const rawJson = listResult.stdout.trim();
      if (!rawJson) {
        return found;
      }
      const parsed = JSON.parse(rawJson);
      const entries = Array.isArray(parsed) ? parsed : [parsed];
      for (const entry of entries) {
        const dependencyMap = entry.dependencies || {};
        for (const [pkgName, pkgInfo] of Object.entries(dependencyMap)) {
          if (pkgName.startsWith("@git.zone/")) {
            found.push({
              name: pkgName,
              version: (pkgInfo as any).version || "unknown",
            });
          }
        }
      }
    } catch {
      // Best-effort: treat parse/exec failures as "nothing installed".
      return found;
    }
    return found;
  }

  /**
   * Resolves the latest published version of a package, trying each registry
   * in order and returning the first successful answer (null if none reply).
   */
  public async getLatestVersion(
    packageName: string,
    registries = ["https://verdaccio.lossless.digital", "https://registry.npmjs.org"],
  ): Promise<string | null> {
    for (const registryUrl of registries) {
      const resolved = await this.getLatestVersionFromRegistry(registryUrl, packageName);
      if (resolved) {
        return resolved;
      }
    }
    return null;
  }

  /** Installs `<packageName>@latest` globally via pnpm; true on success. */
  public async installLatest(packageName: string): Promise<boolean> {
    const spec = `${packageName}@latest`;
    console.log(` Installing ${spec} via pnpm...`);
    try {
      // shellQuote guards against shell metacharacters in the specifier.
      const installResult = await this.shell.exec(`pnpm add -g ${shellQuote(spec)}`);
      return installResult.exitCode === 0;
    } catch {
      return false;
    }
  }

  /**
   * Compares two version strings component-wise; true when `latest` is
   * strictly newer than `current`.
   */
  public isNewerVersion(current: string, latest: string): boolean {
    const currentNums = normalizeSemver(current);
    const latestNums = normalizeSemver(latest);
    const width = Math.max(currentNums.length, latestNums.length);
    for (let i = 0; i < width; i++) {
      const cur = currentNums[i] || 0;
      const lat = latestNums[i] || 0;
      if (lat !== cur) {
        return lat > cur;
      }
    }
    return false;
  }

  /** Reads the installed pnpm version; "unknown" when it cannot be parsed. */
  private async getCurrentPnpmVersion(): Promise<string> {
    try {
      const probe = await this.shell.execSilent("pnpm --version 2>/dev/null");
      const match = /(\d+\.\d+\.\d+)/.exec(probe.stdout.trim());
      return match ? match[1] : "unknown";
    } catch {
      return "unknown";
    }
  }

  /**
   * Fetches a package document from one registry and extracts the `latest`
   * dist-tag. Requests are aborted after 8 seconds; all failures yield null.
   */
  private async getLatestVersionFromRegistry(
    registry: string,
    packageName: string,
  ): Promise<string | null> {
    // Scoped names contain a "/" that registries expect URL-encoded.
    const urlSafeName = packageName.replace("/", "%2f");
    const abortController = new AbortController();
    const abortTimer = setTimeout(() => abortController.abort(), 8000);
    try {
      const response = await fetch(`${registry}/${urlSafeName}`, {
        signal: abortController.signal,
        headers: {
          accept: "application/json",
        },
      });
      if (!response.ok) {
        return null;
      }
      const body = await response.json();
      const latestTag = (body as any)["dist-tags"]?.latest;
      if (typeof latestTag === "string" && latestTag.length > 0) {
        return latestTag;
      }
      return null;
    } catch {
      return null;
    } finally {
      clearTimeout(abortTimer);
    }
  }
}
/**
 * Converts a version string into numeric components for comparison,
 * e.g. "v1.2.3" -> [1, 2, 3].
 *
 * Prerelease ("-rc.1") and build-metadata ("+build.5") suffixes are stripped
 * before splitting. Previously they leaked into the numeric parts (e.g.
 * "1.0.0-rc.1" became [1, 0, 0, 1]), which made a prerelease compare as
 * newer than its own final release and let build metadata affect precedence.
 * With suffixes stripped, a prerelease compares equal to its final release —
 * full semver prerelease ordering is intentionally out of scope here.
 *
 * @param version - version string, optionally prefixed (e.g. "v1.2.3").
 * @returns numeric components; unparseable parts become 0.
 */
function normalizeSemver(version: string): number[] {
  return version
    .replace(/^[^\d]*/, "")
    .split(/[-+]/)[0]
    .split(".")
    .map((part) => parseInt(part, 10) || 0);
}
/**
 * Wraps a value in single quotes for safe interpolation into a POSIX shell
 * command. Embedded single quotes use the close-escape-reopen idiom ('\'').
 */
function shellQuote(value: string): string {
  const escaped = value.split("'").join("'\\''");
  return `'${escaped}'`;
}
+359
View File
@@ -0,0 +1,359 @@
import * as plugins from "./mod.plugins.js";
import { commitinfo } from "../00_commitinfo_data.js";
import type { ICliMode } from "../helpers.climode.js";
import { getCliMode, printJson } from "../helpers.climode.js";
import {
PackageManagerUtil,
type IInstalledPackage,
type IPackageUpdateInfo,
} from "./classes.packagemanager.js";
// The set of globally installable @git.zone packages that
// `gitzone tools install` / `gitzone tools update` manage.
export const GITZONE_PACKAGES = [
  "@git.zone/cli",
  "@git.zone/tsdoc",
  "@git.zone/tsbuild",
  "@git.zone/tstest",
  "@git.zone/tspublish",
  "@git.zone/tsbundle",
  "@git.zone/tsdocker",
  "@git.zone/tsview",
  "@git.zone/tswatch",
  "@git.zone/tsrust",
];
/**
 * Entry point for `gitzone tools`; dispatches to the requested subcommand.
 * Unknown subcommands (and explicit help requests) print the help screen.
 */
export const run = async (argvArg: any = {}): Promise<void> => {
  const mode = await getCliMode(argvArg);
  // Second positional argument selects the subcommand; default to help.
  const subcommand = argvArg._?.[1] || "help";
  if (mode.help || subcommand === "help") {
    showHelp(mode);
    return;
  }
  if (subcommand === "update") {
    await runUpdate(argvArg, mode);
  } else if (subcommand === "install") {
    await runInstall(argvArg, mode);
  } else {
    showHelp(mode);
  }
};
/**
 * Implements `gitzone tools update`: reports pnpm status, offers a gitzone
 * self-update first, then lists installed @git.zone packages and updates
 * those with a newer published version.
 *
 * The exact console output order is part of the command's contract (tables,
 * blank separators, hints), so statements below run in a fixed sequence.
 *
 * @param argvArg - raw CLI arguments (reads -v/--verbose).
 * @param mode - resolved CLI mode flags (yes/interactive control prompting).
 */
async function runUpdate(argvArg: any, mode: ICliMode): Promise<void> {
  const verbose = Boolean(argvArg.v || argvArg.verbose);
  const pmUtil = new PackageManagerUtil();
  console.log("Scanning for installed @git.zone packages...\n");
  const pnpmInfo = await pmUtil.getPnpmVersionInfo();
  // pnpm is a hard requirement; bail out early with a clear message.
  if (!pnpmInfo.available) {
    console.log("pnpm is required for gitzone tools update, but it was not found.");
    return;
  }
  console.log("Package manager:\n");
  console.log(" Name Current Latest Status");
  console.log(" ----------------------------------------------");
  const latestPnpm = (pnpmInfo.latestVersion || "unknown").padEnd(12);
  // latestVersion === null means the registry lookup failed entirely.
  const pnpmStatus = pnpmInfo.latestVersion === null
    ? "? Version unknown"
    : pnpmInfo.needsUpdate
      ? "Update available"
      : "Up to date";
  console.log(` ${"pnpm".padEnd(9)}${pnpmInfo.currentVersion.padEnd(12)}${latestPnpm}${pnpmStatus}`);
  console.log("");
  if (verbose) {
    console.log("Using pnpm as the supported global package manager.\n");
  }
  // Self-update first: a true return means the user should rerun the command
  // (gitzone was updated, or a non-interactive run needs -y).
  const selfUpdated = await handleSelfUpdate(pmUtil, mode);
  if (selfUpdated) {
    return;
  }
  const installedPackages = await pmUtil.getInstalledPackages();
  const packageInfos = await getPackageUpdateInfos(pmUtil, installedPackages);
  if (packageInfos.length === 0) {
    console.log("No managed @git.zone packages found installed globally.");
    return;
  }
  console.log("Installed @git.zone packages:\n");
  console.log(" Package Current Latest Status");
  console.log(" ------------------------------------------------------------");
  for (const packageInfo of packageInfos) {
    // "unknown" latest means no registry answered for this package.
    const status = packageInfo.latestVersion === "unknown"
      ? "? Version unknown"
      : packageInfo.needsUpdate
        ? "Update available"
        : "Up to date";
    console.log(
      ` ${packageInfo.name.padEnd(28)}${packageInfo.currentVersion.padEnd(12)}${packageInfo.latestVersion.padEnd(12)}${status}`,
    );
  }
  console.log("");
  await printMissingPackages(pmUtil, installedPackages);
  const packagesToUpdate = packageInfos.filter((packageInfo) => packageInfo.needsUpdate);
  if (packagesToUpdate.length === 0) {
    console.log("All managed packages are up to date.");
    return;
  }
  console.log(`Found ${packagesToUpdate.length} package(s) with available updates.\n`);
  // Without -y and without an interactive terminal we can only print a hint.
  if (!mode.yes && !mode.interactive) {
    console.log("Run gitzone tools update -y to update without prompts.");
    return;
  }
  let shouldUpdate = mode.yes;
  if (!shouldUpdate) {
    const interactInstance = new plugins.smartinteract.SmartInteract();
    const answer = await interactInstance.askQuestion({
      type: "confirm",
      name: "confirmUpdate",
      message: "Do you want to update these packages?",
      default: true,
    });
    shouldUpdate = answer.value === true;
  }
  if (!shouldUpdate) {
    console.log("Update cancelled.");
    return;
  }
  await installPackages(pmUtil, packagesToUpdate.map((packageInfo) => packageInfo.name), "updated");
}
/**
 * Implements `gitzone tools install`: finds managed @git.zone packages that
 * are not installed globally and installs the selected (or all) missing ones
 * via pnpm.
 *
 * @param argvArg - raw CLI arguments (reads -v/--verbose).
 * @param mode - resolved CLI mode flags (yes/interactive control prompting).
 */
async function runInstall(argvArg: any, mode: ICliMode): Promise<void> {
  const verbose = Boolean(argvArg.v || argvArg.verbose);
  const pmUtil = new PackageManagerUtil();
  console.log("Scanning for missing @git.zone packages...\n");
  const pnpmAvailable = await pmUtil.detectPnpm();
  // pnpm is a hard requirement; bail out early with a clear message.
  if (!pnpmAvailable) {
    console.log("pnpm is required for gitzone tools install, but it was not found.");
    return;
  }
  if (verbose) {
    console.log("Using pnpm as the supported global package manager.\n");
  }
  const installedPackages = await pmUtil.getInstalledPackages();
  const installedNames = new Set(installedPackages.map((packageInfo) => packageInfo.name));
  const missingPackages = GITZONE_PACKAGES.filter((packageName) => !installedNames.has(packageName));
  if (missingPackages.length === 0) {
    console.log("All managed @git.zone packages are already installed.");
    return;
  }
  console.log(`Found ${missingPackages.length} missing package(s).\n`);
  // Without -y and without an interactive terminal: only report and hint.
  if (!mode.yes && !mode.interactive) {
    await printPackageListWithLatest(pmUtil, missingPackages);
    console.log("Run gitzone tools install -y to install all missing packages without prompts.");
    return;
  }
  let selectedPackages = missingPackages;
  if (!mode.yes) {
    // The registry lookups are independent, so resolve them in parallel
    // instead of one-by-one; Promise.all preserves the input order for the
    // choice list shown to the user.
    const choicesWithVersions = await Promise.all(
      missingPackages.map(async (packageName) => {
        const latest = await pmUtil.getLatestVersion(packageName);
        return {
          name: `${packageName}${latest ? `@${latest}` : ""}`,
          value: packageName,
        };
      }),
    );
    const interactInstance = new plugins.smartinteract.SmartInteract();
    const answer = await interactInstance.askQuestion({
      type: "checkbox",
      name: "packages",
      message: "Select packages to install:",
      default: missingPackages,
      choices: choicesWithVersions,
    });
    selectedPackages = answer.value as string[];
    if (selectedPackages.length === 0) {
      console.log("No packages selected. Nothing to install.");
      return;
    }
  }
  await installPackages(pmUtil, selectedPackages, "installed");
}
/**
 * Offers to update the @git.zone/cli package itself before any other
 * managed package is touched.
 *
 * @returns true when the caller should stop (gitzone was just updated and a
 *   rerun is needed, or a non-interactive run requires -y); false when the
 *   update flow may continue with the current version.
 */
async function handleSelfUpdate(
  pmUtil: PackageManagerUtil,
  mode: ICliMode,
): Promise<boolean> {
  console.log("Checking for gitzone self-update...\n");
  const installedCli = commitinfo.version;
  const publishedCli = await pmUtil.getLatestVersion("@git.zone/cli");
  const updateAvailable =
    Boolean(publishedCli) && pmUtil.isNewerVersion(installedCli, publishedCli);
  if (!updateAvailable) {
    console.log(` @git.zone/cli ${installedCli} Up to date\n`);
    return false;
  }
  console.log(` @git.zone/cli ${installedCli} -> ${publishedCli} Update available\n`);
  if (!mode.yes && !mode.interactive) {
    console.log("Run gitzone tools update -y to update gitzone first.");
    return true;
  }
  let confirmed = mode.yes;
  if (!confirmed) {
    const interact = new plugins.smartinteract.SmartInteract();
    const reply = await interact.askQuestion({
      type: "confirm",
      name: "confirmSelfUpdate",
      message: "Do you want to update gitzone itself first?",
      default: true,
    });
    confirmed = reply.value === true;
  }
  if (!confirmed) {
    console.log("Skipping gitzone self-update.\n");
    return false;
  }
  const installed = await pmUtil.installLatest("@git.zone/cli");
  if (!installed) {
    console.log("\ngitzone self-update failed. Continuing with the current version.\n");
    return false;
  }
  console.log("\ngitzone has been updated. Re-run gitzone tools update to check remaining packages.");
  return true;
}
/**
 * Builds update info for every installed package that is in the managed
 * GITZONE_PACKAGES set.
 *
 * The per-package registry lookups are independent, so they now run in
 * parallel via Promise.all (previously awaited one-by-one in a loop);
 * Promise.all preserves the order of the input list.
 *
 * @param pmUtil - package manager helper used for registry lookups.
 * @param installedPackages - globally installed packages (name + version).
 * @returns one IPackageUpdateInfo per managed installed package;
 *   latestVersion is "unknown" when no registry answered.
 */
async function getPackageUpdateInfos(
  pmUtil: PackageManagerUtil,
  installedPackages: IInstalledPackage[],
): Promise<IPackageUpdateInfo[]> {
  const managedPackages = installedPackages.filter((installedPackage) =>
    GITZONE_PACKAGES.includes(installedPackage.name),
  );
  return Promise.all(
    managedPackages.map(async (installedPackage) => {
      const latestVersion = await pmUtil.getLatestVersion(installedPackage.name);
      return {
        name: installedPackage.name,
        currentVersion: installedPackage.version,
        latestVersion: latestVersion || "unknown",
        needsUpdate: latestVersion
          ? pmUtil.isNewerVersion(installedPackage.version, latestVersion)
          : false,
      };
    }),
  );
}
/**
 * Prints the managed @git.zone packages that are not currently installed,
 * together with their latest published versions. Prints nothing when the
 * full managed set is installed.
 */
async function printMissingPackages(
  pmUtil: PackageManagerUtil,
  installedPackages: IInstalledPackage[],
): Promise<void> {
  const installedNames = new Set(installedPackages.map((packageInfo) => packageInfo.name));
  const missing: string[] = [];
  for (const candidate of GITZONE_PACKAGES) {
    if (!installedNames.has(candidate)) {
      missing.push(candidate);
    }
  }
  if (missing.length === 0) {
    return;
  }
  console.log("Not installed (managed @git.zone packages):\n");
  await printPackageListWithLatest(pmUtil, missing);
  console.log("Run gitzone tools install to install missing packages.\n");
}
/**
 * Prints a small table of package names with their latest published
 * versions ("unknown" when no registry answered).
 *
 * The registry lookups are independent, so they now run in parallel via
 * Promise.all (previously awaited sequentially inside the print loop);
 * Promise.all preserves input order, so the table rows are unchanged.
 *
 * @param pmUtil - package manager helper used for registry lookups.
 * @param packageNames - package names to list, printed in the given order.
 */
async function printPackageListWithLatest(
  pmUtil: PackageManagerUtil,
  packageNames: string[],
): Promise<void> {
  console.log(" Package Latest");
  console.log(" ----------------------------------------");
  const latestVersions = await Promise.all(
    packageNames.map((packageName) => pmUtil.getLatestVersion(packageName)),
  );
  packageNames.forEach((packageName, index) => {
    console.log(` ${packageName.padEnd(28)} ${latestVersions[index] || "unknown"}`);
  });
  console.log("");
}
/**
 * Installs each named package sequentially via pnpm, printing a per-package
 * result line and a final summary.
 *
 * @param pmUtil - package manager helper performing the installs.
 * @param packageNames - packages to install, processed in order.
 * @param action - verb used in the output ("installed" or "updated").
 */
async function installPackages(
  pmUtil: PackageManagerUtil,
  packageNames: string[],
  action: "installed" | "updated",
): Promise<void> {
  // Installs run one at a time on purpose: pnpm mutates the global store.
  const results: boolean[] = [];
  for (const packageName of packageNames) {
    const ok = await pmUtil.installLatest(packageName);
    results.push(ok);
    if (ok) {
      console.log(` ${packageName} ${action} successfully`);
    } else {
      console.log(` ${packageName} failed`);
    }
  }
  const failCount = results.filter((ok) => !ok).length;
  const successCount = results.length - failCount;
  console.log("");
  if (failCount === 0) {
    console.log(`All ${successCount} package(s) ${action} successfully.`);
  } else {
    console.log(`${successCount} package(s) ${action}, ${failCount} failed.`);
  }
}
/**
 * Prints help for `gitzone tools`. Emits a machine-readable JSON document
 * when --json mode is active, otherwise a plain-text usage screen.
 */
export function showHelp(mode?: ICliMode): void {
  if (mode?.json) {
    printJson({
      name: "gitzone tools",
      usage: "gitzone tools <command> [options]",
      commands: [
        { name: "update", description: "Check and update globally installed @git.zone packages" },
        { name: "install", description: "Install missing managed @git.zone packages" },
      ],
      flags: [
        { flag: "-y, --yes", description: "Run without confirmation prompts" },
        { flag: "-v, --verbose", description: "Show package manager diagnostics" },
      ],
      packageManager: "pnpm",
      managedPackages: GITZONE_PACKAGES,
    });
    return;
  }
  const helpLines = [
    "",
    "Usage: gitzone tools <command> [options]",
    "",
    "Commands:",
    " update Check and update globally installed @git.zone packages",
    " install Install missing managed @git.zone packages",
    "",
    "Options:",
    " -y, --yes Run without confirmation prompts",
    " -v, --verbose Show package manager diagnostics",
    "",
    "Examples:",
    " gitzone tools update",
    " gitzone tools update -y",
    " gitzone tools install",
    "",
  ];
  for (const line of helpLines) {
    console.log(line);
  }
}
+1
View File
@@ -0,0 +1 @@
// Re-export the shared top-level plugin namespace for this module.
export * from "../plugins.js";
+2 -2
View File
@@ -1,6 +1,6 @@
import * as smartlog from '@push.rocks/smartlog'; import * as smartlog from '@push.rocks/smartlog';
import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local'; import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local';
import * as npmextra from '@push.rocks/npmextra'; import * as smartconfig from '@push.rocks/smartconfig';
import * as path from 'path'; import * as path from 'path';
import * as projectinfo from '@push.rocks/projectinfo'; import * as projectinfo from '@push.rocks/projectinfo';
import * as smartcli from '@push.rocks/smartcli'; import * as smartcli from '@push.rocks/smartcli';
@@ -20,7 +20,7 @@ export const smartfs = new SmartFs(new SmartFsProviderNode());
export { export {
smartlog, smartlog,
smartlogDestinationLocal, smartlogDestinationLocal,
npmextra, smartconfig,
path, path,
projectinfo, projectinfo,
smartcli, smartcli,
-2
View File
@@ -1,7 +1,5 @@
{ {
"compilerOptions": { "compilerOptions": {
"experimentalDecorators": true,
"useDefineForClassFields": false,
"target": "ES2022", "target": "ES2022",
"module": "NodeNext", "module": "NodeNext",
"moduleResolution": "NodeNext", "moduleResolution": "NodeNext",