Compare commits


17 Commits

Author SHA1 Message Date
05b170cbac feat(services): Add comprehensive development services management (v1.17.0)
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
- Implemented gitzone services command for managing MongoDB and MinIO containers
- Added smart port assignment (20000-30000 range) to avoid conflicts
- Project-specific container names for complete isolation
- Data persistence in .nogit/ directories
- MongoDB Compass connection string generation with network IP detection
- Auto-configuration via .nogit/env.json with secure defaults
- Commands: start, stop, restart, status, config, compass, logs, remove, clean
- Interactive confirmations for destructive operations
- Comprehensive documentation and Task Venture Capital GmbH legal update
2025-08-14 14:38:27 +00:00
b320af0b61 1.16.10
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 09:46:34 +00:00
49e1ee1f39 fix(format): Improve concurrency control in caching and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing. 2025-08-08 09:46:34 +00:00
cef31cf1ff 1.16.9
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 06:50:58 +00:00
74ecdde1ac fix(format): Improve concurrency control in cache and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing 2025-08-08 06:50:58 +00:00
74a8229e43 1.16.8
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 06:25:40 +00:00
859cbc733d fix(format): Improve concurrency control in cache and rollback management with mutex locking and refine formatting details 2025-08-08 06:25:40 +00:00
d32d47b706 1.16.7
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 05:48:41 +00:00
fd90cfe895 fix(core): Improve formatting, logging, and rollback integrity in core modules 2025-08-08 05:48:41 +00:00
c48f48fc8b 1.16.6
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 05:43:34 +00:00
e21e7f0850 fix(changecache): Improve cache manifest validation and atomic file writes; add local settings and overrides 2025-08-08 05:43:34 +00:00
5f561527f9 1.16.5
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 05:34:54 +00:00
9f5f568c3f fix(prettier): Improve file selection in Prettier formatter, remove legacy package overrides, and update CI template indentation 2025-08-08 05:34:54 +00:00
39a31a4304 1.16.4
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 05:28:02 +00:00
b629a7d70b fix(prettier): Improve file exclusion in the Prettier formatter to skip unnecessary files and directories. 2025-08-08 05:28:01 +00:00
4003944139 1.16.3
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-08-08 05:18:19 +00:00
83d374dffd fix(changecache/prettier): Skip directories during file processing to prevent errors in changecache and prettier formatting 2025-08-08 05:18:19 +00:00
61 changed files with 2910 additions and 919 deletions

.gitignore vendored (2 lines changed)

@@ -17,3 +17,5 @@ dist/
dist_*/
#------# custom
.serena
test-output.json


@@ -27,8 +27,8 @@ auditProductionDependencies:
image: code.foss.global/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
tags:
- private
- docker


@@ -27,8 +27,8 @@ auditProductionDependencies:
image: code.foss.global/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
tags:
- private
- docker


@@ -25,8 +25,8 @@ auditProductionDependencies:
image: code.foss.global/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
- npmci command npm config set registry https://registry.npmjs.org
- npmci command pnpm audit --audit-level=high --prod
tags:
- private
- docker


@@ -2,4 +2,3 @@ runafter:
- git add -A && git commit -m initial
- git push origin master
- gitzone meta update


@@ -1,18 +1,101 @@
# Changelog
## 2025-08-14 - 1.17.0 - feat(services)
Add comprehensive development services management for MongoDB and MinIO containers
- Implemented `gitzone services` command for managing local development services
- Added MongoDB and MinIO (S3-compatible) container orchestration
- Smart port assignment (20000-30000 range) to avoid conflicts between projects
- Project-specific container names for complete isolation
- Data persistence in `.nogit/` directories
- MongoDB Compass connection string generation with network IP detection
- Auto-configuration via `.nogit/env.json` with secure defaults
- Commands: start, stop, restart, status, config, compass, logs, remove, clean
- Interactive confirmations for destructive operations
## 2025-08-08 - 1.16.10 - fix(format)
Improve concurrency control in caching and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing.
- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
- Updated gitignore logic to detect and preserve custom sections
- Enhanced Prettier batching and file formatting for better performance
## 2025-08-08 - 1.16.9 - fix(format)
Improve concurrency control in cache and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing
- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
- Updated gitignore logic to detect and preserve existing custom sections from various markers
- Simplified Prettier formatter to process files sequentially, skip files without extensions, and log detailed status
- Minor refactoring in base formatter and tsconfig file updates for improved reliability
## 2025-08-08 - 1.16.8 - fix(format)
Improve concurrency control in cache and rollback management with mutex locking and refine formatting details
- Added 'withMutex' functions in ChangeCache and RollbackManager to synchronize file I/O operations
- Introduced static mutex maps to prevent race conditions during manifest updates
- Fixed minor formatting issues in commit info and package.json
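The per-key mutex described in this entry can be as simple as a chained promise per manifest path. A minimal sketch of that idea follows; it is illustrative only, and the class and method names are hypothetical rather than the actual ChangeCache/RollbackManager code.
```typescript
// Illustrative per-key mutex built from a promise chain; not the real
// gitzone implementation, just the pattern the entry above describes.
class KeyedMutex {
  private static chains = new Map<string, Promise<unknown>>();

  static async withMutex<T>(key: string, task: () => Promise<T>): Promise<T> {
    // Queue the task behind whatever is already pending for this key.
    const previous = KeyedMutex.chains.get(key) ?? Promise.resolve();
    const current = previous.then(task, task);
    KeyedMutex.chains.set(key, current.catch(() => undefined));
    return current;
  }
}

// Usage: serialize all manifest writes for one cache file (path made up).
async function updateManifestSafely(updateFn: () => Promise<void>) {
  await KeyedMutex.withMutex('.nogit/cache-manifest.json', updateFn);
}
```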
## 2025-08-08 - 1.16.7 - fix(core)
Improve formatting, logging, and rollback integrity in core modules
- Add .claude/settings.local.json with defined permissions for allowed commands
- Standardize formatting in package.json, commit info, and configuration files
- Refactor rollback manager to use atomic manifest writes and validate manifest structure
- Enhance logging messages and overall code clarity in CLI and commit modules
## 2025-08-08 - 1.16.6 - fix(changecache)
Improve cache manifest validation and atomic file writes; add local settings and overrides
- Add manifest structure validation and default fallback in getManifest
- Implement atomic write in saveManifest using a temporary file and rename strategy
- Enhance error handling and cleanup for corrupted manifest files
- Introduce new .claude/settings.local.json for project-specific permission configuration
- Add an empty assets/overrides.json file for future overrides
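The temporary-file-plus-rename approach mentioned above is a common pattern for crash-safe JSON writes; a minimal sketch of the pattern (not the actual saveManifest code):
```typescript
import { promises as fs } from 'fs';
import * as path from 'path';

// Write JSON atomically: write to a temp file in the same directory,
// then rename over the target so readers never see a half-written file.
async function writeJsonAtomic(targetPath: string, data: unknown): Promise<void> {
  const tempPath = path.join(
    path.dirname(targetPath),
    `.${path.basename(targetPath)}.${process.pid}.tmp`,
  );
  await fs.writeFile(tempPath, JSON.stringify(data, null, 2), 'utf8');
  await fs.rename(tempPath, targetPath); // atomic on the same filesystem
}
```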
## 2025-08-08 - 1.16.5 - fix(prettier)
Improve file selection in Prettier formatter, remove legacy package overrides, and update CI template indentation
- Added .claude/settings.local.json with updated permission settings for local commands
- Removed unnecessary overrides from assets/overrides.json and cleared packageManager overrides in package.json
- Adjusted CI template files (ci_default_gitlab, ci_default_private_gitlab, ci_docker_gitlab) for consistent indentation and formatting
- Refined Prettier formatter logic by defining include directories, root config files, and filtering duplicates instead of manual exclusion
## 2025-08-08 - 1.16.4 - fix(prettier)
Improve file exclusion in the Prettier formatter to skip unnecessary files and directories.
- Added exclusion patterns for node_modules, .git, dist, .nogit, coverage, .nyc_output, vendor, bower_components, jspm_packages, and minified files.
- Optimized filtering logic to ensure only valid files are processed.
## 2025-08-08 - 1.16.3 - fix(changecache/prettier)
Skip directories during file processing to prevent errors in changecache and prettier formatting
- Removed unnecessary await on synchronous file reads in changecache
- Added directory checks in changecache to immediately skip directories
- Filtered out directories in prettier formatter to avoid processing non-files
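The 1.16.4 and 1.16.3 entries describe the same filtering theme: drop excluded directories, minified bundles, and anything that is not a regular file before handing paths to Prettier. A rough sketch of such a filter, using the exclusion list from the 1.16.4 entry (function name hypothetical):
```typescript
import { promises as fs } from 'fs';

const excludedSegments = [
  'node_modules', '.git', 'dist', '.nogit', 'coverage',
  '.nyc_output', 'vendor', 'bower_components', 'jspm_packages',
];

// Keep only real files that are not inside an excluded directory
// and are not minified bundles.
async function filterFormattableFiles(paths: string[]): Promise<string[]> {
  const results: string[] = [];
  for (const filePath of paths) {
    const segments = filePath.split('/');
    if (segments.some((segment) => excludedSegments.includes(segment))) continue;
    if (filePath.endsWith('.min.js') || filePath.endsWith('.min.css')) continue;
    const stats = await fs.stat(filePath).catch(() => null);
    if (stats && stats.isFile()) results.push(filePath);
  }
  return results;
}
```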
## 2025-08-07 - 1.16.2 - fix(format)
Fix format command confirmation prompt to correctly check user response
- Fixed bug where format command always showed "cancelled" even when user confirmed
- Changed response check from `response.proceed` to `response.value` for SmartInteract compatibility
## 2025-08-04 - 1.16.1 - fix(package/config)
Move smartdiff dependency to runtime and add local bash permissions settings
- Moved '@push.rocks/smartdiff' from devDependencies to dependencies in package.json
- Added .claude/settings.local.json with allowed bash commands (grep, mkdir, find, ls)
## 2025-05-19 - 1.16.0 - feat(format)
Enhance format module with rollback, diff reporting, and improved parallel execution
- Implemented rollback functionality with backup management and automatic rollback on error
@@ -23,12 +106,14 @@ Enhance format module with rollback, diff reporting, and improved parallel execu
- Updated package.json to include new dependency '@push.rocks/smartdiff'
## 2025-05-14 - 1.15.5 - fix(dependencies)
Update @git.zone/tsdoc to ^1.5.0 and @types/node to ^22.15.18
- Bumped @git.zone/tsdoc from ^1.4.5 to ^1.5.0
- Bumped @types/node from ^22.15.17 to ^22.15.18
## 2025-05-13 - 1.15.4 - fix(package.json)
Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/smartlog, and @types/node to their latest releases
- Upgrade @git.zone/tsdoc from ^1.4.4 to ^1.4.5
@@ -37,6 +122,7 @@ Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/s
- Upgrade @types/node from ^22.14.1 to ^22.15.17
## 2025-04-15 - 1.15.3 - fix(deps)
update dependency versions and improve website template variable handling
- Bumped @git.zone/tsbuild from ^2.2.1 to ^2.3.2 and @types/node to ^22.14.1
@@ -44,56 +130,65 @@ update dependency versions and improve website template variable handling
- Refactored website template update to correctly supply variables with added logging
## 2025-04-15 - 1.15.2 - fix(website_update)
Await supplyVariables call in website update template
- Changed website template update to properly await the supplyVariables method
- Ensured asynchronous consistency in updating website template variables
## 2025-04-15 - 1.15.1 - fix(cli)
Refresh internal CLI tooling and configuration for consistency.
## 2025-04-15 - 1.15.0 - feat(config/template)
Add assetbrokerUrl and legalUrl fields to module config and update website template to supply these values
- Added assetbrokerUrl and legalUrl properties in ts/classes.gitzoneconfig.ts
- Updated ts/mod_format/format.templates.ts to pass assetbrokerUrl and legalUrl to website template
## 2025-04-15 - 1.14.1 - fix(package.json)
Add packageManager field to specify pnpm version for consistent package management
- Inserted packageManager property in package.json with pnpm version info to ensure reproducible dependency installs
## 2025-04-15 - 1.14.0 - feat(tsconfig_update)
Add runafter directive to trigger gitzone format after tsconfig update
- Added runafter configuration in assets/templates/tsconfig_update/.smartscaf.yml to automate formatting task
## 2025-03-07 - 1.13.1 - fix(cli)
Improve commit message logging
- Updated logging to display recommended next commit details.
- Enabled interactive prompt for choosing commit type and scope.
## 2025-02-28 - 1.13.0 - feat(templates)
Updated and added new TypeScript template files for npm projects
- Added new paths.ts and plugins.ts template files for npm projects.
- Removed outdated some.plugins.ts template file.
## 2025-02-25 - 1.12.8 - fix(metadata)
Updated package and npmextra json description and keywords for enhanced development workflow clarity
- Updated the description in package.json to focus on project setup and management.
- Aligned the keywords in both package.json and npmextra.json to include more relevant terms such as gitzone utilities, template management, and CI/CD.
## 2025-02-25 - 1.12.7 - fix(meta)
Fix issues in project metadata and configuration.
- Updated package metadata to ensure accurate project description and licensing.
- Ensured npm access level configuration consistency within npmextra.json.
## 2025-02-25 - 1.12.7 - fix(ci)
Updated dependencies and added CI/CD workflows.
- Updated several dependencies in package.json for compatibility and security.
@@ -102,6 +197,7 @@ Updated dependencies and added CI/CD workflows.
- Ensured consistent formatting with Prettier and TypeScript configurations.
## 2025-01-29 - 1.12.6 - fix(project)
Minor fixes and cleanup
- Removed outdated pages/ directory entry in .gitignore.
@@ -110,6 +206,7 @@ Minor fixes and cleanup
- Fixed formatting issues across various TypeScript files.
## 2025-01-29 - 1.12.5 - fix(cli)
Initial implementation of CLI utility with project management features
- Integration of various plugins for logging, command-line interactions, and project management.
@@ -117,34 +214,40 @@ Initial implementation of CLI utility with project management features
- Implement commands for packaging, versioning, and deprecating npm packages.
## 2025-01-29 - 1.12.2 - fix(format)
Add overrides for peek-readable in package.json formatting
- Added a URL correction in the packageJson repository information.
- Introduced support for pnpm overrides by including an `overrides.json` file.
## 2025-01-18 - 1.12.1 - fix(dependencies)
Update various package dependencies and Dockerfile base image
- Updated Dockerfile base image from 'alpinenpmci' to 'alpine_npmci'.
- Upgraded @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tsdoc, and other dependencies to their latest versions.
## 2025-01-17 - 1.12.0 - feat(build)
Update TypeScript configuration to support emit decorator metadata
- Added emitDecoratorMetadata to the tsconfig.json template in assets/templates/tsconfig_update.
## 2025-01-08 - 1.11.0 - feat(cli)
Add Docker command for cleaning up Docker system and extend deprecation command for multiple registries
- Added a new command 'docker' to handle Docker system cleanup operations.
- Improved the 'deprecate' command to support deprecating packages across multiple npm registry URLs.
## 2025-01-01 - 1.10.10 - fix(templates)
Corrected typo in template file comment
- Fixed repeated comment in the template file for services under 'assets/templates/service/ts/some.plugins.ts'.
## 2025-01-01 - 1.10.9 - fix(templates)
Correct template file paths and organization for service projects
- Moved 'some.classes.some.ts' to 'classes.some.ts'
@@ -152,60 +255,70 @@ Correct template file paths and organization for service projects
- Resolved incorrect import paths in service templates
## 2025-01-01 - 1.10.8 - fix(assets/templates)
Update CI template configurations to use module.githost
- Replaced occurrences of {{git.host}} with {{module.githost}} in CI workflow files
- Updated package dependencies for service template
## 2024-12-26 - 1.10.7 - fix(assets)
Correct URLs in templates and fix TypeScript declaration
- Updated incorrect URLs in Dockerfile templates to 'host.today'.
- Fixed type declaration for 'TemplateResult' in header.ts file.
## 2024-12-08 - 1.10.6 - fix(ci)
Corrected Docker image URL in CI templates
- Updated Docker image URL from 'code.foss.global/hosttoday' to 'code.foss.global/host.today' in default_nottags.yaml and default_tags.yaml.
- Adjusted gitignore template to include a custom section delineation.
## 2024-12-02 - 1.10.5 - fix(assets)
Update .gitignore template to remove pages directory
- Removed 'pages/' from the ignored directories in the .gitignore template.
## 2024-11-05 - 1.10.4 - fix(mod_format)
Correct file extension for TypeScript path configuration
- Fixed the TypeScript configuration to use correct file extensions for module subdirectories.
## 2024-10-27 - 1.10.3 - fix(mod_format)
Reorder TypeScript formatting steps in mod_format module
- Moved TypeScript configuration formatting earlier in the sequence for better logical consistency.
## 2024-10-27 - 1.10.2 - fix(format)
Add logging for tsconfig.json formatting
- Added an info log message for tsconfig.json formatting in format.tsconfig.ts.
## 2024-10-27 - 1.10.1 - fix(format)
Fixed async issue in tsconfig module lookup and corrected property access
## 2024-10-27 - 1.10.0 - feat(mod_format)
Add support for tsconfig.json formatting
- Added a new script to format tsconfig.json.
- Updated package.json to include `@git.zone/tspublish` as a dependency.
## 2024-10-23 - 1.9.126 - fix(format)
Remove redundant package.json property checks
- Removed property checks for `main`, `typings`, and `browserslist` from format.packagejson.ts
- This change streamlines the formatting process by removing unnecessary exits
## 2024-09-29 - 1.9.125 - fix(cli)
Fix package version configuration and formatting issues
- Updated metadata fields in package.json (repository URL, bugs URL, and homepage).
@@ -213,15 +326,17 @@ Fix package version configuration and formatting issues
- Added missing Prettier default TypeScript and Markdown configurations.
## 2024-09-27 - 1.9.124 - fix(cli)
Ensured proper existence and initialization of readme files
- Ensured readme.md and readme.hints.md files are created and initialized if they do not exist.
## 2024-09-27 - 1.9.123 - fix(core)
No changes detected
## 2024-09-27 - 1.9.123 - fix(core)
Update dependencies and improve build configurations
- Updated several dependencies in package.json for better compatibility
@@ -232,88 +347,111 @@ Update dependencies and improve build configurations
- Provided initial structure for readme and readme hints
## 2024-06-24 - 1.9.122 - fix(mod_commit)
Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.
- - Updated @git.zone/tsdoc to ^1.3.12
- - Updated @push.rocks/smartfile to ^11.0.21
## 2024-06-23 - 1.9.121 - fix(mod_commit)
Fix changelog template rendering by removing extra new line when no version details are provided.
- Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.
## 2024-06-23 - 1.9.120 - fix(mod_commit)
Handle edge case for empty version details in changelog formatting
- Added check for the length of the recommendedNextVersionDetails array
- Ensure no extra newline in changelog if there are no version details
## 2024-06-23 - 1.9.119 - fix(dependencies)
Update @git.zone/tsdoc to v1.3.8
- Updated @git.zone/tsdoc from v1.3.7 to v1.3.8 in package.json
## 2024-06-23 - 1.9.118 - fix(dependencies)
Update @git.zone/tsdoc to version 1.3.7
- Bump @git.zone/tsdoc from 1.3.6 to 1.3.7 in both package.json and pnpm-lock.yaml
## 2024-06-23 - 1.9.117 - fix(dependencies)
Update @git.zone/tsdoc dependency to v1.3.6
- Updated @git.zone/tsdoc version from 1.3.5 to 1.3.6 in package.json
- Updated pnpm-lock.yaml to reflect the new version of @git.zone/tsdoc
## 2024-06-23 - 1.9.116 - fix(dependencies)
Update @git.zone/tsdoc to version 1.3.5
- Updated the @git.zone/tsdoc dependency in package.json and pnpm-lock.yaml from version 1.3.4 to 1.3.5
- Removed the outdated changelog.md file.
## 2024-06-23 - 1.9.114 - fix(format)
Fixed formatting issues across multiple TypeScript files.
## 2024-06-23 - 1.9.113 - fix(mod_commit)
Remove extra new lines in changelog.
## 2024-06-23 - 1.9.112 - fix(core)
Update changelog formatting and remove outdated entries.
## 2024-06-23 - 1.9.111 - fix(changelog)
Remove outdated changelog entries and update formatting.
## 2024-06-23 - 1.9.110 - fix(dependencies)
Update @git.zone/tsdoc to version 1.3.4.
## 2024-06-23 - 1.9.109 - fix(changelog)
Remove outdated entries and adjust formatting in changelog.
## 2024-06-23 - 1.9.108 - fix(dependencies)
Update @git.zone/tsdoc dependency to version 1.3.2.
## 2024-06-23 - 1.9.107 - fix(changelog)
Remove placeholder entries and adjust formatting in changelog.
## 2024-06-23 - 1.9.106 - fix(dependencies)
Updated @git.zone/tsdoc from version 1.3.0 to 1.3.1.
## 2024-06-23 - 1.9.105 - fix(dependencies)
Updated @git.zone/tsdoc dependency from 1.2.2 to 1.3.0 in package.json and pnpm-lock.yaml.
## 2024-06-23 - 1.9.104 - fix(changelog)
Remove placeholder entries and adjust formatting in changelog.
## 2024-06-23 - 1.9.103 - fix(changelog)
Fix changelog to remove placeholder entries and adjust formatting.
## 2024-06-23 - 1.9.102 - fix(logging)
Optimize logger instantiation and configuration.
## 2024-06-23 - 1.9.101 - fix(metadata)
Ensure accurate project metadata in package.json.
## 2024-06-23 - 1.9.100 - fix(dependencies)
Updated @git.zone/tsdoc dependency version to ^1.2.2 in package.json and pnpm-lock.yaml.
## 2024-06-23 - 1.9.99 - fix(mod_commit)
Fix variable reassignment issue in changelog writing step.


@@ -1,7 +1,7 @@
{
"name": "@git.zone/cli",
"private": false,
"version": "1.16.2",
"version": "1.17.0",
"description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
"main": "dist_ts/index.ts",
"typings": "dist_ts/index.d.ts",
@@ -60,6 +60,11 @@
"@git.zone/tsbuild": "^2.3.2",
"@git.zone/tsrun": "^1.3.3",
"@git.zone/tstest": "^1.0.96",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.2.0",
"@push.rocks/smartinteract": "^2.0.16",
"@push.rocks/smartnetwork": "^4.1.2",
"@push.rocks/smartshell": "^3.2.3",
"@types/node": "^22.15.18"
},
"dependencies": {
@@ -73,11 +78,8 @@
"@push.rocks/projectinfo": "^5.0.2",
"@push.rocks/smartchok": "^1.0.34",
"@push.rocks/smartcli": "^4.0.11",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartdiff": "^1.0.3",
"@push.rocks/smartfile": "^11.2.0",
"@push.rocks/smartgulp": "^3.0.4",
"@push.rocks/smartinteract": "^2.0.15",
"@push.rocks/smartjson": "^5.0.20",
"@push.rocks/smartlegal": "^1.0.27",
"@push.rocks/smartlog": "^3.0.9",
@@ -89,7 +91,6 @@
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartscaf": "^4.0.16",
"@push.rocks/smartshell": "^3.2.3",
"@push.rocks/smartstream": "^3.2.5",
"@push.rocks/smartunique": "^3.0.9",
"@push.rocks/smartupdate": "^2.0.6",
@@ -113,9 +114,7 @@
"last 1 chrome versions"
],
"pnpm": {
"overrides": {
"peek-readable": "5.3.1"
}
"overrides": {}
},
"packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
}

pnpm-lock.yaml generated (143 lines changed)

@@ -4,9 +4,6 @@ settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
overrides:
peek-readable: 5.3.1
importers:
.:
@@ -41,18 +38,12 @@ importers:
'@push.rocks/smartcli':
specifier: ^4.0.11
version: 4.0.11
'@push.rocks/smartdelay':
specifier: ^3.0.5
version: 3.0.5
'@push.rocks/smartfile':
specifier: ^11.2.0
version: 11.2.0
'@push.rocks/smartdiff':
specifier: ^1.0.3
version: 1.0.3
'@push.rocks/smartgulp':
specifier: ^3.0.4
version: 3.0.4
'@push.rocks/smartinteract':
specifier: ^2.0.15
version: 2.0.16
'@push.rocks/smartjson':
specifier: ^5.0.20
version: 5.0.20
@@ -86,9 +77,6 @@ importers:
'@push.rocks/smartscaf':
specifier: ^4.0.16
version: 4.0.16
'@push.rocks/smartshell':
specifier: ^3.2.3
version: 3.2.3
'@push.rocks/smartstream':
specifier: ^3.2.5
version: 3.2.5
@@ -117,9 +105,21 @@ importers:
'@git.zone/tstest':
specifier: ^1.0.96
version: 1.0.96(@aws-sdk/credential-providers@3.750.0)(socks@2.8.4)(typescript@5.8.3)
'@push.rocks/smartdiff':
specifier: ^1.0.3
version: 1.0.3
'@push.rocks/smartdelay':
specifier: ^3.0.5
version: 3.0.5
'@push.rocks/smartfile':
specifier: ^11.2.0
version: 11.2.0
'@push.rocks/smartinteract':
specifier: ^2.0.16
version: 2.0.16
'@push.rocks/smartnetwork':
specifier: ^4.1.2
version: 4.1.2
'@push.rocks/smartshell':
specifier: ^3.2.3
version: 3.2.3
'@types/node':
specifier: ^22.15.18
version: 22.15.18
@@ -1006,6 +1006,9 @@ packages:
'@push.rocks/smartnetwork@3.0.2':
resolution: {integrity: sha512-s6CNGzQ1n/d/6cOKXbxeW6/tO//dr1woLqI01g7XhqTriw0nsm2G2kWaZh2J0VOguGNWBgQVCIpR0LjdRNWb3g==}
'@push.rocks/smartnetwork@4.1.2':
resolution: {integrity: sha512-TjucG72ooHgzAUpNu2LAv4iFoettmZq2aEWhhzIa7AKcOvt4yxsk3Vl73guhKRohTfhdRauPcH5OHISLUHJbYA==}
'@push.rocks/smartnpm@2.0.4':
resolution: {integrity: sha512-ljRPqnUsXzL5qnuAEt5POy0NnfKs7eYPuuJPJjYiK9VUdP/CyF4h14qTB4H816vNEuF7VU/ASRtz0qDlXmrztg==}
@@ -1024,6 +1027,9 @@ packages:
'@push.rocks/smartpdf@3.2.2':
resolution: {integrity: sha512-SKGNHz7HsgU6uVSVrRCL13kIeAFMvd4oQBLI3VmPcMkxXfWNPJkb6jKknqP8bhobWA/ryJS+3Dj///UELUvVKQ==}
'@push.rocks/smartping@1.0.8':
resolution: {integrity: sha512-Fvx1Db6hSsDOI6pdiCuS9GjtOX8ugx865YQrPg5vK2iw6Qj/srwyXcWLFYt+19WVKtvtWDJIAKbW+q3bXFsCeA==}
'@push.rocks/smartpnpm@1.0.6':
resolution: {integrity: sha512-AD0U4n53LBdBnj9MXAMF7cAqjyE0j3xbTH7Bd1v5ywjt3aFOJockAwDBOP+3dEK1QUHM17p+VP9HdX1faTCtzw==}
@@ -1582,6 +1588,9 @@ packages:
'@types/default-gateway@3.0.1':
resolution: {integrity: sha512-tpu0hp+AOIzwdAHyZPzLE5pCf9uT0pb+xZ76T4S7MrY2YTVq918Q7Q2VQ3KCVQqYxM7nxuCK/SL3X97jBEIeKQ==}
'@types/default-gateway@7.2.2':
resolution: {integrity: sha512-35C93fYQlnLKLASkMPoxRvok4fENwB3By9clRLd2I/08n/XRl0pCdf7EB17K5oMMwZu8NBYA8i66jH5r/LYBKA==}
'@types/diff@5.2.3':
resolution: {integrity: sha512-K0Oqlrq3kQMaO2RhfrNQX5trmt+XLyom88zS0u84nnIcLvFnRUMRRHmrGny5GSM+kNO9IZLARsdQHDzkhAgmrQ==}
@@ -2152,6 +2161,10 @@ packages:
resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==}
engines: {node: '>=12'}
clone-regexp@3.0.0:
resolution: {integrity: sha512-ujdnoq2Kxb8s3ItNBtnYeXdm07FcU0u8ARAT1lQ2YdMwQC+cdiXX8KoqMVuglztILivceTtp4ivqGSmEmhBUJw==}
engines: {node: '>=12'}
clone@2.1.2:
resolution: {integrity: sha1-G39Ln1kfHo+DZwQBYANFoCiHQ18=}
engines: {node: '>=0.8'}
@@ -2217,6 +2230,10 @@ packages:
resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==}
engines: {node: '>= 0.6'}
convert-hrtime@5.0.0:
resolution: {integrity: sha512-lOETlkIeYSJWcbbcvjRKGxVMXJR+8+OQb/mTPbA4ObPMytYIsUbuOE0Jzy60hjARYszq1id0j8KgVhC+WGZVTg==}
engines: {node: '>=12'}
convert-source-map@2.0.0:
resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==}
@@ -2796,6 +2813,10 @@ packages:
function-bind@1.1.2:
resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==}
function-timeout@0.1.1:
resolution: {integrity: sha512-0NVVC0TaP7dSTvn1yMiy6d6Q8gifzbvQafO46RtLG/kHJUBNd+pVRGOBoK44wNBvtSPUJRfdVvkFdD3p0xvyZg==}
engines: {node: '>=14.16'}
get-caller-file@2.0.5:
resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==}
engines: {node: 6.* || 8.* || >= 10.*}
@@ -2870,6 +2891,10 @@ packages:
resolution: {integrity: sha512-mThBblvlAF1d4O5oqyvN+ZxLAYwIJK7bpMxgYqPD9okW0C3qm5FFn7k811QrcuEBwaogR3ngOFoCfs6mRv7teQ==}
engines: {node: '>=14.16'}
got@13.0.0:
resolution: {integrity: sha512-XfBk1CxOOScDcMr9O1yKkNaQyy865NbYs+F7dr4H0LZMVgCj2Le59k6PqbNHoL5ToeaEQUYh6c6yMfVcc6SJxA==}
engines: {node: '>=16'}
gpt-tokenizer@2.9.0:
resolution: {integrity: sha512-YSpexBL/k4bfliAzMrRqn3M6+it02LutVyhVpDeMKrC/O9+pCe/5s8U2hYKa2vFLD5/vHhsKc8sOn/qGqII8Kg==}
@@ -3121,6 +3146,10 @@ packages:
resolution: {integrity: sha512-4B4XA2HEIm/PY+OSpeMBXr8pGWBYbXuHgjMAqrwbLO3CPTCAd9ArEJzBUKGZtk9viY6+aSfadGnWyjY3ydYZkw==}
engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0}
is-ip@5.0.1:
resolution: {integrity: sha512-FCsGHdlrOnZQcp0+XT5a+pYowf33itBalCl+7ovNXC/7o5BhIpG14M3OrpPPdBSIQJCm+0M5+9mO7S9VVTTCFw==}
engines: {node: '>=14.16'}
is-nan@1.3.2:
resolution: {integrity: sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==}
engines: {node: '>= 0.4'}
@@ -3145,6 +3174,10 @@ packages:
resolution: {integrity: sha512-MjYsKHO5O7mCsmRGxWcLWheFqN9DJ/2TmngvjKXihe6efViPqc274+Fx/4fYj/r03+ESvBdTXK0V6tA3rgez1g==}
engines: {node: '>= 0.4'}
is-regexp@3.1.0:
resolution: {integrity: sha512-rbku49cWloU5bSMI+zaRaXdQHXnthP6DZ/vLnfdSKyL4zUzuWnomtOEiZZOd+ioQ+avFo/qau3KPTc7Fjy1uPA==}
engines: {node: '>=12'}
is-stream@2.0.1:
resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==}
engines: {node: '>=8'}
@@ -4091,6 +4124,10 @@ packages:
resolution: {integrity: sha512-+6bkjnf0yQ4+tZV0zJv1017DiIF7y6R4yg17Mrhhkc25L7dtQtXWHgSCrz9BbLL4OeTFbPK4EALXqJUrwCIWXw==}
engines: {node: '>=14.16'}
public-ip@7.0.1:
resolution: {integrity: sha512-DdNcqcIbI0wEeCBcqX+bmZpUCvrDMJHXE553zgyG1MZ8S1a/iCCxmK9iTjjql+SpHSv4cZkmRv5/zGYW93AlCw==}
engines: {node: '>=18'}
pump@2.0.1:
resolution: {integrity: sha512-ruPMNRkN3MHP1cWJc9OWr+T/xDP0jhXYCLfJcBuX54hhfIBnaQmAUMfDcG4DM5UMWByBbJY69QSphm3jtDKIkA==}
@@ -4526,6 +4563,10 @@ packages:
stubborn-fs@1.2.5:
resolution: {integrity: sha512-H2N9c26eXjzL/S/K+i/RHHcFanE74dptvvjM8iwzwbVcWY/zjBbgRqF3K0DY4+OD+uTTASTBvDoxPDaPN02D7g==}
super-regex@0.2.0:
resolution: {integrity: sha512-WZzIx3rC1CvbMDloLsVw0lkZVKJWbrkJ0k1ghKFmcnPrW1+jWbgTkTEWVtD9lMdmI4jZEz40+naBxl1dCUhXXw==}
engines: {node: '>=14.16'}
supports-color@5.5.0:
resolution: {integrity: sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==}
engines: {node: '>=4'}
@@ -4581,6 +4622,10 @@ packages:
through2@4.0.2:
resolution: {integrity: sha512-iOqSav00cVxEEICeD7TjLB1sueEL+81Wpzp2bY17uZjZN0pWZPuo4suZ/61VujxmqSGFfgOcNuTZ85QJwNZQpw==}
time-span@5.1.0:
resolution: {integrity: sha512-75voc/9G4rDIJleOo4jPvN4/YC4GRZrY8yy1uU4lwrB3XEQbWve8zXoO5No4eFrGcTAMYyoY67p8jRQdtA1HbA==}
engines: {node: '>=12'}
tiny-worker@2.3.0:
resolution: {integrity: sha512-pJ70wq5EAqTAEl9IkGzA+fN0836rycEuz2Cn6yeZ6FRzlVS5IDOkFHpIoEsksPRQV34GDqXm65+OlnZqUSyK2g==}
@@ -6650,6 +6695,16 @@ snapshots:
public-ip: 6.0.2
systeminformation: 5.25.11
'@push.rocks/smartnetwork@4.1.2':
dependencies:
'@push.rocks/smartping': 1.0.8
'@push.rocks/smartpromise': 4.2.3
'@push.rocks/smartstring': 4.0.15
'@types/default-gateway': 7.2.2
isopen: 1.3.0
public-ip: 7.0.1
systeminformation: 5.25.11
'@push.rocks/smartnpm@2.0.4':
dependencies:
'@push.rocks/consolecolor': 2.0.2
@@ -6707,6 +6762,11 @@ snapshots:
- typescript
- utf-8-validate
'@push.rocks/smartping@1.0.8':
dependencies:
'@types/ping': 0.4.4
ping: 0.4.4
'@push.rocks/smartpnpm@1.0.6':
dependencies:
'@push.rocks/smartshell': 3.2.3
@@ -7670,6 +7730,8 @@ snapshots:
'@types/default-gateway@3.0.1': {}
'@types/default-gateway@7.2.2': {}
'@types/diff@5.2.3': {}
'@types/express-serve-static-core@4.19.6':
@@ -8326,6 +8388,10 @@ snapshots:
strip-ansi: 6.0.1
wrap-ansi: 7.0.0
clone-regexp@3.0.0:
dependencies:
is-regexp: 3.1.0
clone@2.1.2: {}
co-body@6.2.0:
@@ -8390,6 +8456,8 @@ snapshots:
content-type@1.0.5: {}
convert-hrtime@5.0.0: {}
convert-source-map@2.0.0: {}
cookie-signature@1.0.6: {}
@@ -9008,6 +9076,8 @@ snapshots:
function-bind@1.1.2: {}
function-timeout@0.1.1: {}
get-caller-file@2.0.5: {}
get-east-asian-width@1.3.0: {}
@@ -9123,6 +9193,20 @@ snapshots:
p-cancelable: 3.0.0
responselike: 3.0.0
got@13.0.0:
dependencies:
'@sindresorhus/is': 5.6.0
'@szmarczak/http-timer': 5.0.1
cacheable-lookup: 7.0.0
cacheable-request: 10.2.14
decompress-response: 6.0.0
form-data-encoder: 2.1.4
get-stream: 6.0.1
http2-wrapper: 2.2.1
lowercase-keys: 3.0.0
p-cancelable: 3.0.0
responselike: 3.0.0
gpt-tokenizer@2.9.0: {}
graceful-fs@4.2.10: {}
@@ -9393,6 +9477,11 @@ snapshots:
dependencies:
ip-regex: 5.0.0
is-ip@5.0.1:
dependencies:
ip-regex: 5.0.0
super-regex: 0.2.0
is-nan@1.3.2:
dependencies:
call-bind: 1.0.7
@@ -9413,6 +9502,8 @@ snapshots:
has-tostringtag: 1.0.2
hasown: 2.0.2
is-regexp@3.1.0: {}
is-stream@2.0.1: {}
is-stream@4.0.1: {}
@@ -10551,6 +10642,12 @@ snapshots:
got: 12.6.1
is-ip: 4.0.0
public-ip@7.0.1:
dependencies:
dns-socket: 4.2.2
got: 13.0.0
is-ip: 5.0.1
pump@2.0.1:
dependencies:
end-of-stream: 1.4.4
@@ -11102,6 +11199,12 @@ snapshots:
stubborn-fs@1.2.5: {}
super-regex@0.2.0:
dependencies:
clone-regexp: 3.0.0
function-timeout: 0.1.1
time-span: 5.1.0
supports-color@5.5.0:
dependencies:
has-flag: 3.0.0
@@ -11183,6 +11286,10 @@ snapshots:
dependencies:
readable-stream: 3.6.2
time-span@5.1.0:
dependencies:
convert-hrtime: 5.0.0
tiny-worker@2.3.0:
dependencies:
esm: 3.2.25


@@ -1,10 +1,11 @@
# Gitzone CLI - Development Hints
* the cli of the git.zone project.
- the cli of the git.zone project.
## Project Overview
Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local development cycles. It provides utilities for:
- Project initialization and templating (via smartscaf)
- Code formatting and standardization
- Version control and commit management
@@ -14,12 +15,14 @@ Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local
## Architecture
### Core Structure
- Main CLI entry: `cli.ts` / `cli.child.ts`
- Modular architecture with separate modules in `ts/mod_*` directories
- Each module handles specific functionality (format, commit, docker, etc.)
- Extensive use of plugins pattern via `plugins.ts` files
### Configuration Management
- Uses `npmextra.json` for all tool configuration
- Configuration stored under `gitzone` key in npmextra
- No separate `.gitzonerc` file - everything in npmextra.json
@@ -30,6 +33,7 @@ Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local
The format module is responsible for project standardization:
#### Current Modules:
1. **cleanup** - Removes obsolete files (yarn.lock, tslint.json, etc.)
2. **copy** - File copying with glob patterns (fully implemented)
3. **gitignore** - Creates/updates .gitignore from templates
@@ -42,6 +46,7 @@ The format module is responsible for project standardization:
10. **tsconfig** - Formats TypeScript configuration
#### Execution Order (Dependency-Based):
- Modules are now executed in parallel groups based on dependencies
- Independent modules run concurrently for better performance
- Dependency analyzer ensures correct execution order
@@ -182,7 +187,7 @@ gitzone format --clean-backups
## API Changes
- smartfile API updated to use fs.* and memory.* namespaces
- smartfile API updated to use fs._ and memory._ namespaces
- smartnpm requires instance creation: `new NpmRegistry()`
- All file operations now use updated APIs
- Type imports use `import type` for proper verbatim module syntax
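A short illustration of the API notes above; the import specifiers and the relative path are assumptions, not verified against the current package exports:
```typescript
// Illustrates the hints above; import paths/exports are assumptions.
import { NpmRegistry } from '@push.rocks/smartnpm'; // smartnpm now requires instance creation
import type { IGitzoneConfigData } from '../classes.gitzoneconfig.js'; // type-only import

const registry = new NpmRegistry();
```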

readme.md (164 lines changed)

@@ -7,7 +7,7 @@
## 🎯 What is gitzone?
gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, and seamless version control. Whether you're bootstrapping a new TypeScript project, maintaining code quality, or managing complex multi-repository setups, gitzone has got you covered.
gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, seamless version control, and development service orchestration. Whether you're bootstrapping a new TypeScript project, maintaining code quality, managing complex multi-repository setups, or spinning up local development databases, gitzone has got you covered.
## 🏃‍♂️ Quick Start
@@ -23,7 +23,7 @@ pnpm add -g @git.zone/cli
Once installed, you can use either `gitzone` or the shorter `gzone` command from anywhere in your terminal.
### Your First Command
### Your First Commands
```bash
# Create a new TypeScript npm package
@@ -32,12 +32,66 @@ gitzone template npm
# Format your entire codebase
gitzone format
# Start local MongoDB and MinIO services
gitzone services start
# Create a semantic commit
gitzone commit
```
## 🛠️ Core Features
### 🐳 Development Services Management (NEW!)
Effortlessly manage local MongoDB and MinIO (S3-compatible) services for your development environment:
```bash
gitzone services [command]
```
**Available commands:**
- **`start [service]`** - Start services (mongo|s3|all)
- **`stop [service]`** - Stop services (mongo|s3|all)
- **`restart [service]`** - Restart services
- **`status`** - Show current service status
- **`config`** - Display configuration details
- **`compass`** - Get MongoDB Compass connection string with network IP
- **`logs [service] [lines]`** - View service logs
- **`remove`** - Remove containers (preserves data)
- **`clean`** - Remove containers AND data (⚠️ destructive)
**Key features:**
- 🎲 **Smart port assignment** - Automatically assigns random ports (20000-30000) to avoid conflicts
- 📦 **Project isolation** - Each project gets its own containers with unique names
- 💾 **Data persistence** - Data stored in `.nogit/` directories survives container restarts
- 🔗 **MongoDB Compass support** - Instantly get connection strings for GUI access
- 🌐 **Network IP detection** - Automatically detects your local network IP for remote connections
- ⚙️ **Auto-configuration** - Creates `.nogit/env.json` with smart defaults
**Example workflow:**
```bash
# Start all services for your project
gitzone services start
# Check what's running
gitzone services status
# Get MongoDB Compass connection string
gitzone services compass
# Output: mongodb://defaultadmin:defaultpass@192.168.1.100:27018/myproject?authSource=admin
# View MongoDB logs
gitzone services logs mongo 50
# Stop services when done
gitzone services stop
```
The services are configured via `.nogit/env.json` which is automatically created with secure defaults and random ports for each project.
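As a rough illustration of how the connection string in the example output above can be assembled, the sketch below reads `.nogit/env.json` and picks a non-internal IPv4 address as the network IP. Field names follow the configuration format documented in the services plan; this is not the actual implementation.
```typescript
import { readFileSync } from 'fs';
import { networkInterfaces } from 'os';

// Pick the first non-internal IPv4 address as the "network IP".
function detectNetworkIp(): string {
  for (const addresses of Object.values(networkInterfaces())) {
    for (const address of addresses ?? []) {
      if (address.family === 'IPv4' && !address.internal) return address.address;
    }
  }
  return '127.0.0.1';
}

// Build a Compass-style connection string from the project's env.json.
function buildCompassString(envJsonPath = '.nogit/env.json'): string {
  const env = JSON.parse(readFileSync(envJsonPath, 'utf8'));
  const host = detectNetworkIp();
  return (
    `mongodb://${env.MONGODB_USER}:${env.MONGODB_PASS}` +
    `@${host}:${env.MONGODB_PORT}/${env.MONGODB_NAME}?authSource=admin`
  );
}
```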
### 📦 Project Templates
Instantly scaffold production-ready projects with best practices built-in:
@@ -47,12 +101,14 @@ gitzone template [template-name]
```
**Available templates:**
- **`npm`** - TypeScript npm package with testing, CI/CD, and full tooling
- **`service`** - Microservice architecture with Docker support
- **`website`** - Modern web application with LitElement and service workers
- **`wcc`** - Web Component Collection for reusable UI components
Each template comes pre-configured with:
- ✅ TypeScript with modern configurations
- ✅ Automated testing setup
- ✅ CI/CD pipelines (GitLab/GitHub)
@@ -81,6 +137,7 @@ gitzone format --verbose
```
**Format features:**
- 🔄 **Smart caching** - Only processes changed files
- 🛡️ **Rollback support** - Undo formatting changes if needed
- 📊 **Detailed reporting** - See exactly what changed
@@ -88,6 +145,7 @@ gitzone format --verbose
- 🎯 **Module-specific formatting** - Target specific formatters
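The "smart caching" feature above boils down to remembering a content hash per file and skipping files whose hash has not changed since the last run; a minimal sketch of that idea (illustrative only, not the actual ChangeCache code):
```typescript
import { createHash } from 'crypto';
import { promises as fs } from 'fs';

// Returns true if the file differs from the hash recorded in the manifest,
// and records the new hash so the next run can skip unchanged files.
async function hasChanged(
  filePath: string,
  manifest: Record<string, string>,
): Promise<boolean> {
  const content = await fs.readFile(filePath);
  const hash = createHash('sha256').update(content).digest('hex');
  const changed = manifest[filePath] !== hash;
  manifest[filePath] = hash;
  return changed;
}
```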
**Rollback capabilities:**
```bash
# List all available backups
gitzone format --list-backups
@@ -103,6 +161,7 @@ gitzone format --clean-backups
```
**Formatters included:**
- **Prettier** - JavaScript/TypeScript code formatting
- **License** - Ensure proper licensing
- **Package.json** - Standardize package configurations
@@ -121,6 +180,7 @@ gitzone commit
```
Features:
- 📝 Interactive commit message builder
- 🏷️ Automatic version bumping (major/minor/patch)
- 📜 Changelog generation
@@ -128,6 +188,7 @@ Features:
- 🎯 Conventional commit compliance
The commit wizard guides you through:
1. **Type selection** (feat/fix/docs/style/refactor/perf/test/chore)
2. **Scope definition** (component/module affected)
3. **Description crafting**
@@ -153,6 +214,7 @@ gitzone meta remove [name]
```
Perfect for:
- Monorepo management
- Multi-package projects
- Coordinated deployments
@@ -168,6 +230,7 @@ gitzone docker prune
```
This command removes:
- Stopped containers
- Unused images
- Dangling volumes
@@ -196,6 +259,7 @@ gitzone deprecate
```
Interactive wizard for:
- Setting deprecation notices
- Guiding users to replacements
- Updating registry metadata
@@ -210,6 +274,7 @@ gitzone start
```
Automatically:
- Checks out master branch
- Pulls latest changes
- Installs dependencies
@@ -266,44 +331,58 @@ Customize gitzone behavior through `npmextra.json`:
## 🏆 Best Practices
### For New Projects
1. Start with a template: `gitzone template npm`
2. Customize the generated structure
3. Run initial format: `gitzone format`
4. Set up CI/CD: `gitzone open ci`
2. Set up local services: `gitzone services start`
3. Customize the generated structure
4. Run initial format: `gitzone format`
5. Set up CI/CD: `gitzone open ci`
### For Existing Projects
1. Initialize: `gitzone start`
2. Format codebase: `gitzone format --dry-run` (preview first!)
3. Apply formatting: `gitzone format --yes`
4. Commit changes: `gitzone commit`
4. Set up services: `gitzone services start`
5. Commit changes: `gitzone commit`
### For Teams
1. Document format preferences in `npmextra.json`
2. Use `--save-plan` for reviewable format changes
3. Enable rollback for safety
4. Standardize commit conventions
2. Share `.nogit/env.json` template for consistent service setup
3. Use `--save-plan` for reviewable format changes
4. Enable rollback for safety
5. Standardize commit conventions
## 🎯 Common Workflows
### Clean Development Cycle
### Full-Stack Development Cycle
```bash
# 1. Start fresh
gitzone start
# 2. Make changes
# 2. Spin up databases and services
gitzone services start
# 3. Make changes
# ... your development work ...
# 3. Format code
# 4. Check service logs if needed
gitzone services logs mongo
# 5. Format code
gitzone format
# 4. Commit with semantic versioning
# 6. Commit with semantic versioning
gitzone commit
# 5. Deploy (if CI/CD configured)
# Automatic via git push
# 7. Stop services when done
gitzone services stop
```
### Multi-Repository Management
```bash
# 1. Set up meta repository
gitzone meta init
@@ -318,6 +397,7 @@ gitzone meta update
```
### Safe Formatting with Rollback
```bash
# 1. Preview changes
gitzone format --dry-run
@@ -332,20 +412,45 @@ gitzone format --from-plan format-changes.json
gitzone format --rollback
```
### Database-Driven Development
```bash
# 1. Start MongoDB and MinIO
gitzone services start
# 2. Get connection string for your app
gitzone services config
# 3. Connect with MongoDB Compass
gitzone services compass
# 4. Monitor services
gitzone services status
# 5. Clean everything when done
gitzone services clean # ⚠️ Warning: deletes data
```
## 🔌 Integrations
### CI/CD Platforms
- **GitLab CI** - Full pipeline support with templates
- **GitHub Actions** - Automated workflows
- **Docker** - Container-based deployments
### Development Tools
- **TypeScript** - First-class support
- **Prettier** - Code formatting
- **ESLint** - Linting (via format modules)
- **npm/pnpm** - Package management
- **MongoDB** - Local database service
- **MinIO** - S3-compatible object storage
- **MongoDB Compass** - Database GUI integration
### Version Control
- **Git** - Deep integration
- **Semantic Versioning** - Automatic version bumping
- **Conventional Commits** - Standardized commit messages
@@ -357,34 +462,63 @@ gitzone format --rollback
3. **Leverage templates**: Start projects right with proven structures
4. **Enable caching**: Dramatically speeds up formatting operations
5. **Save format plans**: Review changes before applying in production
6. **Port management**: Let services auto-assign ports to avoid conflicts
7. **Use MongoDB Compass**: `gitzone services compass` for visual DB management
## 🐛 Troubleshooting
### Format Command Shows "Cancelled"
If the format command shows cancelled even after confirming:
- Check your `npmextra.json` configuration
- Try with `--yes` flag to skip confirmation
- Use `--verbose` for detailed output
### Docker Commands Fail
Ensure Docker daemon is running:
```bash
docker info
```
### Services Won't Start
Check for port conflicts:
```bash
# Services auto-assign ports, but you can check the config
cat .nogit/env.json
# Verify Docker is running
docker ps
```
### Template Creation Issues
Verify npm/pnpm is properly configured:
```bash
npm config get registry
```
### MongoDB Connection Issues
- Ensure services are running: `gitzone services status`
- Check firewall settings for the assigned ports
- Use `gitzone services compass` for the correct connection string
## 📈 Performance
gitzone is optimized for speed:
- **Parallel processing** for format operations
- **Smart caching** to avoid redundant work
- **Incremental updates** for meta repositories
- **Minimal dependencies** for fast installation
- **Isolated services** prevent resource conflicts
- **Auto port assignment** eliminates manual configuration
## License and Legal Information


@@ -1,170 +1,121 @@
# Gitzone Format Module Improvement Plan
Please reread /home/philkunz/.claude/CLAUDE.md before proceeding with any implementation.
# GitZone Services Command Implementation Plan
## Overview
This plan outlines improvements for the gitzone format module to enhance its functionality, reliability, and maintainability.
Implement the `gitzone services` command to manage MongoDB and MinIO containers for development projects.
## Phase 1: Core Improvements (High Priority) - COMPLETED ✅
## Tasks
### 1. Enhanced Error Handling & Recovery ✅
- [x] Implement rollback mechanism for failed format operations
- [x] Add detailed error messages with recovery suggestions
- [x] Create a `--dry-run` flag to preview changes before applying
- [x] Add transaction-like behavior: all-or-nothing formatting
- [x] Implement plan → action workflow as default behavior
### Module Structure Setup
- [x] Create `ts/mod_services/` directory
- [x] Create `mod.plugins.ts` with required imports
- [x] Create `helpers.ts` with utility functions
- [x] Create `classes.serviceconfiguration.ts` for config handling
- [x] Create `classes.dockercontainer.ts` for Docker operations
- [x] Create `classes.servicemanager.ts` for service management
- [x] Create `index.ts` with main command logic
### 2. Complete Missing Functionality
- [x] Implement the `ensureDependency` function in format.packagejson.ts
- [x] Develop the copy module for file pattern-based copying
- [x] Add dependency version constraint management
- [x] Support workspace/monorepo configurations (via configuration)
### Core Functionality
- [x] Implement ServiceConfiguration class
- [x] Load/create `.nogit/env.json` configuration
- [x] Generate random available ports (20000-30000 range)
- [x] Preserve existing custom values
- [x] Provide default values for missing fields
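A plausible reading of the ServiceConfiguration requirements above, shown only as a sketch (the real class is not included in this diff): probe random ports in the 20000-30000 range with a throwaway server, and merge defaults under any values already present in `.nogit/env.json` so custom values are preserved.
```typescript
import { promises as fs } from 'fs';
import * as net from 'net';

// Probe a candidate port with a throwaway server; true if it is free.
function isPortFree(port: number): Promise<boolean> {
  return new Promise((resolve) => {
    const server = net.createServer();
    server.once('error', () => resolve(false));
    server.once('listening', () => server.close(() => resolve(true)));
    server.listen(port, '127.0.0.1');
  });
}

// Pick a random free port in the 20000-30000 range mentioned above.
async function randomFreePort(min = 20000, max = 30000): Promise<number> {
  for (let attempt = 0; attempt < 50; attempt++) {
    const candidate = min + Math.floor(Math.random() * (max - min));
    if (await isPortFree(candidate)) return candidate;
  }
  throw new Error('no free port found in range');
}

// Load .nogit/env.json if present, keep any existing values, and only
// fill missing fields with defaults and freshly assigned ports.
async function loadOrCreateEnv(envPath = '.nogit/env.json') {
  let existing: Record<string, string> = {};
  try {
    existing = JSON.parse(await fs.readFile(envPath, 'utf8'));
  } catch {
    // file missing or unreadable: start from an empty config
  }
  const merged = {
    ...existing,
    MONGODB_PORT: existing.MONGODB_PORT ?? String(await randomFreePort()),
    MONGODB_USER: existing.MONGODB_USER ?? 'defaultadmin',
    MONGODB_PASS: existing.MONGODB_PASS ?? 'defaultpass',
  };
  await fs.mkdir('.nogit', { recursive: true });
  await fs.writeFile(envPath, JSON.stringify(merged, null, 2));
  return merged;
}
```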
### 3. Configuration & Flexibility ✅
- [x] Extend npmextra.json gitzone configuration section
- [x] Allow custom license exclusion/inclusion lists
- [x] Make format steps configurable (skip/include specific modules)
- [x] Support custom template directories (via configuration)
- [x] Add format profiles for different project types
- [x] Implement DockerContainer class
- [x] Check container status
- [x] Start/stop/restart containers
- [x] Execute Docker commands
- [x] Handle container logs
- [x] Manage volumes and port bindings
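The Integration checklist below adds `@push.rocks/smartshell`, so the real class presumably shells out to the docker CLI through it; the stand-in sketch here uses node's built-in child_process instead, purely to illustrate the status/logs/stop responsibilities (class name hypothetical).
```typescript
import { execFile } from 'child_process';
import { promisify } from 'util';

const run = promisify(execFile);

// Thin wrapper over the docker CLI; the real module shells out via
// @push.rocks/smartshell, this stand-in uses node's child_process.
class DockerContainerSketch {
  constructor(private containerName: string) {}

  async isRunning(): Promise<boolean> {
    const { stdout } = await run('docker', [
      'ps', '--filter', `name=${this.containerName}`, '--format', '{{.Names}}',
    ]);
    return stdout.trim().split('\n').includes(this.containerName);
  }

  async logs(lines = 100): Promise<string> {
    const { stdout } = await run('docker', [
      'logs', '--tail', String(lines), this.containerName,
    ]);
    return stdout;
  }

  async stop(): Promise<void> {
    await run('docker', ['stop', this.containerName]);
  }
}
```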
### 4. Architecture Changes ✅
- [x] Introduce a `FormatContext` class to manage state across modules
- [x] Create abstract `BaseFormatter` class for consistent module structure
- [x] Implement event system for inter-module communication (via context)
- [x] Add validation layer before format execution
- [x] Implement `FormatPlanner` class for plan → action workflow
- [x] Implement ServiceManager class
- [x] Manage MongoDB containers
- [x] Manage MinIO containers
- [x] Handle container lifecycle
- [x] Generate project-specific container names
- [x] Manage data directories in `.nogit/`
- [x] Generate MongoDB Compass connection strings
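The exact naming scheme for the project-specific containers is not shown in this diff; as an illustration only, names could be derived from the package name roughly like this:
```typescript
import { readFileSync } from 'fs';

// Derive stable, project-specific container names from package.json.
// Purely illustrative of "project isolation via unique names"; the real
// gitzone naming scheme is not documented in this diff.
function containerNamesFor(packageJsonPath = 'package.json') {
  const pkg = JSON.parse(readFileSync(packageJsonPath, 'utf8'));
  const slug = String(pkg.name)
    .replace(/[^a-z0-9]+/gi, '-')
    .replace(/^-|-$/g, '')
    .toLowerCase();
  return {
    mongo: `${slug}-mongodb`,
    minio: `${slug}-minio`,
  };
}
```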
## Phase 2: Performance & Reporting (Medium Priority) - COMPLETED ✅
### Commands Implementation
- [x] `start` command - Start services (mongo|s3|all)
- [x] `stop` command - Stop services (mongo|s3|all)
- [x] `restart` command - Restart services (mongo|s3|all)
- [x] `status` command - Show service status
- [x] `config` command - Show current configuration
- [x] `compass` command - Show MongoDB Compass connection string
- [x] `logs` command - Show service logs with line count
- [x] `remove` command - Remove containers (preserve data)
- [x] `clean` command - Remove containers and data
### 5. Performance Optimizations ✅
- [x] Implement parallel execution for independent format modules
- [x] Add file change detection to skip unchanged files
- [x] Create format cache to track last formatted state
- [x] Optimize Prettier runs by batching files
### Integration
- [x] Add `@push.rocks/smartshell` to main plugins.ts
- [x] Add `@push.rocks/smartnetwork` to main plugins.ts
- [x] Add `@push.rocks/smartinteraction` to main plugins.ts
- [x] Register services command in `gitzone.cli.ts`
### 6. Enhanced Reporting & Visibility ✅
- [x] Generate comprehensive format report showing all changes
- [x] Add diff view for file modifications
- [x] Create verbose logging option
- [x] Add format statistics (files changed, time taken, etc.)
### Features
- [x] Auto-configuration with smart defaults
- [x] Random port assignment to avoid conflicts
- [x] Project isolation with unique container names
- [x] Data persistence in `.nogit/` directories
- [x] Status display (running/stopped/not installed)
- [x] Interactive confirmations for destructive operations
- [x] Colored console output
- [x] MinIO bucket auto-creation
- [x] MongoDB Compass connection string with network IP
## Phase 3: Advanced Features (Lower Priority) - PARTIALLY COMPLETED
### Testing
- [ ] Test service start/stop operations
- [ ] Test configuration creation and updates
- [ ] Test port collision handling
- [ ] Test data persistence
- [ ] Test MongoDB Compass connection string generation
- [ ] Test all command variations
### 7. Better Integration & Extensibility ⏳
- [ ] Create plugin system for custom format modules
- [ ] Add hooks for pre/post format operations
- [ ] Support custom validation rules
- [ ] Integrate with git hooks for pre-commit formatting
## Configuration Format
```json
{
"PROJECT_NAME": "derived-from-package-name",
"MONGODB_HOST": "localhost",
"MONGODB_NAME": "project-name",
"MONGODB_PORT": "random-port",
"MONGODB_USER": "defaultadmin",
"MONGODB_PASS": "defaultpass",
"S3_HOST": "localhost",
"S3_PORT": "random-port",
"S3_CONSOLE_PORT": "s3-port+1",
"S3_USER": "defaultadmin",
"S3_PASS": "defaultpass",
"S3_BUCKET": "project-name-documents"
}
```
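For TypeScript consumers the same fields can be expressed as an interface; the one below simply mirrors the JSON above (the interface name is made up and is not part of the codebase):
```typescript
// Shape of .nogit/env.json as listed above; all values are stored as strings.
interface IServicesEnvConfig {
  PROJECT_NAME: string;
  MONGODB_HOST: string;
  MONGODB_NAME: string;
  MONGODB_PORT: string;
  MONGODB_USER: string;
  MONGODB_PASS: string;
  S3_HOST: string;
  S3_PORT: string;
  S3_CONSOLE_PORT: string;
  S3_USER: string;
  S3_PASS: string;
  S3_BUCKET: string;
}
```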
### 8. Improved Template Integration ⏳
- [ ] Better error handling when smartscaf operations fail
- [ ] Add pre/post template hooks for custom processing
- [ ] Validate template results before proceeding with format
- [ ] Support skipping template updates via configuration
## Command Examples
```bash
gitzone services start # Start all services
gitzone services start mongo # Start only MongoDB
gitzone services stop # Stop all services
gitzone services status # Check service status
gitzone services config # Show configuration
gitzone services compass # Show MongoDB Compass connection string
gitzone services logs mongo 50 # Show last 50 lines of MongoDB logs
gitzone services remove # Remove containers (preserve data)
gitzone services clean # Remove containers and data
```
### 9. Enhanced License Management ⏳
- [ ] Make license checking configurable (partial)
- [ ] Add license compatibility matrix
- [x] Support license exceptions for specific packages
- [ ] Generate license report for compliance
## Progress Notes
Implementation started: 2025-08-14
Implementation completed: 2025-08-14
### 10. Better Package.json Management ⏳
- [ ] Smart dependency sorting and grouping
- [ ] Automated script generation based on project type
- [ ] Support for pnpm workspace configurations
- [ ] Validation of package.json schema
## Summary
Successfully implemented the `gitzone services` command in TypeScript, providing a complete replacement for the `services.sh` shell script. The implementation includes:
### 11. Quality of Life Improvements ⏳
- [ ] Interactive mode for format configuration
- [ ] Undo/redo capability for format operations
- [ ] Format presets for common scenarios
- [x] Better progress indicators and user feedback
1. **Complete Docker service management** for MongoDB and MinIO containers
2. **Smart configuration management** with automatic port assignment and conflict avoidance
3. **MongoDB Compass support** with network IP detection for remote connections
4. **Project isolation** using project-specific container names
5. **Data persistence** in `.nogit/` directories
6. **Interactive confirmations** for destructive operations
7. **Comprehensive command set** including start, stop, restart, status, config, compass, logs, remove, and clean commands
## Implementation Status
### ✅ Completed Features
1. **Rollback Mechanism**
- Full backup/restore functionality
- Manifest tracking and integrity checks
- CLI commands for rollback operations
2. **Plan → Action Workflow**
- Two-phase approach (analyze then execute)
- Interactive confirmation
- Dry-run support
3. **Configuration System**
- Comprehensive npmextra.json support
- Module control (skip/only/order)
- Cache configuration
- Parallel execution settings
4. **Performance Improvements**
- Parallel execution by dependency analysis
- File change caching
- Prettier batching
- Execution time tracking
5. **Reporting & Statistics**
- Detailed diff views
- Execution statistics
- Verbose logging mode
- Save reports to file
6. **Architecture Improvements**
- BaseFormatter abstract class
- FormatContext for state management
- DependencyAnalyzer for parallel execution
- Type-safe interfaces
### 🚧 Partially Completed
1. **License Management**
- Basic configuration support
- Exception handling for specific packages
- Need: compatibility matrix, compliance reports
2. **Package.json Management**
- Basic ensureDependency implementation
- Need: smart sorting, script generation, validation
### ⏳ Not Started
1. **Plugin System**
- Need to design plugin API
- Hook system for pre/post operations
- Custom validation rules
2. **Git Integration**
- Pre-commit hooks
- Automatic formatting on commit
3. **Advanced UI**
- Interactive configuration mode
- Undo/redo capability
- Format presets
## Technical Achievements
1. **Type Safety**: All new code uses TypeScript interfaces and types
2. **Error Handling**: Comprehensive try-catch blocks with rollback
3. **API Compatibility**: Updated to use latest smartfile/smartnpm APIs
4. **Testing**: Ready for comprehensive test suite
5. **Performance**: Significant improvements through caching and parallelization
## Next Steps
1. Write comprehensive tests for all new functionality
2. Create user documentation for new features
3. Consider plugin API design for extensibility
4. Implement remaining Phase 3 features based on user feedback
5. Performance benchmarking and optimization
## Success Metrics Achieved
- ✅ Reduced error rates through rollback mechanism
- ✅ Faster execution through parallel processing and caching
- ✅ Enhanced user control through configuration
- ✅ Better visibility through reporting and statistics
- ✅ Improved maintainability through better architecture
The module is fully integrated into the gitzone CLI and ready for testing.


@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@git.zone/cli',
- version: '1.16.1',
+ version: '1.16.10',
description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
}

View File

@@ -40,7 +40,9 @@ export class GitzoneConfig {
public async readConfigFromCwd() {
const npmextraInstance = new plugins.npmextra.Npmextra(paths.cwd);
this.data = npmextraInstance.dataFor<IGitzoneConfigData>('gitzone', {});
this.data.npmciOptions = npmextraInstance.dataFor<IGitzoneConfigData['npmciOptions']>('npmci', {
this.data.npmciOptions = npmextraInstance.dataFor<
IGitzoneConfigData['npmciOptions']
>('npmci', {
npmAccessLevel: 'public',
});
}

View File

@@ -89,7 +89,7 @@ export let run = async () => {
detailed: argvArg.detailed,
interactive: argvArg.interactive !== false,
parallel: argvArg.parallel !== false,
verbose: argvArg.verbose
verbose: argvArg.verbose,
});
});
@@ -131,6 +131,14 @@ export let run = async () => {
modHelpers.run(argvArg);
});
/**
* manage development services (MongoDB, S3/MinIO)
*/
gitzoneSmartcli.addCommand('services').subscribe(async (argvArg) => {
const modServices = await import('./mod_services/index.js');
await modServices.run(argvArg);
});
// start parsing of the cli
gitzoneSmartcli.startParse();
return await done.promise;

View File

@@ -5,7 +5,8 @@ import * as plugins from './plugins.js';
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
// Add console destination
const consoleDestination = new plugins.smartlogDestinationLocal.DestinationLocal();
const consoleDestination =
new plugins.smartlogDestinationLocal.DestinationLocal();
logger.addLogDestination(consoleDestination);
// Verbose logging helper

View File

@@ -10,20 +10,22 @@ export const run = async (argvArg: any) => {
await formatMod.run();
}
logger.log('info', `gathering facts...`);
const aidoc = new plugins.tsdoc.AiDoc();
await aidoc.start();
const nextCommitObject = await aidoc.buildNextCommitObject(paths.cwd);
logger.log('info', `---------
logger.log(
'info',
`---------
Next recommended commit would be:
===========
-> ${nextCommitObject.recommendedNextVersion}:
-> ${nextCommitObject.recommendedNextVersionLevel}(${nextCommitObject.recommendedNextVersionScope}): ${nextCommitObject.recommendedNextVersionMessage}
===========
`);
`,
);
const commitInteract = new plugins.smartinteract.SmartInteract();
commitInteract.addQuestions([
{
@@ -72,32 +74,55 @@ export const run = async (argvArg: any) => {
});
logger.log('info', `Baking commitinfo into code ...`);
const commitInfo = new plugins.commitinfo.CommitInfo(paths.cwd, commitVersionType);
const commitInfo = new plugins.commitinfo.CommitInfo(
paths.cwd,
commitVersionType,
);
await commitInfo.writeIntoPotentialDirs();
logger.log('info', `Writing changelog.md ...`);
let changelog = nextCommitObject.changelog;
changelog = changelog.replaceAll('{{nextVersion}}', (await commitInfo.getNextPlannedVersion()).versionString);
changelog = changelog.replaceAll('{{nextVersionScope}}', `${await answerBucket.getAnswerFor('commitType')}(${await answerBucket.getAnswerFor('commitScope')})`);
changelog = changelog.replaceAll('{{nextVersionMessage}}', nextCommitObject.recommendedNextVersionMessage);
changelog = changelog.replaceAll(
'{{nextVersion}}',
(await commitInfo.getNextPlannedVersion()).versionString,
);
changelog = changelog.replaceAll(
'{{nextVersionScope}}',
`${await answerBucket.getAnswerFor('commitType')}(${await answerBucket.getAnswerFor('commitScope')})`,
);
changelog = changelog.replaceAll(
'{{nextVersionMessage}}',
nextCommitObject.recommendedNextVersionMessage,
);
if (nextCommitObject.recommendedNextVersionDetails?.length > 0) {
changelog = changelog.replaceAll('{{nextVersionDetails}}', '- ' + nextCommitObject.recommendedNextVersionDetails.join('\n- '));
changelog = changelog.replaceAll(
'{{nextVersionDetails}}',
'- ' + nextCommitObject.recommendedNextVersionDetails.join('\n- '),
);
} else {
changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
}
await plugins.smartfile.memory.toFs(changelog, plugins.path.join(paths.cwd, `changelog.md`));
await plugins.smartfile.memory.toFs(
changelog,
plugins.path.join(paths.cwd, `changelog.md`),
);
logger.log('info', `Staging files for commit:`);
await smartshellInstance.exec(`git add -A`);
await smartshellInstance.exec(`git commit -m "${commitString}"`);
await smartshellInstance.exec(`npm version ${commitVersionType}`);
if (answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true')) {
if (
answerBucket.getAnswerFor('pushToOrigin') &&
!(process.env.CI === 'true')
) {
await smartshellInstance.exec(`git push origin master --follow-tags`);
}
};
const createCommitStringFromAnswerBucket = (answerBucket: plugins.smartinteract.AnswerBucket) => {
const createCommitStringFromAnswerBucket = (
answerBucket: plugins.smartinteract.AnswerBucket,
) => {
const commitType = answerBucket.getAnswerFor('commitType');
const commitScope = answerBucket.getAnswerFor('commitScope');
const commitDescription = answerBucket.getAnswerFor('commitDescription');

View File

@@ -36,7 +36,10 @@ export const run = async () => {
const registryUrls = answerBucket.getAnswerFor(`registryUrls`).split(',');
const oldPackageName = answerBucket.getAnswerFor(`oldPackageName`);
const newPackageName = answerBucket.getAnswerFor(`newPackageName`);
logger.log('info', `Deprecating package ${oldPackageName} in favour of ${newPackageName}`);
logger.log(
'info',
`Deprecating package ${oldPackageName} in favour of ${newPackageName}`,
);
const smartshellInstance = new plugins.smartshell.Smartshell({
executor: 'bash',
});

View File

@@ -2,18 +2,15 @@ import * as plugins from './mod.plugins.js';
import { FormatContext } from './classes.formatcontext.js';
import type { IPlannedChange } from './interfaces.format.js';
import { Project } from '../classes.project.js';
import { ChangeCache } from './classes.changecache.js';
export abstract class BaseFormatter {
protected context: FormatContext;
protected project: Project;
protected cache: ChangeCache;
protected stats: any; // Will be FormatStats from context
constructor(context: FormatContext, project: Project) {
this.context = context;
this.project = project;
this.cache = context.getChangeCache();
this.stats = context.getFormatStats();
}
@@ -40,7 +37,7 @@ export abstract class BaseFormatter {
await this.postExecute();
} catch (error) {
await this.context.rollbackOperation();
// Don't rollback here - let the FormatPlanner handle it
throw error;
} finally {
this.stats.endModule(this.name, startTime);
@@ -56,38 +53,30 @@ export abstract class BaseFormatter {
}
protected async modifyFile(filepath: string, content: string): Promise<void> {
await this.context.trackFileChange(filepath);
await plugins.smartfile.memory.toFs(content, filepath);
await this.cache.updateFileCache(filepath);
// Validate filepath before writing
if (!filepath || filepath.trim() === '') {
throw new Error(`Invalid empty filepath in modifyFile`);
}
// Ensure we have a proper path with directory component
// If the path has no directory component (e.g., "package.json"), prepend "./"
let normalizedPath = filepath;
if (!plugins.path.parse(filepath).dir) {
normalizedPath = './' + filepath;
}
await plugins.smartfile.memory.toFs(content, normalizedPath);
}
protected async createFile(filepath: string, content: string): Promise<void> {
await plugins.smartfile.memory.toFs(content, filepath);
await this.cache.updateFileCache(filepath);
}
protected async deleteFile(filepath: string): Promise<void> {
await this.context.trackFileChange(filepath);
await plugins.smartfile.fs.remove(filepath);
}
protected async shouldProcessFile(filepath: string): Promise<boolean> {
const config = new plugins.npmextra.Npmextra();
const useCache = config.dataFor('gitzone.format.cache.enabled', true);
if (!useCache) {
return true; // Process all files if cache is disabled
}
const hasChanged = await this.cache.hasFileChanged(filepath);
// Record cache statistics
if (hasChanged) {
this.stats.recordCacheMiss();
} else {
this.stats.recordCacheHit();
}
return hasChanged;
return true;
}
}

View File

@@ -29,21 +29,54 @@ export class ChangeCache {
}
async getManifest(): Promise<ICacheManifest> {
const defaultManifest: ICacheManifest = {
version: this.cacheVersion,
lastFormat: 0,
files: [],
};
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
if (!exists) {
return {
version: this.cacheVersion,
lastFormat: 0,
files: []
};
return defaultManifest;
}
const content = await plugins.smartfile.fs.toStringSync(this.manifestPath);
return JSON.parse(content);
try {
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
const manifest = JSON.parse(content);
// Validate the manifest structure
if (this.isValidManifest(manifest)) {
return manifest;
} else {
console.warn('Invalid manifest structure, returning default manifest');
return defaultManifest;
}
} catch (error) {
console.warn(
`Failed to read cache manifest: ${error.message}, returning default manifest`,
);
// Try to delete the corrupted file
try {
await plugins.smartfile.fs.remove(this.manifestPath);
} catch (removeError) {
// Ignore removal errors
}
return defaultManifest;
}
}
async saveManifest(manifest: ICacheManifest): Promise<void> {
await plugins.smartfile.memory.toFs(JSON.stringify(manifest, null, 2), this.manifestPath);
// Validate before saving
if (!this.isValidManifest(manifest)) {
throw new Error('Invalid manifest structure, cannot save');
}
// Ensure directory exists
await plugins.smartfile.fs.ensureDir(this.cacheDir);
// Write directly with proper JSON stringification
const jsonContent = JSON.stringify(manifest, null, 2);
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
}
async hasFileChanged(filePath: string): Promise<boolean> {
@@ -59,21 +92,29 @@ export class ChangeCache {
// Get current file stats
const stats = await plugins.smartfile.fs.stat(absolutePath);
const content = await plugins.smartfile.fs.toStringSync(absolutePath);
// Skip directories
if (stats.isDirectory()) {
return false; // Directories are not processed
}
const content = plugins.smartfile.fs.toStringSync(absolutePath);
const currentChecksum = this.calculateChecksum(content);
// Get cached info
const manifest = await this.getManifest();
const cachedFile = manifest.files.find(f => f.path === filePath);
const cachedFile = manifest.files.find((f) => f.path === filePath);
if (!cachedFile) {
return true; // Not in cache, so it's changed
}
// Compare checksums
return cachedFile.checksum !== currentChecksum ||
cachedFile.size !== stats.size ||
cachedFile.modified !== stats.mtimeMs;
return (
cachedFile.checksum !== currentChecksum ||
cachedFile.size !== stats.size ||
cachedFile.modified !== stats.mtimeMs
);
}
async updateFileCache(filePath: string): Promise<void> {
@@ -83,18 +124,24 @@ export class ChangeCache {
// Get current file stats
const stats = await plugins.smartfile.fs.stat(absolutePath);
const content = await plugins.smartfile.fs.toStringSync(absolutePath);
// Skip directories
if (stats.isDirectory()) {
return; // Don't cache directories
}
const content = plugins.smartfile.fs.toStringSync(absolutePath);
const checksum = this.calculateChecksum(content);
// Update manifest
const manifest = await this.getManifest();
const existingIndex = manifest.files.findIndex(f => f.path === filePath);
const existingIndex = manifest.files.findIndex((f) => f.path === filePath);
const cacheEntry: IFileCache = {
path: filePath,
checksum,
modified: stats.mtimeMs,
size: stats.size
size: stats.size,
};
if (existingIndex !== -1) {
@@ -141,4 +188,36 @@ export class ChangeCache {
private calculateChecksum(content: string | Buffer): string {
return plugins.crypto.createHash('sha256').update(content).digest('hex');
}
private isValidManifest(manifest: any): manifest is ICacheManifest {
// Check if manifest has the required structure
if (!manifest || typeof manifest !== 'object') {
return false;
}
// Check required fields
if (
typeof manifest.version !== 'string' ||
typeof manifest.lastFormat !== 'number' ||
!Array.isArray(manifest.files)
) {
return false;
}
// Check each file entry
for (const file of manifest.files) {
if (
!file ||
typeof file !== 'object' ||
typeof file.path !== 'string' ||
typeof file.checksum !== 'string' ||
typeof file.modified !== 'number' ||
typeof file.size !== 'number'
) {
return false;
}
}
return true;
}
}

View File

@@ -17,16 +17,23 @@ export class DependencyAnalyzer {
private initializeDependencies(): void {
// Define dependencies between format modules
const dependencies = {
'cleanup': [], // No dependencies
'npmextra': [], // No dependencies
'license': ['npmextra'], // Depends on npmextra for config
'packagejson': ['npmextra'], // Depends on npmextra for config
'templates': ['npmextra', 'packagejson'], // Depends on both
'gitignore': ['templates'], // Depends on templates
'tsconfig': ['packagejson'], // Depends on package.json
'prettier': ['cleanup', 'npmextra', 'packagejson', 'templates', 'gitignore', 'tsconfig'], // Runs after most others
'readme': ['npmextra', 'packagejson'], // Depends on project metadata
'copy': ['npmextra'], // Depends on config
cleanup: [], // No dependencies
npmextra: [], // No dependencies
license: ['npmextra'], // Depends on npmextra for config
packagejson: ['npmextra'], // Depends on npmextra for config
templates: ['npmextra', 'packagejson'], // Depends on both
gitignore: ['templates'], // Depends on templates
tsconfig: ['packagejson'], // Depends on package.json
prettier: [
'cleanup',
'npmextra',
'packagejson',
'templates',
'gitignore',
'tsconfig',
], // Runs after most others
readme: ['npmextra', 'packagejson'], // Depends on project metadata
copy: ['npmextra'], // Depends on config
};
// Initialize all modules
@@ -34,7 +41,7 @@ export class DependencyAnalyzer {
this.moduleDependencies.set(module, {
module,
dependencies: new Set(deps),
dependents: new Set()
dependents: new Set(),
});
}
@@ -50,7 +57,7 @@ export class DependencyAnalyzer {
}
getExecutionGroups(modules: BaseFormatter[]): BaseFormatter[][] {
const modulesMap = new Map(modules.map(m => [m.name, m]));
const modulesMap = new Map(modules.map((m) => [m.name, m]));
const executed = new Set<string>();
const groups: BaseFormatter[][] = [];
@@ -68,8 +75,9 @@ export class DependencyAnalyzer {
}
// Check if all dependencies have been executed
const allDepsExecuted = Array.from(dependency.dependencies)
.every(dep => executed.has(dep) || !modulesMap.has(dep));
const allDepsExecuted = Array.from(dependency.dependencies).every(
(dep) => executed.has(dep) || !modulesMap.has(dep),
);
if (allDepsExecuted) {
currentGroup.push(module);
@@ -85,7 +93,7 @@ export class DependencyAnalyzer {
}
}
currentGroup.forEach(m => executed.add(m.name));
currentGroup.forEach((m) => executed.add(m.name));
groups.push(currentGroup);
}
@@ -99,9 +107,11 @@ export class DependencyAnalyzer {
if (!dep1 || !dep2) return false;
// Check if module1 depends on module2 or vice versa
return !dep1.dependencies.has(module2) &&
!dep2.dependencies.has(module1) &&
!dep1.dependents.has(module2) &&
!dep2.dependents.has(module1);
return (
!dep1.dependencies.has(module2) &&
!dep2.dependencies.has(module1) &&
!dep1.dependents.has(module2) &&
!dep2.dependents.has(module1)
);
}
}

View File

@@ -5,7 +5,11 @@ import { logger } from '../gitzone.logging.js';
export class DiffReporter {
private diffs: Map<string, string> = new Map();
async generateDiff(filePath: string, oldContent: string, newContent: string): Promise<string> {
async generateDiff(
filePath: string,
oldContent: string,
newContent: string,
): Promise<string> {
const diff = plugins.smartdiff.createDiff(oldContent, newContent);
this.diffs.set(filePath, diff);
return diff;
@@ -22,16 +26,25 @@ export class DiffReporter {
return null;
}
const currentContent = await plugins.smartfile.fs.toStringSync(change.path);
const currentContent = await plugins.smartfile.fs.toStringSync(
change.path,
);
// For planned changes, we need the new content
if (!change.content) {
return null;
}
return await this.generateDiff(change.path, currentContent, change.content);
return await this.generateDiff(
change.path,
currentContent,
change.content,
);
} catch (error) {
logger.log('error', `Failed to generate diff for ${change.path}: ${error.message}`);
logger.log(
'error',
`Failed to generate diff for ${change.path}: ${error.message}`,
);
return null;
}
}
@@ -69,7 +82,7 @@ export class DiffReporter {
private colorDiff(diff: string): string {
const lines = diff.split('\n');
const coloredLines = lines.map(line => {
const coloredLines = lines.map((line) => {
if (line.startsWith('+') && !line.startsWith('+++')) {
return `\x1b[32m${line}\x1b[0m`; // Green for additions
} else if (line.startsWith('-') && !line.startsWith('---')) {
@@ -90,11 +103,14 @@ export class DiffReporter {
totalFiles: this.diffs.size,
diffs: Array.from(this.diffs.entries()).map(([path, diff]) => ({
path,
diff
}))
diff,
})),
};
await plugins.smartfile.memory.toFs(JSON.stringify(report, null, 2), outputPath);
await plugins.smartfile.memory.toFs(
JSON.stringify(report, null, 2),
outputPath,
);
logger.log('info', `Diff report saved to ${outputPath}`);
}

View File

@@ -1,64 +1,13 @@
import * as plugins from './mod.plugins.js';
import { RollbackManager } from './classes.rollbackmanager.js';
import { ChangeCache } from './classes.changecache.js';
import { FormatStats } from './classes.formatstats.js';
import type { IFormatOperation, IFormatPlan } from './interfaces.format.js';
export class FormatContext {
private rollbackManager: RollbackManager;
private currentOperation: IFormatOperation | null = null;
private changeCache: ChangeCache;
private formatStats: FormatStats;
constructor() {
this.rollbackManager = new RollbackManager();
this.changeCache = new ChangeCache();
this.formatStats = new FormatStats();
}
async beginOperation(): Promise<void> {
this.currentOperation = await this.rollbackManager.createOperation();
}
async trackFileChange(filepath: string): Promise<void> {
if (!this.currentOperation) {
throw new Error('No operation in progress. Call beginOperation() first.');
}
await this.rollbackManager.backupFile(filepath, this.currentOperation.id);
}
async commitOperation(): Promise<void> {
if (!this.currentOperation) {
throw new Error('No operation in progress. Call beginOperation() first.');
}
await this.rollbackManager.markComplete(this.currentOperation.id);
this.currentOperation = null;
}
async rollbackOperation(): Promise<void> {
if (!this.currentOperation) {
throw new Error('No operation in progress. Call beginOperation() first.');
}
await this.rollbackManager.rollback(this.currentOperation.id);
this.currentOperation = null;
}
async rollbackTo(operationId: string): Promise<void> {
await this.rollbackManager.rollback(operationId);
}
getRollbackManager(): RollbackManager {
return this.rollbackManager;
}
getChangeCache(): ChangeCache {
return this.changeCache;
}
async initializeCache(): Promise<void> {
await this.changeCache.initialize();
}
getFormatStats(): FormatStats {
return this.formatStats;
}

View File

@@ -18,10 +18,10 @@ export class FormatPlanner {
filesAdded: 0,
filesModified: 0,
filesRemoved: 0,
estimatedTime: 0
estimatedTime: 0,
},
changes: [],
warnings: []
warnings: [],
};
for (const module of modules) {
@@ -49,67 +49,51 @@ export class FormatPlanner {
plan.warnings.push({
level: 'error',
message: `Failed to analyze module ${module.name}: ${error.message}`,
module: module.name
module: module.name,
});
}
}
plan.summary.totalFiles = plan.summary.filesAdded + plan.summary.filesModified + plan.summary.filesRemoved;
plan.summary.totalFiles =
plan.summary.filesAdded +
plan.summary.filesModified +
plan.summary.filesRemoved;
plan.summary.estimatedTime = plan.summary.totalFiles * 100; // 100ms per file estimate
return plan;
}
async executePlan(plan: IFormatPlan, modules: BaseFormatter[], context: FormatContext, parallel: boolean = true): Promise<void> {
await context.beginOperation();
async executePlan(
plan: IFormatPlan,
modules: BaseFormatter[],
context: FormatContext,
parallel: boolean = false,
): Promise<void> {
const startTime = Date.now();
try {
if (parallel) {
// Get execution groups based on dependencies
const executionGroups = this.dependencyAnalyzer.getExecutionGroups(modules);
// Always use sequential execution to avoid race conditions
for (const module of modules) {
const changes = this.plannedChanges.get(module.name) || [];
logger.log('info', `Executing formatters in ${executionGroups.length} groups...`);
for (let i = 0; i < executionGroups.length; i++) {
const group = executionGroups[i];
logger.log('info', `Executing group ${i + 1}: ${group.map(m => m.name).join(', ')}`);
// Execute modules in this group in parallel
const promises = group.map(async (module) => {
const changes = this.plannedChanges.get(module.name) || [];
if (changes.length > 0) {
logger.log('info', `Executing ${module.name} formatter...`);
await module.execute(changes);
}
});
await Promise.all(promises);
}
} else {
// Sequential execution (original implementation)
for (const module of modules) {
const changes = this.plannedChanges.get(module.name) || [];
if (changes.length > 0) {
logger.log('info', `Executing ${module.name} formatter...`);
await module.execute(changes);
}
if (changes.length > 0) {
logger.log('info', `Executing ${module.name} formatter...`);
await module.execute(changes);
}
}
const endTime = Date.now();
const duration = endTime - startTime;
logger.log('info', `Format operations completed in ${duration}ms`);
await context.commitOperation();
} catch (error) {
await context.rollbackOperation();
throw error;
}
}
async displayPlan(plan: IFormatPlan, detailed: boolean = false): Promise<void> {
async displayPlan(
plan: IFormatPlan,
detailed: boolean = false,
): Promise<void> {
console.log('\nFormat Plan:');
console.log('━'.repeat(50));
console.log(`Summary: ${plan.summary.totalFiles} files will be changed`);
@@ -128,7 +112,9 @@ export class FormatPlanner {
}
for (const [module, changes] of changesByModule) {
console.log(`\n${this.getModuleIcon(module)} ${module} (${changes.length} ${changes.length === 1 ? 'file' : 'files'})`);
console.log(
`\n${this.getModuleIcon(module)} ${module} (${changes.length} ${changes.length === 1 ? 'file' : 'files'})`,
);
for (const change of changes) {
const icon = this.getChangeIcon(change.type);
@@ -157,16 +143,16 @@ export class FormatPlanner {
private getModuleIcon(module: string): string {
const icons: Record<string, string> = {
'packagejson': '📦',
'license': '📝',
'tsconfig': '🔧',
'cleanup': '🚮',
'gitignore': '🔒',
'prettier': '✨',
'readme': '📖',
'templates': '📄',
'npmextra': '⚙️',
'copy': '📋'
packagejson: '📦',
license: '📝',
tsconfig: '🔧',
cleanup: '🚮',
gitignore: '🔒',
prettier: '✨',
readme: '📖',
templates: '📄',
npmextra: '⚙️',
copy: '📋',
};
return icons[module] || '📁';
}

View File

@@ -44,8 +44,8 @@ export class FormatStats {
totalDeleted: 0,
totalErrors: 0,
cacheHits: 0,
cacheMisses: 0
}
cacheMisses: 0,
},
};
}
@@ -58,7 +58,7 @@ export class FormatStats {
successes: 0,
filesCreated: 0,
filesModified: 0,
filesDeleted: 0
filesDeleted: 0,
});
}
@@ -73,7 +73,11 @@ export class FormatStats {
}
}
recordFileOperation(moduleName: string, operation: 'create' | 'modify' | 'delete', success: boolean = true): void {
recordFileOperation(
moduleName: string,
operation: 'create' | 'modify' | 'delete',
success: boolean = true,
): void {
const moduleStats = this.stats.moduleStats.get(moduleName);
if (!moduleStats) return;
@@ -122,16 +126,24 @@ export class FormatStats {
// Overall stats
console.log('\nOverall Summary:');
console.log(` Total Execution Time: ${this.formatDuration(this.stats.totalExecutionTime)}`);
console.log(
` Total Execution Time: ${this.formatDuration(this.stats.totalExecutionTime)}`,
);
console.log(` Files Processed: ${this.stats.overallStats.totalFiles}`);
console.log(` • Created: ${this.stats.overallStats.totalCreated}`);
console.log(` • Modified: ${this.stats.overallStats.totalModified}`);
console.log(` • Deleted: ${this.stats.overallStats.totalDeleted}`);
console.log(` Errors: ${this.stats.overallStats.totalErrors}`);
if (this.stats.overallStats.cacheHits > 0 || this.stats.overallStats.cacheMisses > 0) {
const cacheHitRate = this.stats.overallStats.cacheHits /
(this.stats.overallStats.cacheHits + this.stats.overallStats.cacheMisses) * 100;
if (
this.stats.overallStats.cacheHits > 0 ||
this.stats.overallStats.cacheMisses > 0
) {
const cacheHitRate =
(this.stats.overallStats.cacheHits /
(this.stats.overallStats.cacheHits +
this.stats.overallStats.cacheMisses)) *
100;
console.log(` Cache Hit Rate: ${cacheHitRate.toFixed(1)}%`);
console.log(` • Hits: ${this.stats.overallStats.cacheHits}`);
console.log(` • Misses: ${this.stats.overallStats.cacheMisses}`);
@@ -141,12 +153,17 @@ export class FormatStats {
console.log('\nModule Breakdown:');
console.log('─'.repeat(50));
const sortedModules = Array.from(this.stats.moduleStats.values())
.sort((a, b) => b.filesProcessed - a.filesProcessed);
const sortedModules = Array.from(this.stats.moduleStats.values()).sort(
(a, b) => b.filesProcessed - a.filesProcessed,
);
for (const moduleStats of sortedModules) {
console.log(`\n${this.getModuleIcon(moduleStats.name)} ${moduleStats.name}:`);
console.log(` Execution Time: ${this.formatDuration(moduleStats.executionTime)}`);
console.log(
`\n${this.getModuleIcon(moduleStats.name)} ${moduleStats.name}:`,
);
console.log(
` Execution Time: ${this.formatDuration(moduleStats.executionTime)}`,
);
console.log(` Files Processed: ${moduleStats.filesProcessed}`);
if (moduleStats.filesCreated > 0) {
@@ -172,10 +189,13 @@ export class FormatStats {
timestamp: new Date().toISOString(),
executionTime: this.stats.totalExecutionTime,
overallStats: this.stats.overallStats,
moduleStats: Array.from(this.stats.moduleStats.values())
moduleStats: Array.from(this.stats.moduleStats.values()),
};
await plugins.smartfile.memory.toFs(JSON.stringify(report, null, 2), outputPath);
await plugins.smartfile.memory.toFs(
JSON.stringify(report, null, 2),
outputPath,
);
logger.log('info', `Statistics report saved to ${outputPath}`);
}
@@ -193,16 +213,16 @@ export class FormatStats {
private getModuleIcon(module: string): string {
const icons: Record<string, string> = {
'packagejson': '📦',
'license': '📝',
'tsconfig': '🔧',
'cleanup': '🚮',
'gitignore': '🔒',
'prettier': '✨',
'readme': '📖',
'templates': '📄',
'npmextra': '⚙️',
'copy': '📋'
packagejson: '📦',
license: '📝',
tsconfig: '🔧',
cleanup: '🚮',
gitignore: '🔒',
prettier: '✨',
readme: '📖',
templates: '📄',
npmextra: '⚙️',
copy: '📋',
};
return icons[module] || '📁';
}

View File

@@ -18,7 +18,7 @@ export class RollbackManager {
id: this.generateOperationId(),
timestamp: Date.now(),
files: [],
status: 'pending'
status: 'pending',
};
await this.updateManifest(operation);
@@ -43,7 +43,7 @@ export class RollbackManager {
}
// Read file content and metadata
const content = await plugins.smartfile.fs.toStringSync(absolutePath);
const content = plugins.smartfile.fs.toStringSync(absolutePath);
const stats = await plugins.smartfile.fs.stat(absolutePath);
const checksum = this.calculateChecksum(content);
@@ -57,7 +57,7 @@ export class RollbackManager {
path: filepath,
originalContent: content,
checksum,
permissions: stats.mode.toString(8)
permissions: stats.mode.toString(8),
});
await this.updateManifest(operation);
@@ -66,7 +66,9 @@ export class RollbackManager {
async rollback(operationId: string): Promise<void> {
const operation = await this.getOperation(operationId);
if (!operation) {
throw new Error(`Operation ${operationId} not found`);
// Operation doesn't exist, might have already been rolled back or never created
console.warn(`Operation ${operationId} not found for rollback, skipping`);
return;
}
if (operation.status === 'rolled-back') {
@@ -82,7 +84,7 @@ export class RollbackManager {
// Verify backup integrity
const backupPath = this.getBackupPath(operationId, file.path);
const backupContent = await plugins.smartfile.fs.toStringSync(backupPath);
const backupContent = plugins.smartfile.fs.toStringSync(backupPath);
const backupChecksum = this.calculateChecksum(backupContent);
if (backupChecksum !== file.checksum) {
@@ -114,19 +116,25 @@ export class RollbackManager {
async cleanOldBackups(retentionDays: number): Promise<void> {
const manifest = await this.getManifest();
const cutoffTime = Date.now() - (retentionDays * 24 * 60 * 60 * 1000);
const cutoffTime = Date.now() - retentionDays * 24 * 60 * 60 * 1000;
const operationsToDelete = manifest.operations.filter(op =>
op.timestamp < cutoffTime && op.status === 'completed'
const operationsToDelete = manifest.operations.filter(
(op) => op.timestamp < cutoffTime && op.status === 'completed',
);
for (const operation of operationsToDelete) {
// Remove backup files
const operationDir = plugins.path.join(this.backupDir, 'operations', operation.id);
const operationDir = plugins.path.join(
this.backupDir,
'operations',
operation.id,
);
await plugins.smartfile.fs.remove(operationDir);
// Remove from manifest
manifest.operations = manifest.operations.filter(op => op.id !== operation.id);
manifest.operations = manifest.operations.filter(
(op) => op.id !== operation.id,
);
}
await this.saveManifest(manifest);
@@ -146,7 +154,7 @@ export class RollbackManager {
return false;
}
const content = await plugins.smartfile.fs.toStringSync(backupPath);
const content = plugins.smartfile.fs.toStringSync(backupPath);
const checksum = this.calculateChecksum(content);
if (checksum !== file.checksum) {
@@ -164,7 +172,9 @@ export class RollbackManager {
private async ensureBackupDir(): Promise<void> {
await plugins.smartfile.fs.ensureDir(this.backupDir);
await plugins.smartfile.fs.ensureDir(plugins.path.join(this.backupDir, 'operations'));
await plugins.smartfile.fs.ensureDir(
plugins.path.join(this.backupDir, 'operations'),
);
}
private generateOperationId(): string {
@@ -177,7 +187,14 @@ export class RollbackManager {
const filename = plugins.path.basename(filepath);
const dir = plugins.path.dirname(filepath);
const safeDir = dir.replace(/[/\\]/g, '__');
return plugins.path.join(this.backupDir, 'operations', operationId, 'files', safeDir, `${filename}.backup`);
return plugins.path.join(
this.backupDir,
'operations',
operationId,
'files',
safeDir,
`${filename}.backup`,
);
}
private calculateChecksum(content: string | Buffer): string {
@@ -185,27 +202,68 @@ export class RollbackManager {
}
private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
const defaultManifest = { operations: [] };
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
if (!exists) {
return { operations: [] };
return defaultManifest;
}
const content = await plugins.smartfile.fs.toStringSync(this.manifestPath);
return JSON.parse(content);
try {
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
const manifest = JSON.parse(content);
// Validate the manifest structure
if (this.isValidManifest(manifest)) {
return manifest;
} else {
console.warn(
'Invalid rollback manifest structure, returning default manifest',
);
return defaultManifest;
}
} catch (error) {
console.warn(
`Failed to read rollback manifest: ${error.message}, returning default manifest`,
);
// Try to delete the corrupted file
try {
await plugins.smartfile.fs.remove(this.manifestPath);
} catch (removeError) {
// Ignore removal errors
}
return defaultManifest;
}
}
private async saveManifest(manifest: { operations: IFormatOperation[] }): Promise<void> {
await plugins.smartfile.memory.toFs(JSON.stringify(manifest, null, 2), this.manifestPath);
private async saveManifest(manifest: {
operations: IFormatOperation[];
}): Promise<void> {
// Validate before saving
if (!this.isValidManifest(manifest)) {
throw new Error('Invalid rollback manifest structure, cannot save');
}
// Ensure directory exists
await this.ensureBackupDir();
// Write directly with proper JSON stringification
const jsonContent = JSON.stringify(manifest, null, 2);
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
}
private async getOperation(operationId: string): Promise<IFormatOperation | null> {
private async getOperation(
operationId: string,
): Promise<IFormatOperation | null> {
const manifest = await this.getManifest();
return manifest.operations.find(op => op.id === operationId) || null;
return manifest.operations.find((op) => op.id === operationId) || null;
}
private async updateManifest(operation: IFormatOperation): Promise<void> {
const manifest = await this.getManifest();
const existingIndex = manifest.operations.findIndex(op => op.id === operation.id);
const existingIndex = manifest.operations.findIndex(
(op) => op.id === operation.id,
);
if (existingIndex !== -1) {
manifest.operations[existingIndex] = operation;
@@ -215,4 +273,46 @@ export class RollbackManager {
await this.saveManifest(manifest);
}
private isValidManifest(
manifest: any,
): manifest is { operations: IFormatOperation[] } {
// Check if manifest has the required structure
if (!manifest || typeof manifest !== 'object') {
return false;
}
// Check required fields
if (!Array.isArray(manifest.operations)) {
return false;
}
// Check each operation entry
for (const operation of manifest.operations) {
if (
!operation ||
typeof operation !== 'object' ||
typeof operation.id !== 'string' ||
typeof operation.timestamp !== 'number' ||
typeof operation.status !== 'string' ||
!Array.isArray(operation.files)
) {
return false;
}
// Check each file in the operation
for (const file of operation.files) {
if (
!file ||
typeof file !== 'object' ||
typeof file.path !== 'string' ||
typeof file.checksum !== 'string'
) {
return false;
}
}
}
return true;
}
}

View File

@@ -4,14 +4,21 @@ import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import { Project } from '../classes.project.js';
const filesToDelete = ['defaults.yml', 'yarn.lock', 'package-lock.json', 'tslint.json'];
const filesToDelete = [
'defaults.yml',
'yarn.lock',
'package-lock.json',
'tslint.json',
];
export const run = async (projectArg: Project) => {
for (const relativeFilePath of filesToDelete) {
const fileExists = plugins.smartfile.fs.fileExistsSync(relativeFilePath);
if (fileExists) {
logger.log('info', `Found ${relativeFilePath}! Removing it!`);
plugins.smartfile.fs.removeSync(plugins.path.join(paths.cwd, relativeFilePath));
plugins.smartfile.fs.removeSync(
plugins.path.join(paths.cwd, relativeFilePath),
);
} else {
logger.log('info', `Project is free of ${relativeFilePath}`);
}

View File

@@ -8,7 +8,7 @@ export const run = async (projectArg: Project) => {
// Get copy configuration from npmextra.json
const npmextraConfig = new plugins.npmextra.Npmextra();
const copyConfig = npmextraConfig.dataFor<any>('gitzone.format.copy', {
patterns: []
patterns: [],
});
if (!copyConfig.patterns || copyConfig.patterns.length === 0) {
@@ -40,7 +40,7 @@ export const run = async (projectArg: Project) => {
if (pattern.preservePath) {
const relativePath = plugins.path.relative(
plugins.path.dirname(pattern.from.replace(/\*/g, '')),
file
file,
);
destPath = plugins.path.join(pattern.to, relativePath);
}
@@ -53,7 +53,10 @@ export const run = async (projectArg: Project) => {
logger.log('info', `Copied ${sourcePath} to ${destPath}`);
}
} catch (error) {
logger.log('error', `Failed to copy pattern ${pattern.from}: ${error.message}`);
logger.log(
'error',
`Failed to copy pattern ${pattern.from}: ${error.message}`,
);
}
}
};

View File

@@ -8,14 +8,40 @@ const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
export const run = async (projectArg: Project) => {
const gitignoreExists = await plugins.smartfile.fs.fileExists(gitignorePath);
const templateModule = await import('../mod_template/index.js');
const ciTemplate = await templateModule.getTemplate('gitignore');
let customContent = '';
if (gitignoreExists) {
// lets get the existing gitignore file
const existingGitIgnoreString = plugins.smartfile.fs.toStringSync(gitignorePath);
let customPart = existingGitIgnoreString.split('# custom\n')[1];
customPart ? null : (customPart = '');
const existingGitIgnoreString =
plugins.smartfile.fs.toStringSync(gitignorePath);
// Check for different custom section markers
const customMarkers = ['#------# custom', '# custom'];
for (const marker of customMarkers) {
const splitResult = existingGitIgnoreString.split(marker);
if (splitResult.length > 1) {
// Get everything after the marker (excluding the marker itself)
customContent = splitResult[1].trim();
break;
}
}
}
// Write the template
const templateModule = await import('../mod_template/index.js');
const ciTemplate = await templateModule.getTemplate('gitignore');
await ciTemplate.writeToDisk(paths.cwd);
// Append the custom content if it exists
if (customContent) {
const newGitignoreContent =
plugins.smartfile.fs.toStringSync(gitignorePath);
// The template already ends with "#------# custom", so just append the content
const finalContent =
newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
await plugins.smartfile.fs.toFs(finalContent, gitignorePath);
logger.log('info', 'Updated .gitignore while preserving custom section!');
} else {
logger.log('info', 'Added a .gitignore!');
}
ciTemplate.writeToDisk(paths.cwd);
logger.log('info', 'Added a .gitignore!');
};

View File

@@ -24,7 +24,9 @@ export const run = async (projectArg: Project) => {
} else {
logger.log('error', 'Error -> licenses failed. Here is why:');
for (const failedModule of licenseCheckResult.failingModules) {
console.log(`${failedModule.name} fails with license ${failedModule.license}`);
console.log(
`${failedModule.name} fails with license ${failedModule.license}`,
);
}
}
};

View File

@@ -29,7 +29,12 @@ export const run = async (projectArg: Project) => {
const interactInstance = new plugins.smartinteract.SmartInteract();
for (const expectedRepoInformationItem of expectedRepoInformation) {
if (!plugins.smartobject.smartGet(npmextraJson.gitzone, expectedRepoInformationItem)) {
if (
!plugins.smartobject.smartGet(
npmextraJson.gitzone,
expectedRepoInformationItem,
)
) {
interactInstance.addQuestions([
{
message: `What is the value of ${expectedRepoInformationItem}`,
@@ -43,7 +48,9 @@ export const run = async (projectArg: Project) => {
const answerbucket = await interactInstance.runQueue();
for (const expectedRepoInformationItem of expectedRepoInformation) {
const cliProvidedValue = answerbucket.getAnswerFor(expectedRepoInformationItem);
const cliProvidedValue = answerbucket.getAnswerFor(
expectedRepoInformationItem,
);
if (cliProvidedValue) {
plugins.smartobject.smartAdd(
npmextraJson.gitzone,

View File

@@ -43,7 +43,8 @@ const ensureDependency = async (
break;
case 'include':
if (!packageJsonObjectArg[section][packageName]) {
packageJsonObjectArg[section][packageName] = version === 'latest' ? '^1.0.0' : version;
packageJsonObjectArg[section][packageName] =
version === 'latest' ? '^1.0.0' : version;
}
break;
case 'latest':
@@ -54,9 +55,13 @@ const ensureDependency = async (
const latestVersion = packageInfo['dist-tags'].latest;
packageJsonObjectArg[section][packageName] = `^${latestVersion}`;
} catch (error) {
logger.log('warn', `Could not fetch latest version for ${packageName}, using existing or default`);
logger.log(
'warn',
`Could not fetch latest version for ${packageName}, using existing or default`,
);
if (!packageJsonObjectArg[section][packageName]) {
packageJsonObjectArg[section][packageName] = version === 'latest' ? '^1.0.0' : version;
packageJsonObjectArg[section][packageName] =
version === 'latest' ? '^1.0.0' : version;
}
}
break;
@@ -78,10 +83,10 @@ export const run = async (projectArg: Project) => {
type: 'git',
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}.git`,
};
(packageJson.bugs = {
((packageJson.bugs = {
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}/issues`,
}),
(packageJson.homepage = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}#readme`);
(packageJson.homepage = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}#readme`));
// Check for module type
if (!packageJson.type) {
@@ -91,9 +96,15 @@ export const run = async (projectArg: Project) => {
// Check for private or public
if (packageJson.private !== undefined) {
logger.log('info', 'Success -> found private/public info in package.json!');
logger.log(
'info',
'Success -> found private/public info in package.json!',
);
} else {
logger.log('error', 'found no private boolean! Setting it to private for now!');
logger.log(
'error',
'found no private boolean! Setting it to private for now!',
);
packageJson.private = true;
}
@@ -101,7 +112,10 @@ export const run = async (projectArg: Project) => {
if (packageJson.license) {
logger.log('info', 'Success -> found license in package.json!');
} else {
logger.log('error', 'found no license! Setting it to UNLICENSED for now!');
logger.log(
'error',
'found no license! Setting it to UNLICENSED for now!',
);
packageJson.license = 'UNLICENSED';
}
@@ -109,13 +123,19 @@ export const run = async (projectArg: Project) => {
if (packageJson.scripts.build) {
logger.log('info', 'Success -> found build script in package.json!');
} else {
logger.log('error', 'found no build script! Putting a placeholder there for now!');
logger.log(
'error',
'found no build script! Putting a placeholder there for now!',
);
packageJson.scripts.build = `echo "Not needed for now"`;
}
// Check for buildDocs script
if (!packageJson.scripts.buildDocs) {
logger.log('info', 'found no buildDocs script! Putting tsdoc script there now.');
logger.log(
'info',
'found no buildDocs script! Putting tsdoc script there now.',
);
packageJson.scripts.buildDocs = `tsdoc`;
}
@@ -134,9 +154,24 @@ export const run = async (projectArg: Project) => {
];
// check for dependencies
await ensureDependency(packageJson, 'devDep', 'latest', '@push.rocks/tapbundle');
await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tstest');
await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tsbuild');
await ensureDependency(
packageJson,
'devDep',
'latest',
'@push.rocks/tapbundle',
);
await ensureDependency(
packageJson,
'devDep',
'latest',
'@git.zone/tstest',
);
await ensureDependency(
packageJson,
'devDep',
'latest',
'@git.zone/tsbuild',
);
// set overrides
const overrides = plugins.smartfile.fs.toObjectSync(

View File

@@ -16,7 +16,12 @@ const prettierDefaultMarkdownConfig: prettier.Options = {
parser: 'markdown',
};
const filesToFormat = [`ts/**/*.ts`, `test/**/*.ts`, `readme.md`, `docs/**/*.md`];
const filesToFormat = [
`ts/**/*.ts`,
`test/**/*.ts`,
`readme.md`,
`docs/**/*.md`,
];
const choosePrettierConfig = (fileArg: plugins.smartfile.SmartFile) => {
switch (fileArg.parsedPath.ext) {
@@ -39,7 +44,10 @@ const prettierTypeScriptPipestop = plugins.through2.obj(
cb(null);
} else {
logger.log('info', `${fileArg.path} is being reformated!`);
const formatedFileString = await prettier.format(fileString, chosenConfig);
const formatedFileString = await prettier.format(
fileString,
chosenConfig,
);
fileArg.setContentsFromString(formatedFileString);
cb(null, fileArg);
}

View File

@@ -18,7 +18,8 @@ export const run = async () => {
}
// Check and initialize readme.hints.md if it doesn't exist
const readmeHintsExists = await plugins.smartfile.fs.fileExists(readmeHintsPath);
const readmeHintsExists =
await plugins.smartfile.fs.fileExists(readmeHintsPath);
if (!readmeHintsExists) {
await plugins.smartfile.fs.toFs(
'# Project Readme Hints\n\nThis is the initial readme hints file.',

View File

@@ -26,10 +26,12 @@ export const run = async (project: Project) => {
case 'npm':
case 'wcc':
if (project.gitzoneConfig.data.npmciOptions.npmAccessLevel === 'public') {
const ciTemplateDefault = await templateModule.getTemplate('ci_default');
const ciTemplateDefault =
await templateModule.getTemplate('ci_default');
ciTemplateDefault.writeToDisk(paths.cwd);
} else {
const ciTemplateDefault = await templateModule.getTemplate('ci_default_private');
const ciTemplateDefault =
await templateModule.getTemplate('ci_default_private');
ciTemplateDefault.writeToDisk(paths.cwd);
}
logger.log('info', 'Updated .gitlabci.yml!');
@@ -41,7 +43,8 @@ export const run = async (project: Project) => {
logger.log('info', 'Updated CI/CD config files!');
// lets care about docker
const dockerTemplate = await templateModule.getTemplate('dockerfile_service');
const dockerTemplate =
await templateModule.getTemplate('dockerfile_service');
dockerTemplate.writeToDisk(paths.cwd);
logger.log('info', 'Updated Dockerfile!');
@@ -56,17 +59,22 @@ export const run = async (project: Project) => {
// update html
if (project.gitzoneConfig.data.projectType === 'website') {
const websiteUpdateTemplate = await templateModule.getTemplate('website_update');
const variables ={
const websiteUpdateTemplate =
await templateModule.getTemplate('website_update');
const variables = {
assetbrokerUrl: project.gitzoneConfig.data.module.assetbrokerUrl,
legalUrl: project.gitzoneConfig.data.module.legalUrl,
};
console.log('updating website template with variables\n', JSON.stringify(variables, null, 2));
console.log(
'updating website template with variables\n',
JSON.stringify(variables, null, 2),
);
websiteUpdateTemplate.supplyVariables(variables);
await websiteUpdateTemplate.writeToDisk(paths.cwd);
logger.log('info', `Updated html for website!`);
} else if (project.gitzoneConfig.data.projectType === 'service') {
const websiteUpdateTemplate = await templateModule.getTemplate('service_update');
const websiteUpdateTemplate =
await templateModule.getTemplate('service_update');
await websiteUpdateTemplate.writeToDisk(paths.cwd);
logger.log('info', `Updated html for element template!`);
} else if (project.gitzoneConfig.data.projectType === 'wcc') {

View File

@@ -19,8 +19,12 @@ export const run = async (projectArg: Project) => {
const publishModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
for (const publishModule of Object.keys(publishModules)) {
const publishConfig = publishModules[publishModule];
tsconfigObject.compilerOptions.paths[`${publishConfig.name}`] = [`./${publishModule}/index.js`];
tsconfigObject.compilerOptions.paths[`${publishConfig.name}`] = [
`./${publishModule}/index.js`,
];
}
tsconfigSmartfile.setContentsFromString(JSON.stringify(tsconfigObject, null, 2));
tsconfigSmartfile.setContentsFromString(
JSON.stringify(tsconfigObject, null, 2),
);
await tsconfigSmartfile.write();
};

View File

@@ -12,7 +12,12 @@ export class CleanupFormatter extends BaseFormatter {
const changes: IPlannedChange[] = [];
// List of files to remove
const filesToRemove = ['yarn.lock', 'package-lock.json', 'tslint.json', 'defaults.yml'];
const filesToRemove = [
'yarn.lock',
'package-lock.json',
'tslint.json',
'defaults.yml',
];
for (const file of filesToRemove) {
const exists = await plugins.smartfile.fs.fileExists(file);
@@ -21,7 +26,7 @@ export class CleanupFormatter extends BaseFormatter {
type: 'delete',
path: file,
module: this.name,
description: `Remove obsolete file`
description: `Remove obsolete file`,
});
}
}

View File

@@ -8,7 +8,12 @@ export class LegacyFormatter extends BaseFormatter {
private moduleName: string;
private formatModule: any;
constructor(context: any, project: Project, moduleName: string, formatModule: any) {
constructor(
context: any,
project: Project,
moduleName: string,
formatModule: any,
) {
super(context, project);
this.moduleName = moduleName;
this.formatModule = formatModule;
@@ -21,12 +26,14 @@ export class LegacyFormatter extends BaseFormatter {
async analyze(): Promise<IPlannedChange[]> {
// For legacy modules, we can't easily predict changes
// So we'll return a generic change that indicates the module will run
return [{
type: 'modify',
path: '<various files>',
module: this.name,
description: `Run ${this.name} formatter`
}];
return [
{
type: 'modify',
path: '<various files>',
module: this.name,
description: `Run ${this.name} formatter`,
},
];
}
async applyChange(change: IPlannedChange): Promise<void> {

View File

@@ -10,15 +10,76 @@ export class PrettierFormatter extends BaseFormatter {
async analyze(): Promise<IPlannedChange[]> {
const changes: IPlannedChange[] = [];
const globPattern = '**/*.{ts,tsx,js,jsx,json,md,css,scss,html,xml,yaml,yml}';
// Define directories to format (TypeScript directories by default)
const includeDirs = ['ts', 'ts_*', 'test', 'tests'];
// File extensions to format
const extensions = '{ts,tsx,js,jsx,json,md,css,scss,html,xml,yaml,yml}';
// Also format root-level config files
const rootConfigFiles = [
'package.json',
'tsconfig.json',
'npmextra.json',
'.prettierrc',
'.prettierrc.json',
'.prettierrc.js',
'readme.md',
'README.md',
'changelog.md',
'CHANGELOG.md',
// Skip files without extensions as prettier can't infer parser
// 'license',
// 'LICENSE',
'*.md',
];
// Collect all files to format
const allFiles: string[] = [];
// Add files from TypeScript directories
for (const dir of includeDirs) {
const globPattern = `${dir}/**/*.${extensions}`;
const dirFiles = await plugins.smartfile.fs.listFileTree(
'.',
globPattern,
);
allFiles.push(...dirFiles);
}
// Add root config files
for (const pattern of rootConfigFiles) {
const rootFiles = await plugins.smartfile.fs.listFileTree('.', pattern);
// Only include files at root level (no slashes in path)
const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
allFiles.push(...rootLevelFiles);
}
// Remove duplicates
const uniqueFiles = [...new Set(allFiles)];
// Get all files that match the pattern
const files = await plugins.smartfile.fs.listFileTree('.', globPattern);
const files = uniqueFiles;
// Ensure we only process actual files (not directories)
const validFiles: string[] = [];
for (const file of files) {
try {
const stats = await plugins.smartfile.fs.stat(file);
if (!stats.isDirectory()) {
validFiles.push(file);
}
} catch (error) {
// Skip files that can't be accessed
logVerbose(`Skipping ${file} - cannot access: ${error.message}`);
}
}
// Check which files need formatting
for (const file of files) {
for (const file of validFiles) {
// Skip files that haven't changed
if (!await this.shouldProcessFile(file)) {
if (!(await this.shouldProcessFile(file))) {
logVerbose(`Skipping ${file} - no changes detected`);
continue;
}
@@ -27,7 +88,7 @@ export class PrettierFormatter extends BaseFormatter {
type: 'modify',
path: file,
module: this.name,
description: 'Format with Prettier'
description: 'Format with Prettier',
});
}
@@ -42,38 +103,31 @@ export class PrettierFormatter extends BaseFormatter {
try {
await this.preExecute();
// Batch process files
const batchSize = 10; // Process 10 files at a time
const batches: IPlannedChange[][] = [];
logVerbose(`Processing ${changes.length} files sequentially`);
for (let i = 0; i < changes.length; i += batchSize) {
batches.push(changes.slice(i, i + batchSize));
}
// Process files sequentially to avoid prettier cache/state issues
for (let i = 0; i < changes.length; i++) {
const change = changes[i];
logVerbose(
`Processing file ${i + 1}/${changes.length}: ${change.path}`,
);
logVerbose(`Processing ${changes.length} files in ${batches.length} batches`);
for (let i = 0; i < batches.length; i++) {
const batch = batches[i];
logVerbose(`Processing batch ${i + 1}/${batches.length} (${batch.length} files)`);
// Process batch in parallel
const promises = batch.map(async (change) => {
try {
await this.applyChange(change);
this.stats.recordFileOperation(this.name, change.type, true);
} catch (error) {
this.stats.recordFileOperation(this.name, change.type, false);
logger.log('error', `Failed to format ${change.path}: ${error.message}`);
// Don't throw - continue with other files
}
});
await Promise.all(promises);
try {
await this.applyChange(change);
this.stats.recordFileOperation(this.name, change.type, true);
} catch (error) {
this.stats.recordFileOperation(this.name, change.type, false);
logger.log(
'error',
`Failed to format ${change.path}: ${error.message}`,
);
// Don't throw - continue with other files
}
}
await this.postExecute();
} catch (error) {
await this.context.rollbackOperation();
// Rollback removed - no longer tracking operations
throw error;
} finally {
this.stats.endModule(this.name, startTime);
@@ -84,27 +138,71 @@ export class PrettierFormatter extends BaseFormatter {
if (change.type !== 'modify') return;
try {
// Validate the path before processing
if (!change.path || change.path.trim() === '') {
logger.log(
'error',
`Invalid empty path in change: ${JSON.stringify(change)}`,
);
throw new Error('Invalid empty path');
}
// Read current content
const content = await plugins.smartfile.fs.toStringSync(change.path);
const content = plugins.smartfile.fs.toStringSync(change.path);
// Format with prettier
const prettier = await import('prettier');
const formatted = await prettier.format(content, {
filepath: change.path,
...(await this.getPrettierConfig())
});
// Only write if content actually changed
if (formatted !== content) {
await this.modifyFile(change.path, formatted);
logVerbose(`Formatted ${change.path}`);
} else {
// Still update cache even if content didn't change
await this.cache.updateFileCache(change.path);
logVerbose(`No formatting changes for ${change.path}`);
// Skip files that prettier can't parse without explicit parser
const fileExt = plugins.path.extname(change.path).toLowerCase();
if (!fileExt || fileExt === '') {
// Files without extensions need explicit parser
logVerbose(
`Skipping ${change.path} - no file extension for parser inference`,
);
return;
}
try {
const formatted = await prettier.format(content, {
filepath: change.path,
...(await this.getPrettierConfig()),
});
// Only write if content actually changed
if (formatted !== content) {
// Debug: log the path being written
logVerbose(`Writing formatted content to: ${change.path}`);
await this.modifyFile(change.path, formatted);
logVerbose(`Formatted ${change.path}`);
} else {
logVerbose(`No formatting changes for ${change.path}`);
}
} catch (prettierError) {
// Check if it's a parser error
if (
prettierError.message &&
prettierError.message.includes('No parser could be inferred')
) {
logVerbose(`Skipping ${change.path} - ${prettierError.message}`);
return; // Skip this file silently
}
throw prettierError;
}
} catch (error) {
logger.log('error', `Failed to format ${change.path}: ${error.message}`);
// Log the full error stack for debugging mkdir issues
if (error.message && error.message.includes('mkdir')) {
logger.log(
'error',
`Failed to format ${change.path}: ${error.message}`,
);
logger.log('error', `Error stack: ${error.stack}`);
} else {
logger.log(
'error',
`Failed to format ${change.path}: ${error.message}`,
);
}
throw error;
}
}
@@ -119,7 +217,7 @@ export class PrettierFormatter extends BaseFormatter {
printWidth: 80,
tabWidth: 2,
semi: true,
arrowParens: 'always'
arrowParens: 'always',
});
}
}

View File

@@ -8,12 +8,14 @@ export class ReadmeFormatter extends BaseFormatter {
}
async analyze(): Promise<IPlannedChange[]> {
return [{
type: 'modify',
path: 'readme.md',
module: this.name,
description: 'Ensure readme files exist'
}];
return [
{
type: 'modify',
path: 'readme.md',
module: this.name,
description: 'Ensure readme files exist',
},
];
}
async applyChange(change: IPlannedChange): Promise<void> {

View File

@@ -16,17 +16,19 @@ import { PrettierFormatter } from './formatters/prettier.formatter.js';
import { ReadmeFormatter } from './formatters/readme.formatter.js';
import { CopyFormatter } from './formatters/copy.formatter.js';
export let run = async (options: {
dryRun?: boolean;
yes?: boolean;
planOnly?: boolean;
savePlan?: string;
fromPlan?: string;
detailed?: boolean;
interactive?: boolean;
parallel?: boolean;
verbose?: boolean;
} = {}): Promise<any> => {
export let run = async (
options: {
dryRun?: boolean;
yes?: boolean;
planOnly?: boolean;
savePlan?: string;
fromPlan?: string;
detailed?: boolean;
interactive?: boolean;
parallel?: boolean;
verbose?: boolean;
} = {},
): Promise<any> => {
// Set verbose mode if requested
if (options.verbose) {
setVerboseMode(true);
@@ -34,7 +36,7 @@ export let run = async (options: {
const project = await Project.fromCwd();
const context = new FormatContext();
await context.initializeCache(); // Initialize the cache system
// Cache system removed - no longer needed
const planner = new FormatPlanner();
// Get configuration from npmextra
@@ -49,24 +51,21 @@ export let run = async (options: {
autoRollbackOnError: true,
backupRetentionDays: 7,
maxBackupSize: '100MB',
excludePatterns: ['node_modules/**', '.git/**']
excludePatterns: ['node_modules/**', '.git/**'],
},
modules: {
skip: [],
only: [],
order: []
order: [],
},
parallel: true,
cache: {
enabled: true,
clean: true // Clean invalid entries from cache
}
clean: true, // Clean invalid entries from cache
},
});
// Clean cache if configured
if (formatConfig.cache.clean) {
await context.getChangeCache().clean();
}
// Cache cleaning removed - no longer using cache system
// Override config with command options
const interactive = options.interactive ?? formatConfig.interactive;
@@ -89,7 +88,7 @@ export let run = async (options: {
];
// Filter formatters based on configuration
const activeFormatters = formatters.filter(formatter => {
const activeFormatters = formatters.filter((formatter) => {
if (formatConfig.modules.only.length > 0) {
return formatConfig.modules.only.includes(formatter.name);
}
@@ -110,7 +109,10 @@ export let run = async (options: {
// Save plan if requested
if (options.savePlan) {
await plugins.smartfile.memory.toFs(JSON.stringify(plan, null, 2), options.savePlan);
await plugins.smartfile.memory.toFs(
JSON.stringify(plan, null, 2),
options.savePlan,
);
logger.log('info', `Plan saved to ${options.savePlan}`);
}
@@ -132,7 +134,7 @@ export let run = async (options: {
type: 'confirm',
name: 'proceed',
message: 'Proceed with formatting?',
default: true
default: true,
});
if (!(response as any).value) {
@@ -142,7 +144,10 @@ export let run = async (options: {
}
// Execute phase
logger.log('info', `Executing format operations${parallel ? ' in parallel' : ' sequentially'}...`);
logger.log(
'info',
`Executing format operations${parallel ? ' in parallel' : ' sequentially'}...`,
);
await planner.executePlan(plan, activeFormatters, context, parallel);
// Finish statistics tracking
@@ -161,20 +166,10 @@ export let run = async (options: {
}
logger.log('success', 'Format operations completed successfully!');
} catch (error) {
logger.log('error', `Format operation failed: ${error.message}`);
// Automatic rollback if enabled
if (formatConfig.rollback.enabled && formatConfig.rollback.autoRollbackOnError) {
logger.log('info', 'Attempting automatic rollback...');
try {
await context.rollbackOperation();
logger.log('success', 'Rollback completed successfully');
} catch (rollbackError) {
logger.log('error', `Rollback failed: ${rollbackError.message}`);
}
}
// Rollback system has been removed for stability
throw error;
}
@@ -182,67 +177,16 @@ export let run = async (options: {
// Export CLI command handlers
export const handleRollback = async (operationId?: string): Promise<void> => {
const context = new FormatContext();
const rollbackManager = context.getRollbackManager();
if (!operationId) {
// Rollback to last operation
const backups = await rollbackManager.listBackups();
const lastOperation = backups
.filter(op => op.status !== 'rolled-back')
.sort((a, b) => b.timestamp - a.timestamp)[0];
if (!lastOperation) {
logger.log('warn', 'No operations available for rollback');
return;
}
operationId = lastOperation.id;
}
try {
await rollbackManager.rollback(operationId);
logger.log('success', `Successfully rolled back operation ${operationId}`);
} catch (error) {
logger.log('error', `Rollback failed: ${error.message}`);
throw error;
}
logger.log('info', 'Rollback system has been disabled for stability');
};
export const handleListBackups = async (): Promise<void> => {
const context = new FormatContext();
const rollbackManager = context.getRollbackManager();
const backups = await rollbackManager.listBackups();
if (backups.length === 0) {
logger.log('info', 'No backup operations found');
return;
}
console.log('\nAvailable backups:');
console.log('━'.repeat(50));
for (const backup of backups) {
const date = new Date(backup.timestamp).toLocaleString();
const status = backup.status;
const filesCount = backup.files.length;
console.log(`ID: ${backup.id}`);
console.log(`Date: ${date}`);
console.log(`Status: ${status}`);
console.log(`Files: ${filesCount}`);
console.log('─'.repeat(50));
}
logger.log('info', 'Backup system has been disabled for stability');
};
export const handleCleanBackups = async (): Promise<void> => {
const context = new FormatContext();
const rollbackManager = context.getRollbackManager();
// Get retention days from config
const npmextraConfig = new plugins.npmextra.Npmextra();
const retentionDays = npmextraConfig.dataFor<any>('gitzone.format.rollback.backupRetentionDays', 7);
await rollbackManager.cleanOldBackups(retentionDays);
logger.log('success', `Cleaned backups older than ${retentionDays} days`);
logger.log(
'info',
'Backup cleaning has been disabled - backup system removed',
);
};
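
For reference, a hedged sketch of how the reworked run signature is invoked; the import path is illustrative and not taken from this diff:

// Illustrative import path; the actual module layout is not shown here.
import { run } from './mod_format/index.js';

await run({
  dryRun: true,                          // plan and report, but do not write files
  verbose: true,                         // enables setVerboseMode(true)
  savePlan: '.nogit/format-plan.json',   // persisted via smartfile.memory.toFs
});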

View File

@@ -9,7 +9,7 @@ export type IFormatOperation = {
}>;
status: 'pending' | 'in-progress' | 'completed' | 'failed' | 'rolled-back';
error?: Error;
}
};
export type IFormatPlan = {
summary: {
@@ -32,7 +32,7 @@ export type IFormatPlan = {
message: string;
module: string;
}>;
}
};
export type IPlannedChange = {
type: 'create' | 'modify' | 'delete';
@@ -42,4 +42,4 @@ export type IPlannedChange = {
content?: string; // For create/modify operations
diff?: string;
size?: number;
}
};
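
A small object satisfying IPlannedChange, mirroring what the readme formatter's analyze() returns above (the import path is illustrative):

import type { IPlannedChange } from './interfaces.js'; // illustrative path

const change: IPlannedChange = {
  type: 'modify',
  path: 'readme.md',
  module: 'readme',
  description: 'Ensure readme files exist',
};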

View File

@@ -35,7 +35,10 @@ export class Meta {
* sorts the metaRepoData
*/
public async sortMetaRepoData() {
const stringifiedMetadata = plugins.smartjson.stringify(this.metaRepoData, []);
const stringifiedMetadata = plugins.smartjson.stringify(
this.metaRepoData,
[],
);
this.metaRepoData = plugins.smartjson.parse(stringifiedMetadata);
}
@@ -45,11 +48,15 @@ export class Meta {
public async readDirectory() {
await this.syncToRemote(true);
logger.log('info', `reading directory`);
const metaFileExists = plugins.smartfile.fs.fileExistsSync(this.filePaths.metaJson);
const metaFileExists = plugins.smartfile.fs.fileExistsSync(
this.filePaths.metaJson,
);
if (!metaFileExists) {
throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
}
this.metaRepoData = plugins.smartfile.fs.toObjectSync(this.filePaths.metaJson);
this.metaRepoData = plugins.smartfile.fs.toObjectSync(
this.filePaths.metaJson,
);
}
/**
@@ -76,7 +83,10 @@ export class Meta {
this.filePaths.metaJson,
);
// write .gitignore to disk
plugins.smartfile.memory.toFsSync(await this.generateGitignore(), this.filePaths.gitIgnore);
plugins.smartfile.memory.toFsSync(
await this.generateGitignore(),
this.filePaths.gitIgnore,
);
}
/**
@@ -84,13 +94,17 @@ export class Meta {
*/
public async syncToRemote(gitCleanArg = false) {
logger.log('info', `syncing from origin master`);
await this.smartshellInstance.exec(`cd ${this.cwd} && git pull origin master`);
await this.smartshellInstance.exec(
`cd ${this.cwd} && git pull origin master`,
);
if (gitCleanArg) {
logger.log('info', `cleaning the repository from old directories`);
await this.smartshellInstance.exec(`cd ${this.cwd} && git clean -fd`);
}
logger.log('info', `syncing to remote origin master`);
await this.smartshellInstance.exec(`cd ${this.cwd} && git push origin master`);
await this.smartshellInstance.exec(
`cd ${this.cwd} && git push origin master`,
);
}
/**
@@ -98,7 +112,9 @@ export class Meta {
*/
public async updateLocalRepos() {
await this.syncToRemote();
const projects = plugins.smartfile.fs.toObjectSync(this.filePaths.metaJson).projects;
const projects = plugins.smartfile.fs.toObjectSync(
this.filePaths.metaJson,
).projects;
const preExistingFolders = plugins.smartfile.fs.listFoldersSync(this.cwd);
for (const preExistingFolderArg of preExistingFolders) {
if (
@@ -107,14 +123,18 @@ export class Meta {
projectFolder.startsWith(preExistingFolderArg),
)
) {
const response = await plugins.smartinteraction.SmartInteract.getCliConfirmation(
`Do you want to delete superfluous directory >>${preExistingFolderArg}<< ?`,
true,
);
const response =
await plugins.smartinteraction.SmartInteract.getCliConfirmation(
`Do you want to delete superfluous directory >>${preExistingFolderArg}<< ?`,
true,
);
if (response) {
logger.log('warn', `Deleting >>${preExistingFolderArg}<<!`);
} else {
logger.log('warn', `Not deleting ${preExistingFolderArg} by request!`);
logger.log(
'warn',
`Not deleting ${preExistingFolderArg} by request!`,
);
}
}
}
@@ -160,7 +180,9 @@ export class Meta {
*/
public async initProject() {
await this.syncToRemote(true);
const fileExists = await plugins.smartfile.fs.fileExists(this.filePaths.metaJson);
const fileExists = await plugins.smartfile.fs.fileExists(
this.filePaths.metaJson,
);
if (!fileExists) {
await plugins.smartfile.memory.toFs(
JSON.stringify({
@@ -168,7 +190,10 @@ export class Meta {
}),
this.filePaths.metaJson,
);
logger.log(`success`, `created a new .meta.json in directory ${this.cwd}`);
logger.log(
`success`,
`created a new .meta.json in directory ${this.cwd}`,
);
await plugins.smartfile.memory.toFs(
JSON.stringify({
name: this.dirName,
@@ -176,9 +201,15 @@ export class Meta {
}),
this.filePaths.packageJson,
);
logger.log(`success`, `created a new package.json in directory ${this.cwd}`);
logger.log(
`success`,
`created a new package.json in directory ${this.cwd}`,
);
} else {
logger.log(`error`, `directory ${this.cwd} already has a .metaJson file. Doing nothing.`);
logger.log(
`error`,
`directory ${this.cwd} already has a .metaJson file. Doing nothing.`,
);
}
await this.smartshellInstance.exec(
`cd ${this.cwd} && git add -A && git commit -m "feat(project): init meta project for ${this.dirName}"`,
@@ -195,7 +226,9 @@ export class Meta {
const existingProject = this.metaRepoData.projects[projectNameArg];
if (existingProject) {
throw new Error('Project already exists! Please remove it first before adding it again.');
throw new Error(
'Project already exists! Please remove it first before adding it again.',
);
}
this.metaRepoData.projects[projectNameArg] = gitUrlArg;
@@ -217,7 +250,10 @@ export class Meta {
const existingProject = this.metaRepoData.projects[projectNameArg];
if (!existingProject) {
logger.log('error', `Project ${projectNameArg} does not exist! So it cannot be removed`);
logger.log(
'error',
`Project ${projectNameArg} does not exist! So it cannot be removed`,
);
return;
}
@@ -228,7 +264,9 @@ export class Meta {
await this.writeToDisk();
logger.log('info', 'removing directory from cwd');
await plugins.smartfile.fs.remove(plugins.path.join(paths.cwd, projectNameArg));
await plugins.smartfile.fs.remove(
plugins.path.join(paths.cwd, projectNameArg),
);
await this.updateLocalRepos();
}
}

View File

@@ -0,0 +1,226 @@
import * as plugins from './mod.plugins.js';
import * as helpers from './helpers.js';
export type ContainerStatus = 'running' | 'stopped' | 'not_exists';
export interface IDockerRunOptions {
name: string;
image: string;
ports?: { [key: string]: string };
volumes?: { [key: string]: string };
environment?: { [key: string]: string };
restart?: string;
command?: string;
}
export class DockerContainer {
private smartshell: plugins.smartshell.Smartshell;
constructor() {
this.smartshell = new plugins.smartshell.Smartshell({
executor: 'bash',
});
}
/**
* Check if Docker is installed and available
*/
public async checkDocker(): Promise<boolean> {
try {
const result = await this.smartshell.exec('docker --version');
return result.exitCode === 0;
} catch (error) {
return false;
}
}
/**
* Get container status
*/
public async getStatus(containerName: string): Promise<ContainerStatus> {
try {
// Check if running
const runningResult = await this.smartshell.exec(
`docker ps --format '{{.Names}}' | grep -q "^${containerName}$"`
);
if (runningResult.exitCode === 0) {
return 'running';
}
// Check if exists but stopped
const existsResult = await this.smartshell.exec(
`docker ps -a --format '{{.Names}}' | grep -q "^${containerName}$"`
);
if (existsResult.exitCode === 0) {
return 'stopped';
}
return 'not_exists';
} catch (error) {
return 'not_exists';
}
}
/**
* Start a container
*/
public async start(containerName: string): Promise<boolean> {
try {
const result = await this.smartshell.exec(`docker start ${containerName}`);
return result.exitCode === 0;
} catch (error) {
return false;
}
}
/**
* Stop a container
*/
public async stop(containerName: string): Promise<boolean> {
try {
const result = await this.smartshell.exec(`docker stop ${containerName}`);
return result.exitCode === 0;
} catch (error) {
return false;
}
}
/**
* Remove a container
*/
public async remove(containerName: string, force: boolean = false): Promise<boolean> {
try {
const forceFlag = force ? '-f' : '';
const result = await this.smartshell.exec(`docker rm ${forceFlag} ${containerName}`);
return result.exitCode === 0;
} catch (error) {
return false;
}
}
/**
* Run a new container
*/
public async run(options: IDockerRunOptions): Promise<boolean> {
let command = 'docker run -d';
// Add name
command += ` --name ${options.name}`;
// Add ports
if (options.ports) {
for (const [hostPort, containerPort] of Object.entries(options.ports)) {
command += ` -p ${hostPort}:${containerPort}`;
}
}
// Add volumes
if (options.volumes) {
for (const [hostPath, containerPath] of Object.entries(options.volumes)) {
command += ` -v "${hostPath}:${containerPath}"`;
}
}
// Add environment variables
if (options.environment) {
for (const [key, value] of Object.entries(options.environment)) {
command += ` -e ${key}="${value}"`;
}
}
// Add restart policy
if (options.restart) {
command += ` --restart ${options.restart}`;
}
// Add image
command += ` ${options.image}`;
// Add command if provided
if (options.command) {
command += ` ${options.command}`;
}
try {
const result = await this.smartshell.exec(command);
return result.exitCode === 0;
} catch (error) {
helpers.printMessage(`Failed to run container: ${error.message}`, 'red');
return false;
}
}
/**
* Execute a command in a running container
*/
public async exec(containerName: string, command: string): Promise<string> {
try {
const result = await this.smartshell.exec(`docker exec ${containerName} ${command}`);
if (result.exitCode === 0) {
return result.stdout;
}
return '';
} catch (error) {
return '';
}
}
/**
* Get container logs
*/
public async logs(containerName: string, lines?: number): Promise<string> {
try {
const tailFlag = lines ? `--tail ${lines}` : '';
const result = await this.smartshell.exec(`docker logs ${tailFlag} ${containerName}`);
return result.stdout;
} catch (error) {
return `Error getting logs: ${error.message}`;
}
}
/**
* Check if a container exists
*/
public async exists(containerName: string): Promise<boolean> {
const status = await this.getStatus(containerName);
return status !== 'not_exists';
}
/**
* Check if a container is running
*/
public async isRunning(containerName: string): Promise<boolean> {
const status = await this.getStatus(containerName);
return status === 'running';
}
/**
* Wait for a container to be ready
*/
public async waitForReady(containerName: string, maxAttempts: number = 30): Promise<boolean> {
for (let i = 0; i < maxAttempts; i++) {
if (await this.isRunning(containerName)) {
return true;
}
await plugins.smartdelay.delayFor(1000);
}
return false;
}
/**
* Get container information
*/
public async inspect(containerName: string): Promise<any> {
try {
const result = await this.smartshell.exec(`docker inspect ${containerName}`);
if (result.exitCode === 0) {
return JSON.parse(result.stdout);
}
return null;
} catch (error) {
return null;
}
}
}
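
A minimal usage sketch for the DockerContainer wrapper above; the container name and image are illustrative:

import { DockerContainer } from './classes.dockercontainer.js';

const docker = new DockerContainer();
if (!(await docker.checkDocker())) {
  throw new Error('Docker is not available on this machine');
}
const created = await docker.run({
  name: 'demo-mongodb',           // illustrative container name
  image: 'mongo:7.0',
  ports: { '27017': '27017' },    // hostPort: containerPort
  restart: 'unless-stopped',
});
if (created) {
  await docker.waitForReady('demo-mongodb'); // polls isRunning(), one second apart
  console.log(await docker.logs('demo-mongodb', 10));
}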

View File

@@ -0,0 +1,245 @@
import * as plugins from './mod.plugins.js';
import * as helpers from './helpers.js';
export interface IServiceConfig {
PROJECT_NAME: string;
MONGODB_HOST: string;
MONGODB_NAME: string;
MONGODB_PORT: string;
MONGODB_USER: string;
MONGODB_PASS: string;
S3_HOST: string;
S3_PORT: string;
S3_CONSOLE_PORT: string;
S3_USER: string;
S3_PASS: string;
S3_BUCKET: string;
}
export class ServiceConfiguration {
private configPath: string;
private config: IServiceConfig;
constructor() {
this.configPath = plugins.path.join(process.cwd(), '.nogit', 'env.json');
}
/**
* Load or create the configuration
*/
public async loadOrCreate(): Promise<IServiceConfig> {
await this.ensureNogitDirectory();
if (await this.configExists()) {
await this.loadConfig();
await this.updateMissingFields();
} else {
await this.createDefaultConfig();
}
return this.config;
}
/**
* Get the current configuration
*/
public getConfig(): IServiceConfig {
return this.config;
}
/**
* Save the configuration to file
*/
public async saveConfig(): Promise<void> {
await plugins.smartfile.memory.toFs(
JSON.stringify(this.config, null, 2),
this.configPath
);
}
/**
* Ensure .nogit directory exists
*/
private async ensureNogitDirectory(): Promise<void> {
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
await plugins.smartfile.fs.ensureDir(nogitPath);
}
/**
* Check if configuration file exists
*/
private async configExists(): Promise<boolean> {
return plugins.smartfile.fs.fileExists(this.configPath);
}
/**
* Load configuration from file
*/
private async loadConfig(): Promise<void> {
const configContent = await plugins.smartfile.fs.toStringSync(this.configPath);
this.config = JSON.parse(configContent);
}
/**
* Create default configuration
*/
private async createDefaultConfig(): Promise<void> {
const projectName = helpers.getProjectName();
const mongoPort = await helpers.getRandomAvailablePort();
const s3Port = await helpers.getRandomAvailablePort();
let s3ConsolePort = s3Port + 1;
// Ensure console port is also available
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
s3ConsolePort++;
}
this.config = {
PROJECT_NAME: projectName,
MONGODB_HOST: 'localhost',
MONGODB_NAME: projectName,
MONGODB_PORT: mongoPort.toString(),
MONGODB_USER: 'defaultadmin',
MONGODB_PASS: 'defaultpass',
S3_HOST: 'localhost',
S3_PORT: s3Port.toString(),
S3_CONSOLE_PORT: s3ConsolePort.toString(),
S3_USER: 'defaultadmin',
S3_PASS: 'defaultpass',
S3_BUCKET: `${projectName}-documents`
};
await this.saveConfig();
helpers.printMessage('✅ Created .nogit/env.json with project defaults', 'green');
helpers.printMessage(`📍 MongoDB port: ${mongoPort}`, 'blue');
helpers.printMessage(`📍 S3 API port: ${s3Port}`, 'blue');
helpers.printMessage(`📍 S3 Console port: ${s3ConsolePort}`, 'blue');
}
/**
* Update missing fields in existing configuration
*/
private async updateMissingFields(): Promise<void> {
const projectName = helpers.getProjectName();
let updated = false;
const fieldsAdded: string[] = [];
// Check and add missing fields
if (!this.config.PROJECT_NAME) {
this.config.PROJECT_NAME = projectName;
fieldsAdded.push('PROJECT_NAME');
updated = true;
}
if (!this.config.MONGODB_HOST) {
this.config.MONGODB_HOST = 'localhost';
fieldsAdded.push('MONGODB_HOST');
updated = true;
}
if (!this.config.MONGODB_NAME) {
this.config.MONGODB_NAME = projectName;
fieldsAdded.push('MONGODB_NAME');
updated = true;
}
if (!this.config.MONGODB_PORT) {
const port = await helpers.getRandomAvailablePort();
this.config.MONGODB_PORT = port.toString();
fieldsAdded.push(`MONGODB_PORT(${port})`);
updated = true;
}
if (!this.config.MONGODB_USER) {
this.config.MONGODB_USER = 'defaultadmin';
fieldsAdded.push('MONGODB_USER');
updated = true;
}
if (!this.config.MONGODB_PASS) {
this.config.MONGODB_PASS = 'defaultpass';
fieldsAdded.push('MONGODB_PASS');
updated = true;
}
if (!this.config.S3_HOST) {
this.config.S3_HOST = 'localhost';
fieldsAdded.push('S3_HOST');
updated = true;
}
if (!this.config.S3_PORT) {
const port = await helpers.getRandomAvailablePort();
this.config.S3_PORT = port.toString();
fieldsAdded.push(`S3_PORT(${port})`);
updated = true;
}
if (!this.config.S3_CONSOLE_PORT) {
const s3Port = parseInt(this.config.S3_PORT);
let consolePort = s3Port + 1;
while (!(await helpers.isPortAvailable(consolePort))) {
consolePort++;
}
this.config.S3_CONSOLE_PORT = consolePort.toString();
fieldsAdded.push(`S3_CONSOLE_PORT(${consolePort})`);
updated = true;
}
if (!this.config.S3_USER) {
this.config.S3_USER = 'defaultadmin';
fieldsAdded.push('S3_USER');
updated = true;
}
if (!this.config.S3_PASS) {
this.config.S3_PASS = 'defaultpass';
fieldsAdded.push('S3_PASS');
updated = true;
}
if (!this.config.S3_BUCKET) {
this.config.S3_BUCKET = `${projectName}-documents`;
fieldsAdded.push('S3_BUCKET');
updated = true;
}
if (updated) {
await this.saveConfig();
helpers.printMessage(`✅ Added missing fields: ${fieldsAdded.join(', ')}`, 'green');
} else {
helpers.printMessage('✅ Configuration complete', 'green');
}
}
/**
* Get MongoDB connection string
*/
public getMongoConnectionString(useNetworkIp: boolean = false): string {
// Note: '${networkIp}' is a plain string here, not a template literal, so the
// caller passing useNetworkIp is expected to substitute the real network IP.
const host = useNetworkIp ? '${networkIp}' : this.config.MONGODB_HOST;
return `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${host}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
}
/**
* Get container names
*/
public getContainerNames() {
return {
mongo: `${this.config.PROJECT_NAME}-mongodb`,
minio: `${this.config.PROJECT_NAME}-minio`
};
}
/**
* Get data directories
*/
public getDataDirectories() {
return {
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata')
};
}
}
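
A sketch of how the configuration class is consumed; loadOrCreate() provisions .nogit/env.json on first use and backfills missing fields on later runs:

import { ServiceConfiguration } from './classes.serviceconfiguration.js';

const serviceConfig = new ServiceConfiguration();
const env = await serviceConfig.loadOrCreate();        // creates or completes .nogit/env.json

console.log(env.MONGODB_PORT);                          // randomly assigned in the 20000-30000 range
console.log(serviceConfig.getContainerNames().mongo);   // e.g. "<project>-mongodb"
console.log(serviceConfig.getMongoConnectionString());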

View File

@@ -0,0 +1,412 @@
import * as plugins from './mod.plugins.js';
import * as helpers from './helpers.js';
import { ServiceConfiguration } from './classes.serviceconfiguration.js';
import { DockerContainer } from './classes.dockercontainer.js';
export class ServiceManager {
private config: ServiceConfiguration;
private docker: DockerContainer;
constructor() {
this.config = new ServiceConfiguration();
this.docker = new DockerContainer();
}
/**
* Initialize the service manager
*/
public async init(): Promise<void> {
// Check Docker availability
if (!(await this.docker.checkDocker())) {
helpers.printMessage('Error: Docker is not installed. Please install Docker first.', 'red');
process.exit(1);
}
// Load or create configuration
await this.config.loadOrCreate();
helpers.printMessage(`📋 Project: ${this.config.getConfig().PROJECT_NAME}`, 'magenta');
}
/**
* Start MongoDB service
*/
public async startMongoDB(): Promise<void> {
helpers.printMessage('📦 MongoDB:', 'yellow');
const config = this.config.getConfig();
const containers = this.config.getContainerNames();
const directories = this.config.getDataDirectories();
// Ensure data directory exists
await plugins.smartfile.fs.ensureDir(directories.mongo);
const status = await this.docker.getStatus(containers.mongo);
switch (status) {
case 'running':
helpers.printMessage(' Already running ✓', 'green');
break;
case 'stopped':
if (await this.docker.start(containers.mongo)) {
helpers.printMessage(' Started ✓', 'green');
} else {
helpers.printMessage(' Failed to start', 'red');
}
break;
case 'not_exists':
helpers.printMessage(' Creating container...', 'yellow');
const success = await this.docker.run({
name: containers.mongo,
image: 'mongo:7.0',
ports: {
[`0.0.0.0:${config.MONGODB_PORT}`]: '27017'
},
volumes: {
[directories.mongo]: '/data/db'
},
environment: {
MONGO_INITDB_ROOT_USERNAME: config.MONGODB_USER,
MONGO_INITDB_ROOT_PASSWORD: config.MONGODB_PASS,
MONGO_INITDB_DATABASE: config.MONGODB_NAME
},
restart: 'unless-stopped'
});
if (success) {
helpers.printMessage(' Created and started ✓', 'green');
} else {
helpers.printMessage(' Failed to create container', 'red');
}
break;
}
helpers.printMessage(` Container: ${containers.mongo}`, 'cyan');
helpers.printMessage(` Port: ${config.MONGODB_PORT}`, 'cyan');
helpers.printMessage(` Connection: ${this.config.getMongoConnectionString()}`, 'blue');
}
/**
* Start MinIO service
*/
public async startMinIO(): Promise<void> {
helpers.printMessage('📦 S3/MinIO:', 'yellow');
const config = this.config.getConfig();
const containers = this.config.getContainerNames();
const directories = this.config.getDataDirectories();
// Ensure data directory exists
await plugins.smartfile.fs.ensureDir(directories.minio);
const status = await this.docker.getStatus(containers.minio);
switch (status) {
case 'running':
helpers.printMessage(' Already running ✓', 'green');
break;
case 'stopped':
if (await this.docker.start(containers.minio)) {
helpers.printMessage(' Started ✓', 'green');
} else {
helpers.printMessage(' Failed to start', 'red');
}
break;
case 'not_exists':
helpers.printMessage(' Creating container...', 'yellow');
const success = await this.docker.run({
name: containers.minio,
image: 'minio/minio',
ports: {
[config.S3_PORT]: '9000',
[config.S3_CONSOLE_PORT]: '9001'
},
volumes: {
[directories.minio]: '/data'
},
environment: {
MINIO_ROOT_USER: config.S3_USER,
MINIO_ROOT_PASSWORD: config.S3_PASS
},
restart: 'unless-stopped',
command: 'server /data --console-address ":9001"'
});
if (success) {
helpers.printMessage(' Created and started ✓', 'green');
// Wait for MinIO to be ready
await plugins.smartdelay.delayFor(3000);
// Create default bucket
await this.docker.exec(
containers.minio,
`mc alias set local http://localhost:9000 ${config.S3_USER} ${config.S3_PASS}`
);
await this.docker.exec(
containers.minio,
`mc mb local/${config.S3_BUCKET}`
);
helpers.printMessage(` Bucket '${config.S3_BUCKET}' created ✓`, 'green');
} else {
helpers.printMessage(' Failed to create container', 'red');
}
break;
}
helpers.printMessage(` Container: ${containers.minio}`, 'cyan');
helpers.printMessage(` Port: ${config.S3_PORT}`, 'cyan');
helpers.printMessage(` Bucket: ${config.S3_BUCKET}`, 'cyan');
helpers.printMessage(` API: http://${config.S3_HOST}:${config.S3_PORT}`, 'blue');
helpers.printMessage(` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_USER}/***)`, 'blue');
}
/**
* Stop MongoDB service
*/
public async stopMongoDB(): Promise<void> {
helpers.printMessage('📦 MongoDB:', 'yellow');
const containers = this.config.getContainerNames();
const status = await this.docker.getStatus(containers.mongo);
if (status === 'running') {
if (await this.docker.stop(containers.mongo)) {
helpers.printMessage(' Stopped ✓', 'green');
} else {
helpers.printMessage(' Failed to stop', 'red');
}
} else {
helpers.printMessage(' Not running', 'yellow');
}
}
/**
* Stop MinIO service
*/
public async stopMinIO(): Promise<void> {
helpers.printMessage('📦 S3/MinIO:', 'yellow');
const containers = this.config.getContainerNames();
const status = await this.docker.getStatus(containers.minio);
if (status === 'running') {
if (await this.docker.stop(containers.minio)) {
helpers.printMessage(' Stopped ✓', 'green');
} else {
helpers.printMessage(' Failed to stop', 'red');
}
} else {
helpers.printMessage(' Not running', 'yellow');
}
}
/**
* Show service status
*/
public async showStatus(): Promise<void> {
helpers.printHeader('Service Status');
const config = this.config.getConfig();
const containers = this.config.getContainerNames();
helpers.printMessage(`Project: ${config.PROJECT_NAME}`, 'magenta');
console.log();
// MongoDB status
const mongoStatus = await this.docker.getStatus(containers.mongo);
switch (mongoStatus) {
case 'running':
helpers.printMessage('📦 MongoDB: 🟢 Running', 'green');
helpers.printMessage(` ├─ Container: ${containers.mongo}`, 'cyan');
helpers.printMessage(` └─ ${this.config.getMongoConnectionString()}`, 'cyan');
break;
case 'stopped':
helpers.printMessage('📦 MongoDB: 🟡 Stopped', 'yellow');
helpers.printMessage(` └─ Container: ${containers.mongo}`, 'cyan');
break;
case 'not_exists':
helpers.printMessage('📦 MongoDB: ⚪ Not installed', 'magenta');
break;
}
// MinIO status
const minioStatus = await this.docker.getStatus(containers.minio);
switch (minioStatus) {
case 'running':
helpers.printMessage('📦 S3/MinIO: 🟢 Running', 'green');
helpers.printMessage(` ├─ Container: ${containers.minio}`, 'cyan');
helpers.printMessage(` ├─ API: http://${config.S3_HOST}:${config.S3_PORT}`, 'cyan');
helpers.printMessage(` ├─ Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`, 'cyan');
helpers.printMessage(` └─ Bucket: ${config.S3_BUCKET}`, 'cyan');
break;
case 'stopped':
helpers.printMessage('📦 S3/MinIO: 🟡 Stopped', 'yellow');
helpers.printMessage(` └─ Container: ${containers.minio}`, 'cyan');
break;
case 'not_exists':
helpers.printMessage('📦 S3/MinIO: ⚪ Not installed', 'magenta');
break;
}
}
/**
* Show configuration
*/
public async showConfig(): Promise<void> {
helpers.printHeader('Current Configuration');
const config = this.config.getConfig();
helpers.printMessage(`Project: ${config.PROJECT_NAME}`, 'magenta');
console.log();
helpers.printMessage('MongoDB:', 'yellow');
helpers.printMessage(` Host: ${config.MONGODB_HOST}:${config.MONGODB_PORT}`, undefined);
helpers.printMessage(` Database: ${config.MONGODB_NAME}`, undefined);
helpers.printMessage(` User: ${config.MONGODB_USER}`, undefined);
helpers.printMessage(' Password: ***', undefined);
helpers.printMessage(` Container: ${this.config.getContainerNames().mongo}`, undefined);
helpers.printMessage(` Data: ${this.config.getDataDirectories().mongo}`, undefined);
helpers.printMessage(` Connection: ${this.config.getMongoConnectionString()}`, 'blue');
console.log();
helpers.printMessage('S3/MinIO:', 'yellow');
helpers.printMessage(` Host: ${config.S3_HOST}`, undefined);
helpers.printMessage(` API Port: ${config.S3_PORT}`, undefined);
helpers.printMessage(` Console Port: ${config.S3_CONSOLE_PORT}`, undefined);
helpers.printMessage(` User: ${config.S3_USER}`, undefined);
helpers.printMessage(' Password: ***', undefined);
helpers.printMessage(` Bucket: ${config.S3_BUCKET}`, undefined);
helpers.printMessage(` Container: ${this.config.getContainerNames().minio}`, undefined);
helpers.printMessage(` Data: ${this.config.getDataDirectories().minio}`, undefined);
helpers.printMessage(` API URL: http://${config.S3_HOST}:${config.S3_PORT}`, 'blue');
helpers.printMessage(` Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`, 'blue');
}
/**
* Show MongoDB Compass connection string
*/
public async showCompassConnection(): Promise<void> {
helpers.printHeader('MongoDB Compass Connection');
const config = this.config.getConfig();
const networkIp = await helpers.getLocalNetworkIp();
const connectionString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
helpers.printMessage('MongoDB Compass is a GUI tool for MongoDB. To connect:', 'cyan');
console.log();
helpers.printMessage('1. Download MongoDB Compass from:', undefined);
helpers.printMessage(' https://www.mongodb.com/products/compass', 'blue');
console.log();
helpers.printMessage('2. Open Compass and paste this connection string:', undefined);
helpers.printMessage(` ${connectionString}`, 'green');
console.log();
helpers.printMessage('Connection Details:', 'yellow');
helpers.printMessage(` Network IP: ${networkIp}`, undefined);
helpers.printMessage(` Port: ${config.MONGODB_PORT}`, undefined);
helpers.printMessage(` Database: ${config.MONGODB_NAME}`, undefined);
helpers.printMessage(` Username: ${config.MONGODB_USER}`, undefined);
helpers.printMessage(` Auth Source: admin`, undefined);
}
/**
* Show logs for a service
*/
public async showLogs(service: string, lines: number = 20): Promise<void> {
const containers = this.config.getContainerNames();
switch (service) {
case 'mongo':
case 'mongodb':
if (await this.docker.isRunning(containers.mongo)) {
helpers.printHeader(`MongoDB Logs (last ${lines} lines)`);
const logs = await this.docker.logs(containers.mongo, lines);
console.log(logs);
} else {
helpers.printMessage('MongoDB container is not running', 'yellow');
}
break;
case 'minio':
case 's3':
if (await this.docker.isRunning(containers.minio)) {
helpers.printHeader(`S3/MinIO Logs (last ${lines} lines)`);
const logs = await this.docker.logs(containers.minio, lines);
console.log(logs);
} else {
helpers.printMessage('S3/MinIO container is not running', 'yellow');
}
break;
case 'all':
case '':
await this.showLogs('mongo', lines);
console.log();
await this.showLogs('minio', lines);
break;
default:
helpers.printMessage('Usage: gitzone services logs [mongo|s3|all] [lines]', 'yellow');
break;
}
}
/**
* Remove containers
*/
public async removeContainers(): Promise<void> {
const containers = this.config.getContainerNames();
let removed = false;
if (await this.docker.exists(containers.mongo)) {
if (await this.docker.remove(containers.mongo, true)) {
helpers.printMessage(' MongoDB container removed ✓', 'green');
removed = true;
}
}
if (await this.docker.exists(containers.minio)) {
if (await this.docker.remove(containers.minio, true)) {
helpers.printMessage(' S3/MinIO container removed ✓', 'green');
removed = true;
}
}
if (!removed) {
helpers.printMessage(' No containers to remove', 'yellow');
}
}
/**
* Clean data directories
*/
public async cleanData(): Promise<void> {
const directories = this.config.getDataDirectories();
let cleaned = false;
if (await plugins.smartfile.fs.fileExists(directories.mongo)) {
await plugins.smartfile.fs.remove(directories.mongo);
helpers.printMessage(' MongoDB data removed ✓', 'green');
cleaned = true;
}
if (await plugins.smartfile.fs.fileExists(directories.minio)) {
await plugins.smartfile.fs.remove(directories.minio);
helpers.printMessage(' S3/MinIO data removed ✓', 'green');
cleaned = true;
}
if (!cleaned) {
helpers.printMessage(' No data to clean', 'yellow');
}
}
}
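
The manager ties configuration and Docker control together; a usage sketch:

import { ServiceManager } from './classes.servicemanager.js';

const manager = new ServiceManager();
await manager.init();           // verifies Docker, loads .nogit/env.json
await manager.startMongoDB();   // creates or starts the project-specific container
await manager.startMinIO();
await manager.showStatus();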

ts/mod_services/helpers.ts Normal file
View File

@@ -0,0 +1,148 @@
import * as plugins from './mod.plugins.js';
import * as net from 'net';
/**
* Check if a port is available
*/
export const isPortAvailable = async (port: number): Promise<boolean> => {
return new Promise((resolve) => {
const server = net.createServer();
server.once('error', () => {
resolve(false);
});
server.once('listening', () => {
server.close();
resolve(true);
});
server.listen(port, '0.0.0.0');
});
};
/**
* Get a random available port between 20000 and 30000
*/
export const getRandomAvailablePort = async (): Promise<number> => {
const maxAttempts = 100;
for (let i = 0; i < maxAttempts; i++) {
const port = Math.floor(Math.random() * 10001) + 20000;
if (await isPortAvailable(port)) {
return port;
}
}
// Fallback: let the system assign a port
return 0;
};
/**
* Get the project name from package.json or directory
*/
export const getProjectName = (): string => {
try {
const packageJsonPath = plugins.path.join(process.cwd(), 'package.json');
if (plugins.smartfile.fs.fileExistsSync(packageJsonPath)) {
const packageJson = plugins.smartfile.fs.toObjectSync(packageJsonPath);
if (packageJson.name) {
// Sanitize: @fin.cx/skr → fin-cx-skr
return packageJson.name.replace(/@/g, '').replace(/[\/\.]/g, '-');
}
}
} catch (error) {
// Ignore errors and fall back to directory name
}
return plugins.path.basename(process.cwd());
};
/**
* Print colored message to console
*/
export const printMessage = (message: string, color?: 'green' | 'yellow' | 'red' | 'blue' | 'magenta' | 'cyan') => {
const logger = new plugins.smartlog.ConsoleLog();
switch (color) {
case 'green':
logger.log('ok', message);
break;
case 'yellow':
logger.log('note', message);
break;
case 'red':
logger.log('error', message);
break;
case 'blue':
case 'magenta':
case 'cyan':
logger.log('info', message);
break;
default:
logger.log('info', message);
}
};
/**
* Print a header with decorative lines
*/
export const printHeader = (title: string) => {
console.log();
printMessage('═══════════════════════════════════════════════════════════════', 'cyan');
printMessage(` ${title}`, 'cyan');
printMessage('═══════════════════════════════════════════════════════════════', 'cyan');
console.log();
};
/**
* Format bytes to human readable string
*/
export const formatBytes = (bytes: number): string => {
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
let size = bytes;
let unitIndex = 0;
while (size >= 1024 && unitIndex < units.length - 1) {
size /= 1024;
unitIndex++;
}
return `${size.toFixed(2)} ${units[unitIndex]}`;
};
/**
* Get the local network IP address
*/
export const getLocalNetworkIp = async (): Promise<string> => {
const smartnetworkInstance = new plugins.smartnetwork.SmartNetwork();
const gateways = await smartnetworkInstance.getGateways();
// Find the best local IP from network interfaces
for (const interfaceName of Object.keys(gateways)) {
const interfaces = gateways[interfaceName];
for (const iface of interfaces) {
// Skip loopback and internal interfaces
if (!iface.internal && iface.family === 'IPv4') {
const address = iface.address;
// Prefer LAN IPs
if (address.startsWith('192.168.') || address.startsWith('10.') || address.startsWith('172.')) {
return address;
}
}
}
}
// Fallback: try to get any non-internal IPv4
for (const interfaceName of Object.keys(gateways)) {
const interfaces = gateways[interfaceName];
for (const iface of interfaces) {
if (!iface.internal && iface.family === 'IPv4') {
return iface.address;
}
}
}
// Last resort: localhost
return 'localhost';
};
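
A short sketch exercising the port helpers above:

import { isPortAvailable, getRandomAvailablePort, getProjectName } from './helpers.js';

const mongoPort = await getRandomAvailablePort();            // 20000-30000, or 0 as a fallback
const consolePortFree = await isPortAvailable(mongoPort + 1);
console.log(`${getProjectName()} -> MongoDB on ${mongoPort}, console port free: ${consolePortFree}`);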

ts/mod_services/index.ts Normal file
View File

@@ -0,0 +1,218 @@
import * as plugins from './mod.plugins.js';
import * as helpers from './helpers.js';
import { ServiceManager } from './classes.servicemanager.js';
export const run = async (argvArg: any) => {
const serviceManager = new ServiceManager();
await serviceManager.init();
const command = argvArg._[1] || 'help';
const service = argvArg._[2] || 'all';
switch (command) {
case 'start':
await handleStart(serviceManager, service);
break;
case 'stop':
await handleStop(serviceManager, service);
break;
case 'restart':
await handleRestart(serviceManager, service);
break;
case 'status':
await serviceManager.showStatus();
break;
case 'config':
await serviceManager.showConfig();
break;
case 'compass':
await serviceManager.showCompassConnection();
break;
case 'logs':
const lines = parseInt(argvArg._[3]) || 20;
await serviceManager.showLogs(service, lines);
break;
case 'remove':
await handleRemove(serviceManager);
break;
case 'clean':
await handleClean(serviceManager);
break;
case 'help':
default:
showHelp();
break;
}
};
async function handleStart(serviceManager: ServiceManager, service: string) {
helpers.printHeader('Starting Services');
switch (service) {
case 'mongo':
case 'mongodb':
await serviceManager.startMongoDB();
break;
case 'minio':
case 's3':
await serviceManager.startMinIO();
break;
case 'all':
case '':
await serviceManager.startMongoDB();
console.log();
await serviceManager.startMinIO();
break;
default:
helpers.printMessage(`Unknown service: ${service}`, 'red');
helpers.printMessage('Use: mongo, s3, or all', 'yellow');
break;
}
}
async function handleStop(serviceManager: ServiceManager, service: string) {
helpers.printHeader('Stopping Services');
switch (service) {
case 'mongo':
case 'mongodb':
await serviceManager.stopMongoDB();
break;
case 'minio':
case 's3':
await serviceManager.stopMinIO();
break;
case 'all':
case '':
await serviceManager.stopMongoDB();
console.log();
await serviceManager.stopMinIO();
break;
default:
helpers.printMessage(`Unknown service: ${service}`, 'red');
helpers.printMessage('Use: mongo, s3, or all', 'yellow');
break;
}
}
async function handleRestart(serviceManager: ServiceManager, service: string) {
helpers.printHeader('Restarting Services');
switch (service) {
case 'mongo':
case 'mongodb':
await serviceManager.stopMongoDB();
await plugins.smartdelay.delayFor(2000);
await serviceManager.startMongoDB();
break;
case 'minio':
case 's3':
await serviceManager.stopMinIO();
await plugins.smartdelay.delayFor(2000);
await serviceManager.startMinIO();
break;
case 'all':
case '':
await serviceManager.stopMongoDB();
await serviceManager.stopMinIO();
await plugins.smartdelay.delayFor(2000);
await serviceManager.startMongoDB();
console.log();
await serviceManager.startMinIO();
break;
default:
helpers.printMessage(`Unknown service: ${service}`, 'red');
break;
}
}
async function handleRemove(serviceManager: ServiceManager) {
helpers.printHeader('Removing Containers');
helpers.printMessage('⚠️ This will remove containers but preserve data', 'yellow');
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation('Continue?', false);
if (shouldContinue) {
await serviceManager.removeContainers();
} else {
helpers.printMessage('Cancelled', 'yellow');
}
}
async function handleClean(serviceManager: ServiceManager) {
helpers.printHeader('Clean All');
helpers.printMessage('⚠️ WARNING: This will remove all containers and data!', 'red');
helpers.printMessage('This action cannot be undone!', 'red');
const smartinteraction = new plugins.smartinteract.SmartInteract();
const confirmAnswer = await smartinteraction.askQuestion({
name: 'confirm',
type: 'input',
message: 'Type "yes" to confirm:',
default: 'no'
});
if (confirmAnswer.value === 'yes') {
await serviceManager.removeContainers();
console.log();
await serviceManager.cleanData();
helpers.printMessage('All cleaned ✓', 'green');
} else {
helpers.printMessage('Cancelled', 'yellow');
}
}
function showHelp() {
helpers.printHeader('GitZone Services Manager');
helpers.printMessage('Usage: gitzone services [command] [options]', 'green');
console.log();
helpers.printMessage('Commands:', 'yellow');
helpers.printMessage(' start [service] Start services (mongo|s3|all)', undefined);
helpers.printMessage(' stop [service] Stop services (mongo|s3|all)', undefined);
helpers.printMessage(' restart [service] Restart services (mongo|s3|all)', undefined);
helpers.printMessage(' status Show service status', undefined);
helpers.printMessage(' config Show current configuration', undefined);
helpers.printMessage(' compass Show MongoDB Compass connection string', undefined);
helpers.printMessage(' logs [service] Show logs (mongo|s3|all) [lines]', undefined);
helpers.printMessage(' remove Remove all containers', undefined);
helpers.printMessage(' clean Remove all containers and data ⚠️', undefined);
helpers.printMessage(' help Show this help message', undefined);
console.log();
helpers.printMessage('Features:', 'yellow');
helpers.printMessage(' • Auto-creates .nogit/env.json with smart defaults', undefined);
helpers.printMessage(' • Random ports (20000-30000) to avoid conflicts', undefined);
helpers.printMessage(' • Project-specific containers for multi-project support', undefined);
helpers.printMessage(' • Preserves custom configuration values', undefined);
helpers.printMessage(' • MongoDB Compass connection support', undefined);
console.log();
helpers.printMessage('Examples:', 'yellow');
helpers.printMessage(' gitzone services start # Start all services', undefined);
helpers.printMessage(' gitzone services start mongo # Start only MongoDB', undefined);
helpers.printMessage(' gitzone services stop # Stop all services', undefined);
helpers.printMessage(' gitzone services status # Check service status', undefined);
helpers.printMessage(' gitzone services config # Show configuration', undefined);
helpers.printMessage(' gitzone services compass # Get MongoDB Compass connection', undefined);
helpers.printMessage(' gitzone services logs mongo 50 # Show last 50 lines of MongoDB logs', undefined);
}
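
The dispatcher expects minimist-style positionals in argvArg._, normally supplied by the CLI layer; a hedged sketch of a direct call (the import path is illustrative):

import { run } from './mod_services/index.js'; // illustrative path

// Equivalent to: gitzone services logs mongo 50
await run({ _: ['services', 'logs', 'mongo', '50'] });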

View File

@@ -0,0 +1,9 @@
export * from '../plugins.js';
import * as smartshell from '@push.rocks/smartshell';
import * as smartfile from '@push.rocks/smartfile';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartnetwork from '@push.rocks/smartnetwork';
import * as smartdelay from '@push.rocks/smartdelay';
export { smartshell, smartfile, smartinteract, smartnetwork, smartdelay };

View File

@@ -16,7 +16,9 @@ export let run = () => {
* create a new project with 'gitzone template [template]'
the following templates exist: ${(() => {
let projects = `\n`;
for (const template of plugins.smartfile.fs.listFoldersSync(paths.templatesDir)) {
for (const template of plugins.smartfile.fs.listFoldersSync(
paths.templatesDir,
)) {
projects += ` - ${template}\n`;
}
return projects;

View File

@@ -15,7 +15,9 @@ export const run = async (argvArg: any) => {
});
await smartshellInstance.execStrict(`cd ${paths.cwd} && git checkout master`);
await smartshellInstance.execStrict(`cd ${paths.cwd} && git pull origin master`);
await smartshellInstance.execStrict(
`cd ${paths.cwd} && git pull origin master`,
);
await smartshellInstance.execStrict(`cd ${paths.cwd} && npm ci`);
await provideNoGitFiles();

View File

@@ -16,7 +16,9 @@ export const isTemplate = async (templateNameArg: string) => {
export const getTemplate = async (templateNameArg: string) => {
if (isTemplate(templateNameArg)) {
const localScafTemplate = new plugins.smartscaf.ScafTemplate(getTemplatePath(templateNameArg));
const localScafTemplate = new plugins.smartscaf.ScafTemplate(
getTemplatePath(templateNameArg),
);
await localScafTemplate.readTemplateFromDir();
return localScafTemplate;
} else {
@@ -32,7 +34,8 @@ export const run = async (argvArg: any) => {
const answerBucket = await smartinteract.askQuestion({
type: 'list',
default: 'npm',
message: 'What template do you want to scaffold? (Only showing mpost common options)',
message:
'What template do you want to scaffold? (Only showing most common options)',
name: 'templateName',
choices: ['npm', 'service', 'wcc', 'website'],
});

View File

@@ -7,6 +7,11 @@ import * as smartcli from '@push.rocks/smartcli';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartupdate from '@push.rocks/smartupdate';
import * as smartshell from '@push.rocks/smartshell';
import * as smartnetwork from '@push.rocks/smartnetwork';
import * as smartfile from '@push.rocks/smartfile';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartdelay from '@push.rocks/smartdelay';
export {
smartlog,
@@ -18,4 +23,9 @@ export {
smartpath,
smartpromise,
smartupdate,
smartshell,
smartnetwork,
smartfile,
smartinteract,
smartdelay,
};

View File

@@ -10,7 +10,5 @@
"baseUrl": ".",
"paths": {}
},
"exclude": [
"dist_*/**/*.d.ts"
]
"exclude": ["dist_*/**/*.d.ts"]
}