Compare commits
23 Commits
- 7b9ebfdacb
- 05b170cbac
- b320af0b61
- 49e1ee1f39
- cef31cf1ff
- 74ecdde1ac
- 74a8229e43
- 859cbc733d
- d32d47b706
- fd90cfe895
- c48f48fc8b
- e21e7f0850
- 5f561527f9
- 9f5f568c3f
- 39a31a4304
- b629a7d70b
- 4003944139
- 83d374dffd
- 49a5a66440
- 20a53d4d92
- fe02b990b3
- c013fbf42e
- 949f273317
.gitignore (vendored, 4 lines changed)
@@ -16,4 +16,6 @@ node_modules/
dist/
dist_*/

#------# custom
.serena
test-output.json
@@ -27,8 +27,8 @@ auditProductionDependencies:
  image: code.foss.global/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command pnpm audit --audit-level=high --prod
  tags:
    - private
    - docker

@@ -27,8 +27,8 @@ auditProductionDependencies:
  image: code.foss.global/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command pnpm audit --audit-level=high --prod
  tags:
    - private
    - docker

@@ -25,8 +25,8 @@ auditProductionDependencies:
  image: code.foss.global/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command pnpm audit --audit-level=high --prod
  tags:
    - private
    - docker

@@ -2,4 +2,3 @@ runafter:
  - git add -A && git commit -m initial
  - git push origin master
  - gitzone meta update
changelog.md (174 lines changed)
@@ -1,12 +1,127 @@
# Changelog

## 2025-08-15 - 1.17.1 - fix(services)
Improve services module logging and enhance MongoDB Compass integration

- Refactored services module to use centralized logger from gitzone.logging.ts
- Automatically display MongoDB Compass connection string when starting services or checking status
- Removed custom printMessage wrapper in favor of standard logger.log() calls
- Consistent logging across all service commands

## 2025-08-14 - 1.17.0 - feat(services)
Add comprehensive development services management for MongoDB and MinIO containers

- Implemented `gitzone services` command for managing local development services
- Added MongoDB and MinIO (S3-compatible) container orchestration
- Smart port assignment (20000-30000 range) to avoid conflicts between projects
- Project-specific container names for complete isolation
- Data persistence in `.nogit/` directories
- MongoDB Compass connection string generation with network IP detection
- Auto-configuration via `.nogit/env.json` with secure defaults
- Commands: start, stop, restart, status, config, compass, logs, remove, clean
- Interactive confirmations for destructive operations

## 2025-08-08 - 1.16.10 - fix(format)
Improve concurrency control in caching and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing.

- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
- Updated gitignore logic to detect and preserve custom sections
- Enhanced Prettier batching and file formatting for better performance

## 2025-08-08 - 1.16.9 - fix(format)

Improve concurrency control in cache and rollback modules, refine gitignore custom section handling, and enhance Prettier file processing

- Added mutex locking in ChangeCache and RollbackManager to prevent race conditions during manifest updates
- Updated gitignore logic to detect and preserve existing custom sections from various markers
- Simplified Prettier formatter to process files sequentially, skip files without extensions, and log detailed status
- Minor refactoring in base formatter and tsconfig file updates for improved reliability

## 2025-08-08 - 1.16.8 - fix(format)

Improve concurrency control in cache and rollback management with mutex locking and refine formatting details

- Added 'withMutex' functions in ChangeCache and RollbackManager to synchronize file I/O operations
- Introduced static mutex maps to prevent race conditions during manifest updates
- Fixed minor formatting issues in commit info and package.json

## 2025-08-08 - 1.16.7 - fix(core)

Improve formatting, logging, and rollback integrity in core modules

- Add .claude/settings.local.json with defined permissions for allowed commands
- Standardize formatting in package.json, commit info, and configuration files
- Refactor rollback manager to use atomic manifest writes and validate manifest structure
- Enhance logging messages and overall code clarity in CLI and commit modules

## 2025-08-08 - 1.16.6 - fix(changecache)

Improve cache manifest validation and atomic file writes; add local settings and overrides

- Add manifest structure validation and default fallback in getManifest
- Implement atomic write in saveManifest using a temporary file and rename strategy
- Enhance error handling and cleanup for corrupted manifest files
- Introduce new .claude/settings.local.json for project-specific permission configuration
- Add an empty assets/overrides.json file for future overrides

## 2025-08-08 - 1.16.5 - fix(prettier)

Improve file selection in Prettier formatter, remove legacy package overrides, and update CI template indentation

- Added .claude/settings.local.json with updated permission settings for local commands
- Removed unnecessary overrides from assets/overrides.json and cleared packageManager overrides in package.json
- Adjusted CI template files (ci_default_gitlab, ci_default_private_gitlab, ci_docker_gitlab) for consistent indentation and formatting
- Refined Prettier formatter logic by defining include directories, root config files, and filtering duplicates instead of manual exclusion

## 2025-08-08 - 1.16.4 - fix(prettier)

Improve file exclusion in the Prettier formatter to skip unnecessary files and directories.

- Added exclusion patterns for node_modules, .git, dist, .nogit, coverage, .nyc_output, vendor, bower_components, jspm_packages, and minified files.
- Optimized filtering logic to ensure only valid files are processed.

## 2025-08-08 - 1.16.3 - fix(changecache/prettier)

Skip directories during file processing to prevent errors in changecache and prettier formatting

- Removed unnecessary await on synchronous file reads in changecache
- Added directory checks in changecache to immediately skip directories
- Filtered out directories in prettier formatter to avoid processing non-files

## 2025-08-07 - 1.16.2 - fix(format)

Fix format command confirmation prompt to correctly check user response

- Fixed bug where format command always showed "cancelled" even when user confirmed
- Changed response check from `response.proceed` to `response.value` for SmartInteract compatibility

## 2025-08-04 - 1.16.1 - fix(package/config)

Move smartdiff dependency to runtime and add local bash permissions settings

- Moved '@push.rocks/smartdiff' from devDependencies to dependencies in package.json
- Added .claude/settings.local.json with allowed bash commands (grep, mkdir, find, ls)

## 2025-05-19 - 1.16.0 - feat(format)

Enhance format module with rollback, diff reporting, and improved parallel execution

- Implemented rollback functionality with backup management and automatic rollback on error
- Added CLI commands for rollback, listing backups, and cleaning old backups
- Introduced DiffReporter for generating and displaying file diffs
- Improved file change caching via ChangeCache and expanded dependency analysis for parallel execution
- Updated logging to support verbose mode and enhanced user feedback
- Updated package.json to include new dependency '@push.rocks/smartdiff'

## 2025-05-14 - 1.15.5 - fix(dependencies)

Update @git.zone/tsdoc to ^1.5.0 and @types/node to ^22.15.18

- Bumped @git.zone/tsdoc from ^1.4.5 to ^1.5.0
- Bumped @types/node from ^22.15.17 to ^22.15.18

## 2025-05-13 - 1.15.4 - fix(package.json)

Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/smartlog, and @types/node to their latest releases

- Upgrade @git.zone/tsdoc from ^1.4.4 to ^1.4.5
@@ -15,6 +130,7 @@ Update dependency versions: bump @git.zone/tsdoc, @push.rocks/lik, @push.rocks/s
- Upgrade @types/node from ^22.14.1 to ^22.15.17

## 2025-04-15 - 1.15.3 - fix(deps)

update dependency versions and improve website template variable handling

- Bumped @git.zone/tsbuild from ^2.2.1 to ^2.3.2 and @types/node to ^22.14.1
@@ -22,56 +138,65 @@ update dependency versions and improve website template variable handling
- Refactored website template update to correctly supply variables with added logging

## 2025-04-15 - 1.15.2 - fix(website_update)

Await supplyVariables call in website update template

- Changed website template update to properly await the supplyVariables method
- Ensured asynchronous consistency in updating website template variables

## 2025-04-15 - 1.15.1 - fix(cli)

Refresh internal CLI tooling and configuration for consistency.

## 2025-04-15 - 1.15.0 - feat(config/template)

Add assetbrokerUrl and legalUrl fields to module config and update website template to supply these values

- Added assetbrokerUrl and legalUrl properties in ts/classes.gitzoneconfig.ts
- Updated ts/mod_format/format.templates.ts to pass assetbrokerUrl and legalUrl to website template

## 2025-04-15 - 1.14.1 - fix(package.json)

Add packageManager field to specify pnpm version for consistent package management

- Inserted packageManager property in package.json with pnpm version info to ensure reproducible dependency installs

## 2025-04-15 - 1.14.0 - feat(tsconfig_update)

Add runafter directive to trigger gitzone format after tsconfig update

- Added runafter configuration in assets/templates/tsconfig_update/.smartscaf.yml to automate formatting task

## 2025-03-07 - 1.13.1 - fix(cli)

Improve commit message logging

- Updated logging to display recommended next commit details.
- Enabled interactive prompt for choosing commit type and scope.

## 2025-02-28 - 1.13.0 - feat(templates)

Updated and added new TypeScript template files for npm projects

- Added new paths.ts and plugins.ts template files for npm projects.
- Removed outdated some.plugins.ts template file.

## 2025-02-25 - 1.12.8 - fix(metadata)

Updated package and npmextra json description and keywords for enhanced development workflow clarity

- Updated the description in package.json to focus on project setup and management.
- Aligned the keywords in both package.json and npmextra.json to include more relevant terms such as gitzone utilities, template management, and CI/CD.

## 2025-02-25 - 1.12.7 - fix(meta)

Fix issues in project metadata and configuration.

- Updated package metadata to ensure accurate project description and licensing.
- Ensured npm access level configuration consistency within npmextra.json.

## 2025-02-25 - 1.12.7 - fix(ci)

Updated dependencies and added CI/CD workflows.

- Updated several dependencies in package.json for compatibility and security.
@@ -80,6 +205,7 @@ Updated dependencies and added CI/CD workflows.
- Ensured consistent formatting with Prettier and TypeScript configurations.

## 2025-01-29 - 1.12.6 - fix(project)

Minor fixes and cleanup

- Removed outdated pages/ directory entry in .gitignore.
@@ -88,6 +214,7 @@ Minor fixes and cleanup
- Fixed formatting issues across various TypeScript files.

## 2025-01-29 - 1.12.5 - fix(cli)

Initial implementation of CLI utility with project management features

- Integration of various plugins for logging, command-line interactions, and project management.
@@ -95,34 +222,40 @@ Initial implementation of CLI utility with project management features
- Implement commands for packaging, versioning, and deprecating npm packages.

## 2025-01-29 - 1.12.2 - fix(format)

Add overrides for peek-readable in package.json formatting

- Added a URL correction in the packageJson repository information.
- Introduced support for pnpm overrides by including an `overrides.json` file.

## 2025-01-18 - 1.12.1 - fix(dependencies)

Update various package dependencies and Dockerfile base image

- Updated Dockerfile base image from 'alpinenpmci' to 'alpine_npmci'.
- Upgraded @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tsdoc, and other dependencies to their latest versions.

## 2025-01-17 - 1.12.0 - feat(build)

Update TypeScript configuration to support emit decorator metadata

- Added emitDecoratorMetadata to the tsconfig.json template in assets/templates/tsconfig_update.

## 2025-01-08 - 1.11.0 - feat(cli)

Add Docker command for cleaning up Docker system and extend deprecation command for multiple registries

- Added a new command 'docker' to handle Docker system cleanup operations.
- Improved the 'deprecate' command to support deprecating packages across multiple npm registry URLs.

## 2025-01-01 - 1.10.10 - fix(templates)

Corrected typo in template file comment

- Fixed repeated comment in the template file for services under 'assets/templates/service/ts/some.plugins.ts'.

## 2025-01-01 - 1.10.9 - fix(templates)

Correct template file paths and organization for service projects

- Moved 'some.classes.some.ts' to 'classes.some.ts'
@@ -130,60 +263,70 @@ Correct template file paths and organization for service projects
- Resolved incorrect import paths in service templates

## 2025-01-01 - 1.10.8 - fix(assets/templates)

Update CI template configurations to use module.githost

- Replaced occurrences of {{git.host}} with {{module.githost}} in CI workflow files
- Updated package dependencies for service template

## 2024-12-26 - 1.10.7 - fix(assets)

Correct URLs in templates and fix TypeScript declaration

- Updated incorrect URLs in Dockerfile templates to 'host.today'.
- Fixed type declaration for 'TemplateResult' in header.ts file.

## 2024-12-08 - 1.10.6 - fix(ci)

Corrected Docker image URL in CI templates

- Updated Docker image URL from 'code.foss.global/hosttoday' to 'code.foss.global/host.today' in default_nottags.yaml and default_tags.yaml.
- Adjusted gitignore template to include a custom section delineation.

## 2024-12-02 - 1.10.5 - fix(assets)

Update .gitignore template to remove pages directory

- Removed 'pages/' from the ignored directories in the .gitignore template.

## 2024-11-05 - 1.10.4 - fix(mod_format)

Correct file extension for TypeScript path configuration

- Fixed the TypeScript configuration to use correct file extensions for module subdirectories.

## 2024-10-27 - 1.10.3 - fix(mod_format)

Reorder TypeScript formatting steps in mod_format module

- Moved TypeScript configuration formatting earlier in the sequence for better logical consistency.

## 2024-10-27 - 1.10.2 - fix(format)

Add logging for tsconfig.json formatting

- Added an info log message for tsconfig.json formatting in format.tsconfig.ts.

## 2024-10-27 - 1.10.1 - fix(format)

Fixed async issue in tsconfig module lookup and corrected property access

## 2024-10-27 - 1.10.0 - feat(mod_format)

Add support for tsconfig.json formatting

- Added a new script to format tsconfig.json.
- Updated package.json to include `@git.zone/tspublish` as a dependency.

## 2024-10-23 - 1.9.126 - fix(format)

Remove redundant package.json property checks

- Removed property checks for `main`, `typings`, and `browserslist` from format.packagejson.ts
- This change streamlines the formatting process by removing unnecessary exits

## 2024-09-29 - 1.9.125 - fix(cli)

Fix package version configuration and formatting issues

- Updated metadata fields in package.json (repository URL, bugs URL, and homepage).
@@ -191,15 +334,17 @@ Fix package version configuration and formatting issues
- Added missing Prettier default TypeScript and Markdown configurations.

## 2024-09-27 - 1.9.124 - fix(cli)

Ensured proper existence and initialization of readme files

- Ensured readme.md and readme.hints.md files are created and initialized if they do not exist.

## 2024-09-27 - 1.9.123 - fix(core)

No changes detected

## 2024-09-27 - 1.9.123 - fix(core)

Update dependencies and improve build configurations

- Updated several dependencies in package.json for better compatibility
@@ -210,88 +355,111 @@ Update dependencies and improve build configurations
- Provided initial structure for readme and readme hints

## 2024-06-24 - 1.9.122 - fix(mod_commit)

Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.

- Updated @git.zone/tsdoc to ^1.3.12
- Updated @push.rocks/smartfile to ^11.0.21

## 2024-06-23 - 1.9.121 - fix(mod_commit)

Fix changelog template rendering by removing extra new line when no version details are provided.

- Update package.json dependencies: @git.zone/tsdoc and @push.rocks/smartpromise to latest versions.

## 2024-06-23 - 1.9.120 - fix(mod_commit)

Handle edge case for empty version details in changelog formatting

- Added check for the length of the recommendedNextVersionDetails array
- Ensure no extra newline in changelog if there are no version details

## 2024-06-23 - 1.9.119 - fix(dependencies)

Update @git.zone/tsdoc to v1.3.8

- Updated @git.zone/tsdoc from v1.3.7 to v1.3.8 in package.json

## 2024-06-23 - 1.9.118 - fix(dependencies)

Update @git.zone/tsdoc to version 1.3.7

- Bump @git.zone/tsdoc from 1.3.6 to 1.3.7 in both package.json and pnpm-lock.yaml

## 2024-06-23 - 1.9.117 - fix(dependencies)

Update @git.zone/tsdoc dependency to v1.3.6

- Updated @git.zone/tsdoc version from 1.3.5 to 1.3.6 in package.json
- Updated pnpm-lock.yaml to reflect the new version of @git.zone/tsdoc

## 2024-06-23 - 1.9.116 - fix(dependencies)

Update @git.zone/tsdoc to version 1.3.5

- Updated the @git.zone/tsdoc dependency in package.json and pnpm-lock.yaml from version 1.3.4 to 1.3.5
- Removed the outdated changelog.md file.

## 2024-06-23 - 1.9.114 - fix(format)

Fixed formatting issues across multiple TypeScript files.

## 2024-06-23 - 1.9.113 - fix(mod_commit)

Remove extra new lines in changelog.

## 2024-06-23 - 1.9.112 - fix(core)

Update changelog formatting and remove outdated entries.

## 2024-06-23 - 1.9.111 - fix(changelog)

Remove outdated changelog entries and update formatting.

## 2024-06-23 - 1.9.110 - fix(dependencies)

Update @git.zone/tsdoc to version 1.3.4.

## 2024-06-23 - 1.9.109 - fix(changelog)

Remove outdated entries and adjust formatting in changelog.

## 2024-06-23 - 1.9.108 - fix(dependencies)

Update @git.zone/tsdoc dependency to version 1.3.2.

## 2024-06-23 - 1.9.107 - fix(changelog)

Remove placeholder entries and adjust formatting in changelog.

## 2024-06-23 - 1.9.106 - fix(dependencies)

Updated @git.zone/tsdoc from version 1.3.0 to 1.3.1.

## 2024-06-23 - 1.9.105 - fix(dependencies)

Updated @git.zone/tsdoc dependency from 1.2.2 to 1.3.0 in package.json and pnpm-lock.yaml.

## 2024-06-23 - 1.9.104 - fix(changelog)

Remove placeholder entries and adjust formatting in changelog.

## 2024-06-23 - 1.9.103 - fix(changelog)

Fix changelog to remove placeholder entries and adjust formatting.

## 2024-06-23 - 1.9.102 - fix(logging)

Optimize logger instantiation and configuration.

## 2024-06-23 - 1.9.101 - fix(metadata)

Ensure accurate project metadata in package.json.

## 2024-06-23 - 1.9.100 - fix(dependencies)

Updated @git.zone/tsdoc dependency version to ^1.2.2 in package.json and pnpm-lock.yaml.

## 2024-06-23 - 1.9.99 - fix(mod_commit)

Fix variable reassignment issue in changelog writing step.
@@ -36,4 +36,4 @@
  "tsdoc": {
    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
  }
}
}
package.json (16 lines changed)
@@ -1,7 +1,7 @@
{
  "name": "@git.zone/cli",
  "private": false,
  "version": "1.15.5",
  "version": "1.17.1",
  "description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
  "main": "dist_ts/index.ts",
  "typings": "dist_ts/index.d.ts",
@@ -60,6 +60,11 @@
    "@git.zone/tsbuild": "^2.3.2",
    "@git.zone/tsrun": "^1.3.3",
    "@git.zone/tstest": "^1.0.96",
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^11.2.0",
    "@push.rocks/smartinteract": "^2.0.16",
    "@push.rocks/smartnetwork": "^4.1.2",
    "@push.rocks/smartshell": "^3.2.3",
    "@types/node": "^22.15.18"
  },
  "dependencies": {
@@ -73,10 +78,8 @@
    "@push.rocks/projectinfo": "^5.0.2",
    "@push.rocks/smartchok": "^1.0.34",
    "@push.rocks/smartcli": "^4.0.11",
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^11.2.0",
    "@push.rocks/smartdiff": "^1.0.3",
    "@push.rocks/smartgulp": "^3.0.4",
    "@push.rocks/smartinteract": "^2.0.15",
    "@push.rocks/smartjson": "^5.0.20",
    "@push.rocks/smartlegal": "^1.0.27",
    "@push.rocks/smartlog": "^3.0.9",
@@ -88,7 +91,6 @@
    "@push.rocks/smartpath": "^5.0.18",
    "@push.rocks/smartpromise": "^4.2.3",
    "@push.rocks/smartscaf": "^4.0.16",
    "@push.rocks/smartshell": "^3.2.3",
    "@push.rocks/smartstream": "^3.2.5",
    "@push.rocks/smartunique": "^3.0.9",
    "@push.rocks/smartupdate": "^2.0.6",
@@ -112,9 +114,7 @@
    "last 1 chrome versions"
  ],
  "pnpm": {
    "overrides": {
      "peek-readable": "5.3.1"
    }
    "overrides": {}
  },
  "packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
}
pnpm-lock.yaml (generated, 152 lines changed)
The generated lockfile diff records the package.json changes above: the `peek-readable: 5.3.1` override is dropped from the lockfile settings; `@push.rocks/smartdiff` 1.0.3 (pulling in `fast-diff` 1.3.0) replaces `@push.rocks/smartdelay` and `@push.rocks/smartfile` in the dependencies importer; `@push.rocks/smartdelay`, `@push.rocks/smartfile`, `@push.rocks/smartinteract`, `@push.rocks/smartnetwork` 4.1.2, `@push.rocks/smartshell`, and `@types/node` 22.15.18 are added to the devDependencies importer; and new resolution and snapshot entries appear for `@push.rocks/smartnetwork@4.1.2`, `@push.rocks/smartping@1.0.8`, `@types/default-gateway@7.2.2`, `got@13.0.0`, `public-ip@7.0.1`, `is-ip@5.0.1`, `super-regex@0.2.0`, `clone-regexp@3.0.0`, `is-regexp@3.1.0`, `function-timeout@0.1.1`, `time-span@5.1.0`, and `convert-hrtime@5.0.0`.
readme.hints.md (194 lines changed)
@@ -1 +1,193 @@
* the cli of the git.zone project.
# Gitzone CLI - Development Hints

- the cli of the git.zone project.

## Project Overview

Gitzone CLI (`@git.zone/cli`) is a comprehensive toolbelt for streamlining local development cycles. It provides utilities for:

- Project initialization and templating (via smartscaf)
- Code formatting and standardization
- Version control and commit management
- Docker and CI/CD integration
- Meta project management

## Architecture

### Core Structure

- Main CLI entry: `cli.ts` / `cli.child.ts`
- Modular architecture with separate modules in `ts/mod_*` directories
- Each module handles specific functionality (format, commit, docker, etc.)
- Extensive use of plugins pattern via `plugins.ts` files

### Configuration Management

- Uses `npmextra.json` for all tool configuration
- Configuration stored under `gitzone` key in npmextra
- No separate `.gitzonerc` file - everything in npmextra.json
- Project type and module metadata also stored in npmextra
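As a rough illustration of that layout, the gitzone settings might sit in `npmextra.json` like this (a minimal sketch: the `gitzone` key, `module.githost`, `assetbrokerUrl`, and `legalUrl` appear elsewhere in this changeset; the `projectType` key name and the placeholder URLs are illustrative assumptions):

```json
{
  "gitzone": {
    "projectType": "npm",
    "module": {
      "githost": "code.foss.global",
      "assetbrokerUrl": "https://assetbroker.example.com",
      "legalUrl": "https://legal.example.com"
    }
  }
}
```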
### Format Module (`mod_format`) - SIGNIFICANTLY ENHANCED

The format module is responsible for project standardization:

#### Current Modules:

1. **cleanup** - Removes obsolete files (yarn.lock, tslint.json, etc.)
2. **copy** - File copying with glob patterns (fully implemented)
3. **gitignore** - Creates/updates .gitignore from templates
4. **license** - Checks dependency licenses for compatibility
5. **npmextra** - Manages project metadata and configuration
6. **packagejson** - Formats and updates package.json
7. **prettier** - Applies code formatting with batching
8. **readme** - Ensures readme files exist
9. **templates** - Updates project templates based on type
10. **tsconfig** - Formats TypeScript configuration

#### Execution Order (Dependency-Based):

- Modules are now executed in parallel groups based on dependencies
- Independent modules run concurrently for better performance
- Dependency analyzer ensures correct execution order

### New Architecture Features

1. **BaseFormatter Pattern**: All formatters extend abstract BaseFormatter class
2. **FormatContext**: Central state management across all modules
3. **FormatPlanner**: Implements plan → action workflow
4. **RollbackManager**: Full backup/restore capabilities
5. **ChangeCache**: Tracks file changes to optimize performance
6. **DependencyAnalyzer**: Manages module execution order
7. **DiffReporter**: Generates diff views for changes
8. **FormatStats**: Comprehensive execution statistics

### Key Patterns

1. **Plugin Architecture**: All dependencies imported through `plugins.ts` files
2. **Streaming**: Uses smartstream for file processing
3. **Interactive Prompts**: smartinteract for user input
4. **Enhanced Error Handling**: Comprehensive try-catch with automatic rollback
5. **Template System**: Templates handled by smartscaf, not directly by gitzone
6. **Type Safety**: Full TypeScript with interfaces and type definitions

### Important Notes

- `.nogit/` directory used for temporary/untracked files, backups, and cache
- `.nogit/gitzone-backups/` stores format operation backups
- `.nogit/gitzone-cache/` stores file change cache
- Templates are managed by smartscaf - improvements should be made there
- License checking configurable with exceptions support
- All features implemented: `ensureDependency`, copy module, etc.

## Recent Improvements (Completed)

1. **Plan → Action Workflow**: Shows changes before applying them
2. **Rollback Mechanism**: Full backup and restore on failures
3. **Enhanced Configuration**: Granular control via npmextra.json
4. **Better Error Handling**: Detailed errors with recovery options
5. **Performance Optimizations**: Parallel execution and caching
6. **Reporting**: Diff views, statistics, verbose logging
7. **Architecture**: Clean separation of concerns with new classes

## Development Tips

- Always check readme.plan.md for ongoing improvement plans
- Use npmextra.json for any new configuration options
- Keep modules focused and single-purpose
- Maintain the existing plugin pattern for dependencies
- Test format operations on sample projects before deploying
- Consider backward compatibility when changing configuration structure
- Use BaseFormatter pattern for new format modules
- Leverage FormatContext for cross-module state sharing

## Configuration Examples

```json
{
  "gitzone": {
    "format": {
      "interactive": true,
      "parallel": true,
      "showStats": true,
      "cache": {
        "enabled": true,
        "clean": true
      },
      "rollback": {
        "enabled": true,
        "autoRollbackOnError": true,
        "backupRetentionDays": 7
      },
      "modules": {
        "skip": ["prettier"],
        "only": [],
        "order": []
      },
      "licenses": {
        "allowed": ["MIT", "Apache-2.0"],
        "exceptions": {
          "some-package": "GPL-3.0"
        }
      }
    }
  }
}
```

## CLI Usage

```bash
# Basic format
gitzone format

# Dry run to preview changes
gitzone format --dry-run

# Non-interactive mode
gitzone format --yes

# Plan only (no execution)
gitzone format --plan-only

# Save plan for later
gitzone format --save-plan format.json

# Execute saved plan
gitzone format --from-plan format.json

# Verbose mode
gitzone format --verbose

# Detailed diff views
gitzone format --detailed

# Rollback operations
gitzone format --rollback
gitzone format --rollback <operation-id>
gitzone format --list-backups
gitzone format --clean-backups
```

## Common Issues (Now Resolved)

1. ✅ Format operations are now reversible with rollback
2. ✅ Enhanced error messages with recovery suggestions
3. ✅ All modules fully implemented (including copy)
4. ✅ Dry-run capability available
5. ✅ Extensive configuration options available

## Future Considerations

- Plugin system for custom formatters
- Git hooks integration for pre-commit formatting
- Advanced UI with interactive configuration
- Format presets for common scenarios
- Performance benchmarking tools

## API Changes

- smartfile API updated to use `fs.*` and `memory.*` namespaces
- smartnpm requires instance creation: `new NpmRegistry()`
- All file operations now use updated APIs
- Type imports use `import type` for proper verbatim module syntax
readme.md (549 lines changed)
@@ -1,159 +1,528 @@
# @git.zone/cli
# @git.zone/cli 🚀

A CLI toolbelt to streamline local development cycles by utilizing various gitzone utilities.
**The ultimate CLI toolbelt for modern TypeScript development workflows**

## Install
[](https://www.npmjs.com/package/@git.zone/cli)
[](https://opensource.org/licenses/MIT)

To begin using `@git.zone/cli`, ensure that Node.js and npm are installed on your system. Once they are set up, install the CLI tool globally with the following command:
## 🎯 What is gitzone?

```shell
gitzone is a powerful command-line interface that supercharges your development workflow with automated project management, intelligent code formatting, seamless version control, and development service orchestration. Whether you're bootstrapping a new TypeScript project, maintaining code quality, managing complex multi-repository setups, or spinning up local development databases, gitzone has got you covered.

## 🏃♂️ Quick Start

### Installation

```bash
# Install globally via npm
npm install -g @git.zone/cli

# Or with pnpm (recommended)
pnpm add -g @git.zone/cli
```

This command makes `gitzone` or `gzone` accessible from any directory on your system.
Once installed, you can use either `gitzone` or the shorter `gzone` command from anywhere in your terminal.

## Usage
### Your First Commands

The `gitzone` CLI provides a comprehensive set of utilities to optimize and manage the development lifecycle of projects, ranging from project initialization to version control, code formatting, and deployment processes. This usage guide will walk you through the available commands, their use cases, and provide detailed examples.
```bash
# Create a new TypeScript npm package
gitzone template npm

### Initializing a New Project
# Format your entire codebase
gitzone format

To bootstrap a new project aligned with modern TypeScript practices, use gitzone's template command. Choose from a range of templates tailored for different project types:
# Start local MongoDB and MinIO services
gitzone services start

```shell
gitzone template [templatename]
```

Replace `[templatename]` with any of the following:
- `npm`: Starts an npm module with TypeScript, complete with test and CI/CD integration.
- `website`: Initializes a LitElement-based frontend application with features such as e2e testing and service worker setup.
- `element`: Prepares a new standard setup for creating reusable web components with LitElement.

**Example: Starting a New npm Project**
1. Navigate to your desired directory in the terminal.
2. Execute:
```shell
gitzone template npm
```
3. Respond to the prompts to customize the project settings. This includes setting up essential metadata such as name and repository information.

### Committing Changes

Standardizing commit messages and versionning with gitzone:

```shell
# Create a semantic commit
gitzone commit
```

This command provides an interactive prompt to create a conventional commit message, which will be automatically versioned based on the significance of the changes like `fix`, `feat`, or `BREAKING CHANGE`.
## 🛠️ Core Features

### Handling Package Deprecation
### 🐳 Development Services Management (NEW!)

Replace an outdated package with a new one for your team and users:
Effortlessly manage local MongoDB and MinIO (S3-compatible) services for your development environment:

```shell
gitzone deprecate
```bash
gitzone services [command]
```

The utility collects information about old and new package names through interactive prompts, and proceeds with deprecation notices in npm, guiding users towards the updated package.
**Available commands:**

### CI/CD Utilities
- **`start [service]`** - Start services (mongo|s3|all)
- **`stop [service]`** - Stop services (mongo|s3|all)
- **`restart [service]`** - Restart services
- **`status`** - Show current service status
- **`config`** - Display configuration details
- **`compass`** - Get MongoDB Compass connection string with network IP
- **`logs [service] [lines]`** - View service logs
- **`remove`** - Remove containers (preserves data)
- **`clean`** - Remove containers AND data (⚠️ destructive)

Access your project’s CI/CD settings instantly:
**Key features:**

```shell
gitzone open ci
- 🎲 **Smart port assignment** - Automatically assigns random ports (20000-30000) to avoid conflicts
- 📦 **Project isolation** - Each project gets its own containers with unique names
- 💾 **Data persistence** - Data stored in `.nogit/` directories survives container restarts
- 🔗 **MongoDB Compass support** - Instantly get connection strings for GUI access
- 🌐 **Network IP detection** - Automatically detects your local network IP for remote connections
- ⚙️ **Auto-configuration** - Creates `.nogit/env.json` with smart defaults

**Example workflow:**

```bash
# Start all services for your project
gitzone services start

# Check what's running
gitzone services status

# Get MongoDB Compass connection string
gitzone services compass
# Output: mongodb://defaultadmin:defaultpass@192.168.1.100:27018/myproject?authSource=admin

# View MongoDB logs
gitzone services logs mongo 50

# Stop services when done
gitzone services stop
```

This command opens the settings page of the repository’s CI/CD configuration directly from your CLI, thus swiftly transitioning from local tasks to cloud configurations.
The services are configured via `.nogit/env.json` which is automatically created with secure defaults and random ports for each project.

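The exact shape of `.nogit/env.json` is not shown in this changeset; a hypothetical sketch, reusing the credentials and MongoDB port visible in the Compass connection string above, might look roughly like this (all key names are illustrative assumptions):

```json
{
  "MONGODB_PORT": 27018,
  "MONGODB_USER": "defaultadmin",
  "MONGODB_PASS": "defaultpass",
  "MONGODB_NAME": "myproject",
  "S3_PORT": 29000,
  "S3_USER": "defaultadmin",
  "S3_PASS": "defaultpass"
}
```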
### Maintaining Project Cleanliness
|
||||
### 📦 Project Templates
|
||||
|
||||
For consistent code formatting across your project:
|
||||
Instantly scaffold production-ready projects with best practices built-in:
|
||||
|
||||
```shell
|
||||
gitzone format
|
||||
```bash
|
||||
gitzone template [template-name]
|
||||
```
|
||||
|
||||
This command applies uniform formatting styles using Prettier, ensuring your JS/TS and markdown files align with the project's style guide.
|
||||
**Available templates:**
|
||||
|
||||
### Lifecycle Management Commands
|
||||
- **`npm`** - TypeScript npm package with testing, CI/CD, and full tooling
|
||||
- **`service`** - Microservice architecture with Docker support
|
||||
- **`website`** - Modern web application with LitElement and service workers
|
||||
- **`wcc`** - Web Component Collection for reusable UI components
|
||||

Each template comes pre-configured with:

- ✅ TypeScript with modern configurations
- ✅ Automated testing setup
- ✅ CI/CD pipelines (GitLab/GitHub)
- ✅ Code formatting and linting
- ✅ Documentation structure

### 🧹 Clean Builds

Remove generated files and prepare for a fresh build:

```shell
gitzone clean
```

This utility cleans build artifacts out of your project directory so you can run a fresh build without residual clutter affecting the outcome.

### 🎨 Intelligent Code Formatting

The most powerful feature of gitzone - automatically format and standardize your entire codebase:

```bash
# Preview changes without applying them
gitzone format --dry-run

# Format with automatic approval
gitzone format --yes

# Save formatting plan for later execution
gitzone format --save-plan format-plan.json

# Execute a saved plan
gitzone format --from-plan format-plan.json

# Enable verbose output for debugging
gitzone format --verbose
```

**Format features:**

- 🔄 **Smart caching** - Only processes changed files
- 🛡️ **Rollback support** - Undo formatting changes if needed
- 📊 **Detailed reporting** - See exactly what changed
- ⚡ **Parallel execution** - Format multiple files simultaneously
- 🎯 **Module-specific formatting** - Target specific formatters

**Rollback capabilities:**

```bash
# List all available backups
gitzone format --list-backups

# Rollback to the last operation
gitzone format --rollback

# Rollback to a specific operation
gitzone format --rollback [operation-id]

# Clean old backups
gitzone format --clean-backups
```

**Formatters included:**

- **Prettier** - JavaScript/TypeScript code formatting
- **License** - Ensure proper licensing
- **Package.json** - Standardize package configurations
- **Tsconfig** - TypeScript configuration optimization
- **Readme** - Documentation formatting
- **Gitignore** - Repository ignore rules
- **Templates** - Project template updates
- **Npmextra** - Extended npm configurations
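Individual formatters can be skipped or targeted through the `gitzone.format.modules` settings in `npmextra.json` (documented in the Configuration section below). For example, to skip the Prettier formatter while keeping the rest:

```json
{
  "gitzone": {
    "format": {
      "modules": {
        "skip": ["prettier"]
      }
    }
  }
}
```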

### 🔀 Semantic Commits & Versioning

Create standardized commits that automatically handle versioning:

```bash
gitzone commit
```

Features:

- 📝 Interactive commit message builder
- 🏷️ Automatic version bumping (major/minor/patch)
- 📜 Changelog generation
- 🚀 Optional auto-push to origin
- 🎯 Conventional commit compliance

The commit wizard guides you through:

1. **Type selection** (feat/fix/docs/style/refactor/perf/test/chore)
2. **Scope definition** (component/module affected)
3. **Description crafting**
4. **Breaking change detection**
5. **Version bump determination**
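The assembled message follows the conventional `type(scope): description` pattern; a commit produced by the wizard might read, for example (illustrative, not taken from this repository's history):

```
feat(services): add MongoDB Compass connection string output
```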

### 🏗️ Meta Repository Management

Manage multiple related repositories as a cohesive unit:

```bash
# Initialize a meta repository
gitzone meta init

# Add a sub-project
gitzone meta add [name] [git-url]

# Update all sub-projects
gitzone meta update

# Remove a sub-project
gitzone meta remove [name]
```

Initialization creates a `.meta.json` file that tracks the subprojects belonging to the meta repository.

Perfect for:

- Monorepo management
- Multi-package projects
- Coordinated deployments
- Synchronized versioning
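The exact `.meta.json` schema is managed by gitzone itself, but conceptually it maps each sub-project name to its git URL, roughly along these lines (illustrative sketch only):

```json
{
  "projects": {
    "frontend": "https://github.com/org/frontend.git",
    "backend": "https://github.com/org/backend.git"
  }
}
```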

### 🐳 Docker Management

Streamline your Docker workflow:

```bash
# Clean up all Docker resources
gitzone docker prune
```

This cleans up stale Docker resources so local storage stays lean. It removes:

- Stopped containers
- Unused images
- Dangling volumes
- Unused networks

### 🔗 Quick CI/CD Access

Jump directly to your CI/CD configurations:

```bash
# Open CI/CD settings
gitzone open ci

# Open pipelines view
gitzone open pipelines
```

This opens the repository's CI/CD settings straight from your CLI, making the switch from local tasks to cloud configuration quick. Works with GitLab repositories to provide instant access to your deployment configurations.

### 📝 Package Deprecation

Smoothly transition users from old to new packages:

```bash
gitzone deprecate
```

Interactive wizard for:

- Setting deprecation notices
- Guiding users to replacements
- Updating registry metadata
- Coordinating migration paths

### 🚦 Project Initialization

Prepare existing projects for development:

```bash
gitzone start
```
|
||||
|
||||
Automatically:
|
||||
|
||||
- Checks out master branch
|
||||
- Pulls latest changes
|
||||
- Installs dependencies
|
||||
- Sets up development environment
|
||||
|
||||
### 🔧 Helper Utilities
|
||||
|
||||
Quick utilities for common tasks:
|
||||
|
||||
```bash
|
||||
# Generate a unique short ID
|
||||
gitzone helpers shortid
|
||||
```
|
||||
|
||||
## 📋 Configuration
|
||||
|
||||
### npmextra.json Configuration
|
||||
|
||||
Customize gitzone behavior through `npmextra.json`:
|
||||
|
||||
```json
|
||||
{
|
||||
"gitzone": {
|
||||
"format": {
|
||||
"interactive": true,
|
||||
"showDiffs": false,
|
||||
"autoApprove": false,
|
||||
"parallel": true,
|
||||
"rollback": {
|
||||
"enabled": true,
|
||||
"autoRollbackOnError": true,
|
||||
"backupRetentionDays": 7
|
||||
},
|
||||
"modules": {
|
||||
"skip": ["prettier"],
|
||||
"only": [],
|
||||
"order": []
|
||||
},
|
||||
"cache": {
|
||||
"enabled": true,
|
||||
"clean": true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `CI` - Detect CI environment for automated workflows
|
||||
- `DEBUG` - Enable debug output
|
||||
- `GITZONE_FORMAT_PARALLEL` - Control parallel formatting
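
For example, a CI job might combine these with the format command (illustrative; the accepted value format for `GITZONE_FORMAT_PARALLEL` is an assumption here):

```bash
CI=true GITZONE_FORMAT_PARALLEL=false gitzone format --yes
```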
|
||||
|
||||
## 🏆 Best Practices
|
||||
|
||||
### For New Projects
|
||||
|
||||
1. Start with a template: `gitzone template npm`
|
||||
2. Set up local services: `gitzone services start`
|
||||
3. Customize the generated structure
|
||||
4. Run initial format: `gitzone format`
|
||||
5. Set up CI/CD: `gitzone open ci`
|
||||
|
||||
### For Existing Projects
|
||||
|
||||
1. Initialize: `gitzone start`
|
||||
2. Format codebase: `gitzone format --dry-run` (preview first!)
|
||||
3. Apply formatting: `gitzone format --yes`
|
||||
4. Set up services: `gitzone services start`
|
||||
5. Commit changes: `gitzone commit`
|
||||
|
||||
### For Teams
|
||||
|
||||
1. Document format preferences in `npmextra.json`
|
||||
2. Share `.nogit/env.json` template for consistent service setup
|
||||
3. Use `--save-plan` for reviewable format changes
|
||||
4. Enable rollback for safety
|
||||
5. Standardize commit conventions
|
||||
|
||||
## 🎯 Common Workflows
|
||||
|
||||
### Full-Stack Development Cycle
|
||||
|
||||
```bash
|
||||
# 1. Start fresh
|
||||
gitzone start
|
||||
|
||||
# 2. Spin up databases and services
|
||||
gitzone services start
|
||||
|
||||
# 3. Make changes
|
||||
# ... your development work ...
|
||||
|
||||
# 4. Check service logs if needed
|
||||
gitzone services logs mongo
|
||||
|
||||
# 5. Format code
|
||||
gitzone format
|
||||
|
||||
# 6. Commit with semantic versioning
|
||||
gitzone commit
|
||||
|
||||
# 7. Stop services when done
|
||||
gitzone services stop
|
||||
```
|
||||
|
||||
### Multi-Repository Management
|
||||
|
||||
```bash
|
||||
# 1. Set up meta repository
|
||||
gitzone meta init
|
||||
|
||||
# 2. Add all related projects
|
||||
gitzone meta add frontend https://github.com/org/frontend.git
|
||||
gitzone meta add backend https://github.com/org/backend.git
|
||||
gitzone meta add shared https://github.com/org/shared.git
|
||||
|
||||
# 3. Synchronize updates
|
||||
gitzone meta update
|
||||
```
|
||||
|
||||
### Safe Formatting with Rollback
|
||||
|
||||
```bash
|
||||
# 1. Preview changes
|
||||
gitzone format --dry-run
|
||||
|
||||
# 2. Save plan for review
|
||||
gitzone format --save-plan format-changes.json
|
||||
|
||||
# 3. Apply formatting
|
||||
gitzone format --from-plan format-changes.json
|
||||
|
||||
# 4. If something goes wrong, rollback
|
||||
gitzone format --rollback
|
||||
```
|
||||
|
||||
### Database-Driven Development
|
||||
|
||||
```bash
|
||||
# 1. Start MongoDB and MinIO
|
||||
gitzone services start
|
||||
|
||||
# 2. Get connection string for your app
|
||||
gitzone services config
|
||||
|
||||
# 3. Connect with MongoDB Compass
|
||||
gitzone services compass
|
||||
|
||||
# 4. Monitor services
|
||||
gitzone services status
|
||||
|
||||
# 5. Clean everything when done
|
||||
gitzone services clean # ⚠️ Warning: deletes data
|
||||
```
|
||||
|
||||
## 🔌 Integrations
|
||||
|
||||
### CI/CD Platforms
|
||||
|
||||
- **GitLab CI** - Full pipeline support with templates
|
||||
- **GitHub Actions** - Automated workflows
|
||||
- **Docker** - Container-based deployments
|
||||
|
||||
### Development Tools
|
||||
|
||||
- **TypeScript** - First-class support
|
||||
- **Prettier** - Code formatting
|
||||
- **ESLint** - Linting (via format modules)
|
||||
- **npm/pnpm** - Package management
|
||||
- **MongoDB** - Local database service
|
||||
- **MinIO** - S3-compatible object storage
|
||||
- **MongoDB Compass** - Database GUI integration
|
||||
|
||||
### Version Control
|
||||
|
||||
- **Git** - Deep integration
|
||||
- **Semantic Versioning** - Automatic version bumping
|
||||
- **Conventional Commits** - Standardized commit messages
|
||||
|
||||
## 💡 Pro Tips
|
||||
|
||||
1. **Use aliases**: Add `alias gz='gitzone'` to your shell profile
|
||||
2. **Combine commands**: `gitzone format --yes && gitzone commit`
|
||||
3. **Leverage templates**: Start projects right with proven structures
|
||||
4. **Enable caching**: Dramatically speeds up formatting operations
|
||||
5. **Save format plans**: Review changes before applying in production
|
||||
6. **Port management**: Let services auto-assign ports to avoid conflicts
|
||||
7. **Use MongoDB Compass**: `gitzone services compass` for visual DB management
|
||||
|
||||
## 🐛 Troubleshooting
|
||||
|
||||
### Format Command Shows "Cancelled"
|
||||
|
||||
If the format command shows cancelled even after confirming:
|
||||
|
||||
- Check your `npmextra.json` configuration
|
||||
- Try with `--yes` flag to skip confirmation
|
||||
- Use `--verbose` for detailed output
|
||||
|
||||
### Docker Commands Fail
|
||||
|
||||
Ensure Docker daemon is running:
|
||||
|
||||
```bash
|
||||
docker info
|
||||
```
|
||||
|
||||
### Services Won't Start
|
||||
|
||||
Check for port conflicts:
|
||||
|
||||
```bash
|
||||
# Services auto-assign ports, but you can check the config
|
||||
cat .nogit/env.json
|
||||
|
||||
# Verify Docker is running
|
||||
docker ps
|
||||
```
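
If one of the ports from `.nogit/env.json` is already held by another process, you can inspect it directly (assuming a typical Linux/macOS host with `lsof` available):

```bash
# Replace 27018 with the MONGODB_PORT or S3_PORT value from .nogit/env.json
lsof -i :27018
```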
|
||||
|
||||
### Template Creation Issues
|
||||
|
||||
Verify npm/pnpm is properly configured:
|
||||
|
||||
```bash
|
||||
npm config get registry
|
||||
```
|
||||
|
||||
### MongoDB Connection Issues
|
||||
|
||||
- Ensure services are running: `gitzone services status`
|
||||
- Check firewall settings for the assigned ports
|
||||
- Use `gitzone services compass` for the correct connection string
|
||||
|
||||
## 📈 Performance
|
||||
|
||||
gitzone is optimized for speed:
|
||||
|
||||
- **Parallel processing** for format operations
|
||||
- **Smart caching** to avoid redundant work
|
||||
- **Incremental updates** for meta repositories
|
||||
- **Minimal dependencies** for fast installation
|
||||
- **Isolated services** prevent resource conflicts
|
||||
- **Auto port assignment** eliminates manual configuration
|
||||
|
||||
## License and Legal Information
|
||||
|
||||
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
||||
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||
|
||||
@@ -168,4 +537,4 @@ Registered at District court Bremen HRB 35230 HB, Germany
|
||||
|
||||
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
||||
|
||||
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
121
readme.plan.md
Normal file
@@ -0,0 +1,121 @@
|
||||
# GitZone Services Command Implementation Plan
|
||||
|
||||
## Overview
|
||||
Implement the `gitzone services` command to manage MongoDB and MinIO containers for development projects.
|
||||
|
||||
## Tasks
|
||||
|
||||
### Module Structure Setup
|
||||
- [x] Create `ts/mod_services/` directory
|
||||
- [x] Create `mod.plugins.ts` with required imports
|
||||
- [x] Create `helpers.ts` with utility functions
|
||||
- [x] Create `classes.serviceconfiguration.ts` for config handling
|
||||
- [x] Create `classes.dockercontainer.ts` for Docker operations
|
||||
- [x] Create `classes.servicemanager.ts` for service management
|
||||
- [x] Create `index.ts` with main command logic
|
||||
|
||||
### Core Functionality
|
||||
- [x] Implement ServiceConfiguration class
|
||||
- [x] Load/create `.nogit/env.json` configuration
|
||||
- [x] Generate random available ports (20000-30000 range)
|
||||
- [x] Preserve existing custom values
|
||||
- [x] Provide default values for missing fields
|
||||
|
||||
- [x] Implement DockerContainer class
|
||||
- [x] Check container status
|
||||
- [x] Start/stop/restart containers
|
||||
- [x] Execute Docker commands
|
||||
- [x] Handle container logs
|
||||
- [x] Manage volumes and port bindings
|
||||
|
||||
- [x] Implement ServiceManager class
|
||||
- [x] Manage MongoDB containers
|
||||
- [x] Manage MinIO containers
|
||||
- [x] Handle container lifecycle
|
||||
- [x] Generate project-specific container names
|
||||
- [x] Manage data directories in `.nogit/`
|
||||
- [x] Generate MongoDB Compass connection strings
|
||||
|
||||
### Commands Implementation
|
||||
- [x] `start` command - Start services (mongo|s3|all)
|
||||
- [x] `stop` command - Stop services (mongo|s3|all)
|
||||
- [x] `restart` command - Restart services (mongo|s3|all)
|
||||
- [x] `status` command - Show service status
|
||||
- [x] `config` command - Show current configuration
|
||||
- [x] `compass` command - Show MongoDB Compass connection string
|
||||
- [x] `logs` command - Show service logs with line count
|
||||
- [x] `remove` command - Remove containers (preserve data)
|
||||
- [x] `clean` command - Remove containers and data
|
||||
|
||||
### Integration
|
||||
- [x] Add `@push.rocks/smartshell` to main plugins.ts
|
||||
- [x] Add `@push.rocks/smartnetwork` to main plugins.ts
|
||||
- [x] Add `@push.rocks/smartinteraction` to main plugins.ts
|
||||
- [x] Register services command in `gitzone.cli.ts`
|
||||
|
||||
### Features
|
||||
- [x] Auto-configuration with smart defaults
|
||||
- [x] Random port assignment to avoid conflicts
|
||||
- [x] Project isolation with unique container names
|
||||
- [x] Data persistence in `.nogit/` directories
|
||||
- [x] Status display (running/stopped/not installed)
|
||||
- [x] Interactive confirmations for destructive operations
|
||||
- [x] Colored console output
|
||||
- [x] MinIO bucket auto-creation
|
||||
- [x] MongoDB Compass connection string with network IP
|
||||
|
||||
### Testing
|
||||
- [ ] Test service start/stop operations
|
||||
- [ ] Test configuration creation and updates
|
||||
- [ ] Test port collision handling
|
||||
- [ ] Test data persistence
|
||||
- [ ] Test MongoDB Compass connection string generation
|
||||
- [ ] Test all command variations
|
||||
|
||||
## Configuration Format
|
||||
```json
|
||||
{
|
||||
"PROJECT_NAME": "derived-from-package-name",
|
||||
"MONGODB_HOST": "localhost",
|
||||
"MONGODB_NAME": "project-name",
|
||||
"MONGODB_PORT": "random-port",
|
||||
"MONGODB_USER": "defaultadmin",
|
||||
"MONGODB_PASS": "defaultpass",
|
||||
"S3_HOST": "localhost",
|
||||
"S3_PORT": "random-port",
|
||||
"S3_CONSOLE_PORT": "s3-port+1",
|
||||
"S3_USER": "defaultadmin",
|
||||
"S3_PASS": "defaultpass",
|
||||
"S3_BUCKET": "project-name-documents"
|
||||
}
|
||||
```
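
A minimal sketch of how a project could consume this configuration from Node.js (assumed helper code, not the actual gitzone implementation; it relies only on the field names above):

```typescript
import { readFileSync } from 'node:fs';
import { join } from 'node:path';

interface IServiceEnv {
  PROJECT_NAME: string;
  MONGODB_HOST: string;
  MONGODB_NAME: string;
  MONGODB_PORT: string;
  MONGODB_USER: string;
  MONGODB_PASS: string;
  S3_HOST: string;
  S3_PORT: string;
  S3_CONSOLE_PORT: string;
  S3_USER: string;
  S3_PASS: string;
  S3_BUCKET: string;
}

// Load the project-local service configuration written by `gitzone services`
const env: IServiceEnv = JSON.parse(
  readFileSync(join(process.cwd(), '.nogit', 'env.json'), 'utf8'),
);

// Assemble a MongoDB connection string in the same shape gitzone prints for Compass
const mongoUri = `mongodb://${env.MONGODB_USER}:${env.MONGODB_PASS}@${env.MONGODB_HOST}:${env.MONGODB_PORT}/${env.MONGODB_NAME}?authSource=admin`;
console.log(mongoUri);
```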
|
||||
|
||||
## Command Examples
|
||||
```bash
|
||||
gitzone services start # Start all services
|
||||
gitzone services start mongo # Start only MongoDB
|
||||
gitzone services stop # Stop all services
|
||||
gitzone services status # Check service status
|
||||
gitzone services config # Show configuration
|
||||
gitzone services compass # Show MongoDB Compass connection string
|
||||
gitzone services logs mongo 50 # Show last 50 lines of MongoDB logs
|
||||
gitzone services remove # Remove containers (preserve data)
|
||||
gitzone services clean # Remove containers and data
|
||||
```
|
||||
|
||||
## Progress Notes
|
||||
Implementation started: 2025-08-14
|
||||
Implementation completed: 2025-08-14
|
||||
|
||||
## Summary
|
||||
Successfully implemented the `gitzone services` command in TypeScript, providing a complete replacement for the `services.sh` shell script. The implementation includes:
|
||||
|
||||
1. **Complete Docker service management** for MongoDB and MinIO containers
|
||||
2. **Smart configuration management** with automatic port assignment and conflict avoidance
|
||||
3. **MongoDB Compass support** with network IP detection for remote connections
|
||||
4. **Project isolation** using project-specific container names
|
||||
5. **Data persistence** in `.nogit/` directories
|
||||
6. **Interactive confirmations** for destructive operations
|
||||
7. **Comprehensive command set** including start, stop, restart, status, config, compass, logs, remove, and clean commands
|
||||
|
||||
The module is fully integrated into the gitzone CLI and ready for testing.
|
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@git.zone/cli',
|
||||
version: '1.15.5',
|
||||
version: '1.16.10',
|
||||
description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
|
||||
}
|
||||
|
@@ -40,7 +40,9 @@ export class GitzoneConfig {
|
||||
public async readConfigFromCwd() {
|
||||
const npmextraInstance = new plugins.npmextra.Npmextra(paths.cwd);
|
||||
this.data = npmextraInstance.dataFor<IGitzoneConfigData>('gitzone', {});
|
||||
this.data.npmciOptions = npmextraInstance.dataFor<IGitzoneConfigData['npmciOptions']>('npmci', {
|
||||
this.data.npmciOptions = npmextraInstance.dataFor<
|
||||
IGitzoneConfigData['npmciOptions']
|
||||
>('npmci', {
|
||||
npmAccessLevel: 'public',
|
||||
});
|
||||
}
|
||||
|
@@ -62,7 +62,35 @@ export let run = async () => {
|
||||
gitzoneSmartcli.addCommand('format').subscribe(async (argvArg) => {
|
||||
const config = GitzoneConfig.fromCwd();
|
||||
const modFormat = await import('./mod_format/index.js');
|
||||
await modFormat.run();
|
||||
|
||||
// Handle rollback commands
|
||||
if (argvArg.rollback) {
|
||||
await modFormat.handleRollback(argvArg.rollback);
|
||||
return;
|
||||
}
|
||||
|
||||
if (argvArg['list-backups']) {
|
||||
await modFormat.handleListBackups();
|
||||
return;
|
||||
}
|
||||
|
||||
if (argvArg['clean-backups']) {
|
||||
await modFormat.handleCleanBackups();
|
||||
return;
|
||||
}
|
||||
|
||||
// Handle format with options
|
||||
await modFormat.run({
|
||||
dryRun: argvArg['dry-run'],
|
||||
yes: argvArg.yes,
|
||||
planOnly: argvArg['plan-only'],
|
||||
savePlan: argvArg['save-plan'],
|
||||
fromPlan: argvArg['from-plan'],
|
||||
detailed: argvArg.detailed,
|
||||
interactive: argvArg.interactive !== false,
|
||||
parallel: argvArg.parallel !== false,
|
||||
verbose: argvArg.verbose,
|
||||
});
|
||||
});
|
||||
|
||||
/**
|
||||
@@ -103,6 +131,14 @@ export let run = async () => {
|
||||
modHelpers.run(argvArg);
|
||||
});
|
||||
|
||||
/**
|
||||
* manage development services (MongoDB, S3/MinIO)
|
||||
*/
|
||||
gitzoneSmartcli.addCommand('services').subscribe(async (argvArg) => {
|
||||
const modServices = await import('./mod_services/index.js');
|
||||
await modServices.run(argvArg);
|
||||
});
|
||||
|
||||
// start parsing of the cli
|
||||
gitzoneSmartcli.startParse();
|
||||
return await done.promise;
|
||||
|
@@ -1,6 +1,29 @@
|
||||
import { commitinfo } from '@push.rocks/commitinfo';
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
// Create logger instance
|
||||
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
|
||||
|
||||
logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());
|
||||
// Add console destination
|
||||
const consoleDestination =
|
||||
new plugins.smartlogDestinationLocal.DestinationLocal();
|
||||
logger.addLogDestination(consoleDestination);
|
||||
|
||||
// Verbose logging helper
|
||||
let verboseMode = false;
|
||||
|
||||
export const setVerboseMode = (verbose: boolean): void => {
|
||||
verboseMode = verbose;
|
||||
logger.log('info', `Verbose mode ${verbose ? 'enabled' : 'disabled'}`);
|
||||
};
|
||||
|
||||
export const isVerboseMode = (): boolean => {
|
||||
return verboseMode;
|
||||
};
|
||||
|
||||
// Custom log method with verbose support
|
||||
export const logVerbose = (message: string): void => {
|
||||
if (verboseMode) {
|
||||
logger.log('info', `[VERBOSE] ${message}`);
|
||||
}
|
||||
};
|
||||
|
@@ -10,20 +10,22 @@ export const run = async (argvArg: any) => {
|
||||
await formatMod.run();
|
||||
}
|
||||
|
||||
|
||||
logger.log('info', `gathering facts...`);
|
||||
const aidoc = new plugins.tsdoc.AiDoc();
|
||||
await aidoc.start();
|
||||
|
||||
const nextCommitObject = await aidoc.buildNextCommitObject(paths.cwd);
|
||||
|
||||
logger.log('info', `---------
|
||||
logger.log(
|
||||
'info',
|
||||
`---------
|
||||
Next recommended commit would be:
|
||||
===========
|
||||
-> ${nextCommitObject.recommendedNextVersion}:
|
||||
-> ${nextCommitObject.recommendedNextVersionLevel}(${nextCommitObject.recommendedNextVersionScope}): ${nextCommitObject.recommendedNextVersionMessage}
|
||||
===========
|
||||
`);
|
||||
`,
|
||||
);
|
||||
const commitInteract = new plugins.smartinteract.SmartInteract();
|
||||
commitInteract.addQuestions([
|
||||
{
|
||||
@@ -72,32 +74,55 @@ export const run = async (argvArg: any) => {
|
||||
});
|
||||
|
||||
logger.log('info', `Baking commitinfo into code ...`);
|
||||
const commitInfo = new plugins.commitinfo.CommitInfo(paths.cwd, commitVersionType);
|
||||
const commitInfo = new plugins.commitinfo.CommitInfo(
|
||||
paths.cwd,
|
||||
commitVersionType,
|
||||
);
|
||||
await commitInfo.writeIntoPotentialDirs();
|
||||
|
||||
logger.log('info', `Writing changelog.md ...`);
|
||||
let changelog = nextCommitObject.changelog;
|
||||
changelog = changelog.replaceAll('{{nextVersion}}', (await commitInfo.getNextPlannedVersion()).versionString);
|
||||
changelog = changelog.replaceAll('{{nextVersionScope}}', `${await answerBucket.getAnswerFor('commitType')}(${await answerBucket.getAnswerFor('commitScope')})`);
|
||||
changelog = changelog.replaceAll('{{nextVersionMessage}}', nextCommitObject.recommendedNextVersionMessage);
|
||||
changelog = changelog.replaceAll(
|
||||
'{{nextVersion}}',
|
||||
(await commitInfo.getNextPlannedVersion()).versionString,
|
||||
);
|
||||
changelog = changelog.replaceAll(
|
||||
'{{nextVersionScope}}',
|
||||
`${await answerBucket.getAnswerFor('commitType')}(${await answerBucket.getAnswerFor('commitScope')})`,
|
||||
);
|
||||
changelog = changelog.replaceAll(
|
||||
'{{nextVersionMessage}}',
|
||||
nextCommitObject.recommendedNextVersionMessage,
|
||||
);
|
||||
if (nextCommitObject.recommendedNextVersionDetails?.length > 0) {
|
||||
changelog = changelog.replaceAll('{{nextVersionDetails}}', '- ' + nextCommitObject.recommendedNextVersionDetails.join('\n- '));
|
||||
changelog = changelog.replaceAll(
|
||||
'{{nextVersionDetails}}',
|
||||
'- ' + nextCommitObject.recommendedNextVersionDetails.join('\n- '),
|
||||
);
|
||||
} else {
|
||||
changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
|
||||
}
|
||||
|
||||
await plugins.smartfile.memory.toFs(changelog, plugins.path.join(paths.cwd, `changelog.md`));
|
||||
await plugins.smartfile.memory.toFs(
|
||||
changelog,
|
||||
plugins.path.join(paths.cwd, `changelog.md`),
|
||||
);
|
||||
|
||||
logger.log('info', `Staging files for commit:`);
|
||||
await smartshellInstance.exec(`git add -A`);
|
||||
await smartshellInstance.exec(`git commit -m "${commitString}"`);
|
||||
await smartshellInstance.exec(`npm version ${commitVersionType}`);
|
||||
if (answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true')) {
|
||||
if (
|
||||
answerBucket.getAnswerFor('pushToOrigin') &&
|
||||
!(process.env.CI === 'true')
|
||||
) {
|
||||
await smartshellInstance.exec(`git push origin master --follow-tags`);
|
||||
}
|
||||
};
|
||||
|
||||
const createCommitStringFromAnswerBucket = (answerBucket: plugins.smartinteract.AnswerBucket) => {
|
||||
const createCommitStringFromAnswerBucket = (
|
||||
answerBucket: plugins.smartinteract.AnswerBucket,
|
||||
) => {
|
||||
const commitType = answerBucket.getAnswerFor('commitType');
|
||||
const commitScope = answerBucket.getAnswerFor('commitScope');
|
||||
const commitDescription = answerBucket.getAnswerFor('commitDescription');
|
||||
|
@@ -36,7 +36,10 @@ export const run = async () => {
|
||||
const registryUrls = answerBucket.getAnswerFor(`registryUrls`).split(',');
|
||||
const oldPackageName = answerBucket.getAnswerFor(`oldPackageName`);
|
||||
const newPackageName = answerBucket.getAnswerFor(`newPackageName`);
|
||||
logger.log('info', `Deprecating package ${oldPackageName} in favour of ${newPackageName}`);
|
||||
logger.log(
|
||||
'info',
|
||||
`Deprecating package ${oldPackageName} in favour of ${newPackageName}`,
|
||||
);
|
||||
const smartshellInstance = new plugins.smartshell.Smartshell({
|
||||
executor: 'bash',
|
||||
});
|
||||
|
82
ts/mod_format/classes.baseformatter.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { FormatContext } from './classes.formatcontext.js';
|
||||
import type { IPlannedChange } from './interfaces.format.js';
|
||||
import { Project } from '../classes.project.js';
|
||||
|
||||
export abstract class BaseFormatter {
|
||||
protected context: FormatContext;
|
||||
protected project: Project;
|
||||
protected stats: any; // Will be FormatStats from context
|
||||
|
||||
constructor(context: FormatContext, project: Project) {
|
||||
this.context = context;
|
||||
this.project = project;
|
||||
this.stats = context.getFormatStats();
|
||||
}
|
||||
|
||||
abstract get name(): string;
|
||||
abstract analyze(): Promise<IPlannedChange[]>;
|
||||
abstract applyChange(change: IPlannedChange): Promise<void>;
|
||||
|
||||
async execute(changes: IPlannedChange[]): Promise<void> {
|
||||
const startTime = this.stats.moduleStartTime(this.name);
|
||||
this.stats.startModule(this.name);
|
||||
|
||||
try {
|
||||
await this.preExecute();
|
||||
|
||||
for (const change of changes) {
|
||||
try {
|
||||
await this.applyChange(change);
|
||||
this.stats.recordFileOperation(this.name, change.type, true);
|
||||
} catch (error) {
|
||||
this.stats.recordFileOperation(this.name, change.type, false);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
await this.postExecute();
|
||||
} catch (error) {
|
||||
// Don't rollback here - let the FormatPlanner handle it
|
||||
throw error;
|
||||
} finally {
|
||||
this.stats.endModule(this.name, startTime);
|
||||
}
|
||||
}
|
||||
|
||||
protected async preExecute(): Promise<void> {
|
||||
// Override in subclasses if needed
|
||||
}
|
||||
|
||||
protected async postExecute(): Promise<void> {
|
||||
// Override in subclasses if needed
|
||||
}
|
||||
|
||||
protected async modifyFile(filepath: string, content: string): Promise<void> {
|
||||
// Validate filepath before writing
|
||||
if (!filepath || filepath.trim() === '') {
|
||||
throw new Error(`Invalid empty filepath in modifyFile`);
|
||||
}
|
||||
|
||||
// Ensure we have a proper path with directory component
|
||||
// If the path has no directory component (e.g., "package.json"), prepend "./"
|
||||
let normalizedPath = filepath;
|
||||
if (!plugins.path.parse(filepath).dir) {
|
||||
normalizedPath = './' + filepath;
|
||||
}
|
||||
|
||||
await plugins.smartfile.memory.toFs(content, normalizedPath);
|
||||
}
|
||||
|
||||
protected async createFile(filepath: string, content: string): Promise<void> {
|
||||
await plugins.smartfile.memory.toFs(content, filepath);
|
||||
}
|
||||
|
||||
protected async deleteFile(filepath: string): Promise<void> {
|
||||
await plugins.smartfile.fs.remove(filepath);
|
||||
}
|
||||
|
||||
protected async shouldProcessFile(filepath: string): Promise<boolean> {
|
||||
return true;
|
||||
}
|
||||
}
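
For orientation, a concrete formatter would subclass `BaseFormatter` roughly as sketched below (hypothetical module, not part of this changeset; it only uses the `analyze`/`applyChange` contract and the `createFile` helper defined above):

```typescript
import { BaseFormatter } from './classes.baseformatter.js';
import type { IPlannedChange } from './interfaces.format.js';

export class ExampleFormatter extends BaseFormatter {
  get name(): string {
    return 'example';
  }

  async analyze(): Promise<IPlannedChange[]> {
    // A real formatter would inspect the project here; this sketch always
    // plans the creation of a single marker file.
    return [
      {
        type: 'create',
        path: '.nogit/example-formatter-output.txt',
        module: this.name,
        description: 'write an example marker file',
        content: 'created by the hypothetical example formatter\n',
      } as IPlannedChange,
    ];
  }

  async applyChange(change: IPlannedChange): Promise<void> {
    // Delegate the actual write to the BaseFormatter helper
    await this.createFile(change.path, change.content ?? '');
  }
}
```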
|
223
ts/mod_format/classes.changecache.ts
Normal file
@@ -0,0 +1,223 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as paths from '../paths.js';
|
||||
|
||||
export interface IFileCache {
|
||||
path: string;
|
||||
checksum: string;
|
||||
modified: number;
|
||||
size: number;
|
||||
}
|
||||
|
||||
export interface ICacheManifest {
|
||||
version: string;
|
||||
lastFormat: number;
|
||||
files: IFileCache[];
|
||||
}
|
||||
|
||||
export class ChangeCache {
|
||||
private cacheDir: string;
|
||||
private manifestPath: string;
|
||||
private cacheVersion = '1.0.0';
|
||||
|
||||
constructor() {
|
||||
this.cacheDir = plugins.path.join(paths.cwd, '.nogit', 'gitzone-cache');
|
||||
this.manifestPath = plugins.path.join(this.cacheDir, 'manifest.json');
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
||||
}
|
||||
|
||||
async getManifest(): Promise<ICacheManifest> {
|
||||
const defaultManifest: ICacheManifest = {
|
||||
version: this.cacheVersion,
|
||||
lastFormat: 0,
|
||||
files: [],
|
||||
};
|
||||
|
||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
||||
if (!exists) {
|
||||
return defaultManifest;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
||||
const manifest = JSON.parse(content);
|
||||
|
||||
// Validate the manifest structure
|
||||
if (this.isValidManifest(manifest)) {
|
||||
return manifest;
|
||||
} else {
|
||||
console.warn('Invalid manifest structure, returning default manifest');
|
||||
return defaultManifest;
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
`Failed to read cache manifest: ${error.message}, returning default manifest`,
|
||||
);
|
||||
// Try to delete the corrupted file
|
||||
try {
|
||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
||||
} catch (removeError) {
|
||||
// Ignore removal errors
|
||||
}
|
||||
return defaultManifest;
|
||||
}
|
||||
}
|
||||
|
||||
async saveManifest(manifest: ICacheManifest): Promise<void> {
|
||||
// Validate before saving
|
||||
if (!this.isValidManifest(manifest)) {
|
||||
throw new Error('Invalid manifest structure, cannot save');
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
||||
|
||||
// Write directly with proper JSON stringification
|
||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
||||
}
|
||||
|
||||
async hasFileChanged(filePath: string): Promise<boolean> {
|
||||
const absolutePath = plugins.path.isAbsolute(filePath)
|
||||
? filePath
|
||||
: plugins.path.join(paths.cwd, filePath);
|
||||
|
||||
// Check if file exists
|
||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
||||
if (!exists) {
|
||||
return true; // File doesn't exist, so it's "changed" (will be created)
|
||||
}
|
||||
|
||||
// Get current file stats
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
|
||||
// Skip directories
|
||||
if (stats.isDirectory()) {
|
||||
return false; // Directories are not processed
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const currentChecksum = this.calculateChecksum(content);
|
||||
|
||||
// Get cached info
|
||||
const manifest = await this.getManifest();
|
||||
const cachedFile = manifest.files.find((f) => f.path === filePath);
|
||||
|
||||
if (!cachedFile) {
|
||||
return true; // Not in cache, so it's changed
|
||||
}
|
||||
|
||||
// Compare checksums
|
||||
return (
|
||||
cachedFile.checksum !== currentChecksum ||
|
||||
cachedFile.size !== stats.size ||
|
||||
cachedFile.modified !== stats.mtimeMs
|
||||
);
|
||||
}
|
||||
|
||||
async updateFileCache(filePath: string): Promise<void> {
|
||||
const absolutePath = plugins.path.isAbsolute(filePath)
|
||||
? filePath
|
||||
: plugins.path.join(paths.cwd, filePath);
|
||||
|
||||
// Get current file stats
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
|
||||
// Skip directories
|
||||
if (stats.isDirectory()) {
|
||||
return; // Don't cache directories
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
// Update manifest
|
||||
const manifest = await this.getManifest();
|
||||
const existingIndex = manifest.files.findIndex((f) => f.path === filePath);
|
||||
|
||||
const cacheEntry: IFileCache = {
|
||||
path: filePath,
|
||||
checksum,
|
||||
modified: stats.mtimeMs,
|
||||
size: stats.size,
|
||||
};
|
||||
|
||||
if (existingIndex !== -1) {
|
||||
manifest.files[existingIndex] = cacheEntry;
|
||||
} else {
|
||||
manifest.files.push(cacheEntry);
|
||||
}
|
||||
|
||||
manifest.lastFormat = Date.now();
|
||||
await this.saveManifest(manifest);
|
||||
}
|
||||
|
||||
async getChangedFiles(filePaths: string[]): Promise<string[]> {
|
||||
const changedFiles: string[] = [];
|
||||
|
||||
for (const filePath of filePaths) {
|
||||
if (await this.hasFileChanged(filePath)) {
|
||||
changedFiles.push(filePath);
|
||||
}
|
||||
}
|
||||
|
||||
return changedFiles;
|
||||
}
|
||||
|
||||
async clean(): Promise<void> {
|
||||
const manifest = await this.getManifest();
|
||||
const validFiles: IFileCache[] = [];
|
||||
|
||||
// Remove entries for files that no longer exist
|
||||
for (const file of manifest.files) {
|
||||
const absolutePath = plugins.path.isAbsolute(file.path)
|
||||
? file.path
|
||||
: plugins.path.join(paths.cwd, file.path);
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(absolutePath)) {
|
||||
validFiles.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
manifest.files = validFiles;
|
||||
await this.saveManifest(manifest);
|
||||
}
|
||||
|
||||
private calculateChecksum(content: string | Buffer): string {
|
||||
return plugins.crypto.createHash('sha256').update(content).digest('hex');
|
||||
}
|
||||
|
||||
private isValidManifest(manifest: any): manifest is ICacheManifest {
|
||||
// Check if manifest has the required structure
|
||||
if (!manifest || typeof manifest !== 'object') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check required fields
|
||||
if (
|
||||
typeof manifest.version !== 'string' ||
|
||||
typeof manifest.lastFormat !== 'number' ||
|
||||
!Array.isArray(manifest.files)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check each file entry
|
||||
for (const file of manifest.files) {
|
||||
if (
|
||||
!file ||
|
||||
typeof file !== 'object' ||
|
||||
typeof file.path !== 'string' ||
|
||||
typeof file.checksum !== 'string' ||
|
||||
typeof file.modified !== 'number' ||
|
||||
typeof file.size !== 'number'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
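
Typical usage from a formatting pipeline might look like this (hypothetical caller, shown only to illustrate the API surface above):

```typescript
import { ChangeCache } from './classes.changecache.js';

const cache = new ChangeCache();
await cache.initialize();

// Only reprocess files whose checksum, size, or mtime changed since the last run
const candidates = ['package.json', 'tsconfig.json', 'readme.md'];
const changed = await cache.getChangedFiles(candidates);

for (const file of changed) {
  // ... run the relevant formatter on `file` ...
  await cache.updateFileCache(file);
}
```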
|
117
ts/mod_format/classes.dependency-analyzer.ts
Normal file
@@ -0,0 +1,117 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { BaseFormatter } from './classes.baseformatter.js';
|
||||
|
||||
export interface IModuleDependency {
|
||||
module: string;
|
||||
dependencies: Set<string>;
|
||||
dependents: Set<string>;
|
||||
}
|
||||
|
||||
export class DependencyAnalyzer {
|
||||
private moduleDependencies: Map<string, IModuleDependency> = new Map();
|
||||
|
||||
constructor() {
|
||||
this.initializeDependencies();
|
||||
}
|
||||
|
||||
private initializeDependencies(): void {
|
||||
// Define dependencies between format modules
|
||||
const dependencies = {
|
||||
cleanup: [], // No dependencies
|
||||
npmextra: [], // No dependencies
|
||||
license: ['npmextra'], // Depends on npmextra for config
|
||||
packagejson: ['npmextra'], // Depends on npmextra for config
|
||||
templates: ['npmextra', 'packagejson'], // Depends on both
|
||||
gitignore: ['templates'], // Depends on templates
|
||||
tsconfig: ['packagejson'], // Depends on package.json
|
||||
prettier: [
|
||||
'cleanup',
|
||||
'npmextra',
|
||||
'packagejson',
|
||||
'templates',
|
||||
'gitignore',
|
||||
'tsconfig',
|
||||
], // Runs after most others
|
||||
readme: ['npmextra', 'packagejson'], // Depends on project metadata
|
||||
copy: ['npmextra'], // Depends on config
|
||||
};
|
||||
|
||||
// Initialize all modules
|
||||
for (const [module, deps] of Object.entries(dependencies)) {
|
||||
this.moduleDependencies.set(module, {
|
||||
module,
|
||||
dependencies: new Set(deps),
|
||||
dependents: new Set(),
|
||||
});
|
||||
}
|
||||
|
||||
// Build reverse dependencies (dependents)
|
||||
for (const [module, deps] of Object.entries(dependencies)) {
|
||||
for (const dep of deps) {
|
||||
const depModule = this.moduleDependencies.get(dep);
|
||||
if (depModule) {
|
||||
depModule.dependents.add(module);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
getExecutionGroups(modules: BaseFormatter[]): BaseFormatter[][] {
|
||||
const modulesMap = new Map(modules.map((m) => [m.name, m]));
|
||||
const executed = new Set<string>();
|
||||
const groups: BaseFormatter[][] = [];
|
||||
|
||||
while (executed.size < modules.length) {
|
||||
const currentGroup: BaseFormatter[] = [];
|
||||
|
||||
for (const module of modules) {
|
||||
if (executed.has(module.name)) continue;
|
||||
|
||||
const dependency = this.moduleDependencies.get(module.name);
|
||||
if (!dependency) {
|
||||
// Unknown module, execute in isolation
|
||||
currentGroup.push(module);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if all dependencies have been executed
|
||||
const allDepsExecuted = Array.from(dependency.dependencies).every(
|
||||
(dep) => executed.has(dep) || !modulesMap.has(dep),
|
||||
);
|
||||
|
||||
if (allDepsExecuted) {
|
||||
currentGroup.push(module);
|
||||
}
|
||||
}
|
||||
|
||||
if (currentGroup.length === 0) {
|
||||
// Circular dependency or error - execute remaining modules
|
||||
for (const module of modules) {
|
||||
if (!executed.has(module.name)) {
|
||||
currentGroup.push(module);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
currentGroup.forEach((m) => executed.add(m.name));
|
||||
groups.push(currentGroup);
|
||||
}
|
||||
|
||||
return groups;
|
||||
}
|
||||
|
||||
canRunInParallel(module1: string, module2: string): boolean {
|
||||
const dep1 = this.moduleDependencies.get(module1);
|
||||
const dep2 = this.moduleDependencies.get(module2);
|
||||
|
||||
if (!dep1 || !dep2) return false;
|
||||
|
||||
// Check if module1 depends on module2 or vice versa
|
||||
return (
|
||||
!dep1.dependencies.has(module2) &&
|
||||
!dep2.dependencies.has(module1) &&
|
||||
!dep1.dependents.has(module2) &&
|
||||
!dep2.dependents.has(module1)
|
||||
);
|
||||
}
|
||||
}
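
A quick illustration of how the parallelism check behaves with the dependency table above (hypothetical usage, not part of this changeset):

```typescript
import { DependencyAnalyzer } from './classes.dependency-analyzer.js';

const analyzer = new DependencyAnalyzer();

// 'cleanup' and 'npmextra' have no dependency relationship, so they may run together
console.log(analyzer.canRunInParallel('cleanup', 'npmextra')); // true

// 'prettier' depends on 'cleanup', so these two must run sequentially
console.log(analyzer.canRunInParallel('prettier', 'cleanup')); // false
```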
|
124
ts/mod_format/classes.diffreporter.ts
Normal file
@@ -0,0 +1,124 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import type { IPlannedChange } from './interfaces.format.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export class DiffReporter {
|
||||
private diffs: Map<string, string> = new Map();
|
||||
|
||||
async generateDiff(
|
||||
filePath: string,
|
||||
oldContent: string,
|
||||
newContent: string,
|
||||
): Promise<string> {
|
||||
const diff = plugins.smartdiff.createDiff(oldContent, newContent);
|
||||
this.diffs.set(filePath, diff);
|
||||
return diff;
|
||||
}
|
||||
|
||||
async generateDiffForChange(change: IPlannedChange): Promise<string | null> {
|
||||
if (change.type !== 'modify') {
|
||||
return null;
|
||||
}
|
||||
|
||||
try {
|
||||
const exists = await plugins.smartfile.fs.fileExists(change.path);
|
||||
if (!exists) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const currentContent = await plugins.smartfile.fs.toStringSync(
|
||||
change.path,
|
||||
);
|
||||
|
||||
// For planned changes, we need the new content
|
||||
if (!change.content) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return await this.generateDiff(
|
||||
change.path,
|
||||
currentContent,
|
||||
change.content,
|
||||
);
|
||||
} catch (error) {
|
||||
logger.log(
|
||||
'error',
|
||||
`Failed to generate diff for ${change.path}: ${error.message}`,
|
||||
);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
displayDiff(filePath: string, diff?: string): void {
|
||||
const diffToShow = diff || this.diffs.get(filePath);
|
||||
|
||||
if (!diffToShow) {
|
||||
logger.log('warn', `No diff available for ${filePath}`);
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`\n${this.formatDiffHeader(filePath)}`);
|
||||
console.log(this.colorDiff(diffToShow));
|
||||
console.log('━'.repeat(50));
|
||||
}
|
||||
|
||||
displayAllDiffs(): void {
|
||||
if (this.diffs.size === 0) {
|
||||
logger.log('info', 'No diffs to display');
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('\nFile Changes:');
|
||||
console.log('═'.repeat(50));
|
||||
|
||||
for (const [filePath, diff] of this.diffs) {
|
||||
this.displayDiff(filePath, diff);
|
||||
}
|
||||
}
|
||||
|
||||
private formatDiffHeader(filePath: string): string {
|
||||
return `📄 ${filePath}`;
|
||||
}
|
||||
|
||||
private colorDiff(diff: string): string {
|
||||
const lines = diff.split('\n');
|
||||
const coloredLines = lines.map((line) => {
|
||||
if (line.startsWith('+') && !line.startsWith('+++')) {
|
||||
return `\x1b[32m${line}\x1b[0m`; // Green for additions
|
||||
} else if (line.startsWith('-') && !line.startsWith('---')) {
|
||||
return `\x1b[31m${line}\x1b[0m`; // Red for deletions
|
||||
} else if (line.startsWith('@')) {
|
||||
return `\x1b[36m${line}\x1b[0m`; // Cyan for line numbers
|
||||
} else {
|
||||
return line;
|
||||
}
|
||||
});
|
||||
|
||||
return coloredLines.join('\n');
|
||||
}
|
||||
|
||||
async saveDiffReport(outputPath: string): Promise<void> {
|
||||
const report = {
|
||||
timestamp: new Date().toISOString(),
|
||||
totalFiles: this.diffs.size,
|
||||
diffs: Array.from(this.diffs.entries()).map(([path, diff]) => ({
|
||||
path,
|
||||
diff,
|
||||
})),
|
||||
};
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(report, null, 2),
|
||||
outputPath,
|
||||
);
|
||||
logger.log('info', `Diff report saved to ${outputPath}`);
|
||||
}
|
||||
|
||||
hasAnyDiffs(): boolean {
|
||||
return this.diffs.size > 0;
|
||||
}
|
||||
|
||||
getDiffCount(): number {
|
||||
return this.diffs.size;
|
||||
}
|
||||
}
|
14
ts/mod_format/classes.formatcontext.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { FormatStats } from './classes.formatstats.js';
|
||||
|
||||
export class FormatContext {
|
||||
private formatStats: FormatStats;
|
||||
|
||||
constructor() {
|
||||
this.formatStats = new FormatStats();
|
||||
}
|
||||
|
||||
getFormatStats(): FormatStats {
|
||||
return this.formatStats;
|
||||
}
|
||||
}
|
170
ts/mod_format/classes.formatplanner.ts
Normal file
@@ -0,0 +1,170 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { FormatContext } from './classes.formatcontext.js';
|
||||
import { BaseFormatter } from './classes.baseformatter.js';
|
||||
import type { IFormatPlan, IPlannedChange } from './interfaces.format.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
import { DependencyAnalyzer } from './classes.dependency-analyzer.js';
|
||||
import { DiffReporter } from './classes.diffreporter.js';
|
||||
|
||||
export class FormatPlanner {
|
||||
private plannedChanges: Map<string, IPlannedChange[]> = new Map();
|
||||
private dependencyAnalyzer = new DependencyAnalyzer();
|
||||
private diffReporter = new DiffReporter();
|
||||
|
||||
async planFormat(modules: BaseFormatter[]): Promise<IFormatPlan> {
|
||||
const plan: IFormatPlan = {
|
||||
summary: {
|
||||
totalFiles: 0,
|
||||
filesAdded: 0,
|
||||
filesModified: 0,
|
||||
filesRemoved: 0,
|
||||
estimatedTime: 0,
|
||||
},
|
||||
changes: [],
|
||||
warnings: [],
|
||||
};
|
||||
|
||||
for (const module of modules) {
|
||||
try {
|
||||
const changes = await module.analyze();
|
||||
this.plannedChanges.set(module.name, changes);
|
||||
|
||||
for (const change of changes) {
|
||||
plan.changes.push(change);
|
||||
|
||||
// Update summary
|
||||
switch (change.type) {
|
||||
case 'create':
|
||||
plan.summary.filesAdded++;
|
||||
break;
|
||||
case 'modify':
|
||||
plan.summary.filesModified++;
|
||||
break;
|
||||
case 'delete':
|
||||
plan.summary.filesRemoved++;
|
||||
break;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
plan.warnings.push({
|
||||
level: 'error',
|
||||
message: `Failed to analyze module ${module.name}: ${error.message}`,
|
||||
module: module.name,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
plan.summary.totalFiles =
|
||||
plan.summary.filesAdded +
|
||||
plan.summary.filesModified +
|
||||
plan.summary.filesRemoved;
|
||||
plan.summary.estimatedTime = plan.summary.totalFiles * 100; // 100ms per file estimate
|
||||
|
||||
return plan;
|
||||
}
|
||||
|
||||
async executePlan(
|
||||
plan: IFormatPlan,
|
||||
modules: BaseFormatter[],
|
||||
context: FormatContext,
|
||||
parallel: boolean = false,
|
||||
): Promise<void> {
|
||||
const startTime = Date.now();
|
||||
|
||||
try {
|
||||
// Always use sequential execution to avoid race conditions
|
||||
for (const module of modules) {
|
||||
const changes = this.plannedChanges.get(module.name) || [];
|
||||
|
||||
if (changes.length > 0) {
|
||||
logger.log('info', `Executing ${module.name} formatter...`);
|
||||
await module.execute(changes);
|
||||
}
|
||||
}
|
||||
|
||||
const endTime = Date.now();
|
||||
const duration = endTime - startTime;
|
||||
logger.log('info', `Format operations completed in ${duration}ms`);
|
||||
} catch (error) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async displayPlan(
|
||||
plan: IFormatPlan,
|
||||
detailed: boolean = false,
|
||||
): Promise<void> {
|
||||
console.log('\nFormat Plan:');
|
||||
console.log('━'.repeat(50));
|
||||
console.log(`Summary: ${plan.summary.totalFiles} files will be changed`);
|
||||
console.log(` • ${plan.summary.filesAdded} new files`);
|
||||
console.log(` • ${plan.summary.filesModified} modified files`);
|
||||
console.log(` • ${plan.summary.filesRemoved} deleted files`);
|
||||
console.log('');
|
||||
console.log('Changes by module:');
|
||||
|
||||
// Group changes by module
|
||||
const changesByModule = new Map<string, IPlannedChange[]>();
|
||||
for (const change of plan.changes) {
|
||||
const moduleChanges = changesByModule.get(change.module) || [];
|
||||
moduleChanges.push(change);
|
||||
changesByModule.set(change.module, moduleChanges);
|
||||
}
|
||||
|
||||
for (const [module, changes] of changesByModule) {
|
||||
console.log(
|
||||
`\n${this.getModuleIcon(module)} ${module} (${changes.length} ${changes.length === 1 ? 'file' : 'files'})`,
|
||||
);
|
||||
|
||||
for (const change of changes) {
|
||||
const icon = this.getChangeIcon(change.type);
|
||||
console.log(` ${icon} ${change.path} - ${change.description}`);
|
||||
|
||||
// Show diff for modified files if detailed view is requested
|
||||
if (detailed && change.type === 'modify') {
|
||||
const diff = await this.diffReporter.generateDiffForChange(change);
|
||||
if (diff) {
|
||||
this.diffReporter.displayDiff(change.path, diff);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (plan.warnings.length > 0) {
|
||||
console.log('\nWarnings:');
|
||||
for (const warning of plan.warnings) {
|
||||
const icon = warning.level === 'error' ? '❌' : '⚠️';
|
||||
console.log(` ${icon} ${warning.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n' + '━'.repeat(50));
|
||||
}
|
||||
|
||||
private getModuleIcon(module: string): string {
|
||||
const icons: Record<string, string> = {
|
||||
packagejson: '📦',
|
||||
license: '📝',
|
||||
tsconfig: '🔧',
|
||||
cleanup: '🚮',
|
||||
gitignore: '🔒',
|
||||
prettier: '✨',
|
||||
readme: '📖',
|
||||
templates: '📄',
|
||||
npmextra: '⚙️',
|
||||
copy: '📋',
|
||||
};
|
||||
return icons[module] || '📁';
|
||||
}
|
||||
|
||||
private getChangeIcon(type: 'create' | 'modify' | 'delete'): string {
|
||||
switch (type) {
|
||||
case 'create':
|
||||
return '✅';
|
||||
case 'modify':
|
||||
return '✏️';
|
||||
case 'delete':
|
||||
return '❌';
|
||||
}
|
||||
}
|
||||
}
|
229
ts/mod_format/classes.formatstats.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export interface IModuleStats {
|
||||
name: string;
|
||||
filesProcessed: number;
|
||||
executionTime: number;
|
||||
errors: number;
|
||||
successes: number;
|
||||
filesCreated: number;
|
||||
filesModified: number;
|
||||
filesDeleted: number;
|
||||
}
|
||||
|
||||
export interface IFormatStats {
|
||||
totalExecutionTime: number;
|
||||
startTime: number;
|
||||
endTime: number;
|
||||
moduleStats: Map<string, IModuleStats>;
|
||||
overallStats: {
|
||||
totalFiles: number;
|
||||
totalCreated: number;
|
||||
totalModified: number;
|
||||
totalDeleted: number;
|
||||
totalErrors: number;
|
||||
cacheHits: number;
|
||||
cacheMisses: number;
|
||||
};
|
||||
}
|
||||
|
||||
export class FormatStats {
|
||||
private stats: IFormatStats;
|
||||
|
||||
constructor() {
|
||||
this.stats = {
|
||||
totalExecutionTime: 0,
|
||||
startTime: Date.now(),
|
||||
endTime: 0,
|
||||
moduleStats: new Map(),
|
||||
overallStats: {
|
||||
totalFiles: 0,
|
||||
totalCreated: 0,
|
||||
totalModified: 0,
|
||||
totalDeleted: 0,
|
||||
totalErrors: 0,
|
||||
cacheHits: 0,
|
||||
cacheMisses: 0,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
startModule(moduleName: string): void {
|
||||
this.stats.moduleStats.set(moduleName, {
|
||||
name: moduleName,
|
||||
filesProcessed: 0,
|
||||
executionTime: 0,
|
||||
errors: 0,
|
||||
successes: 0,
|
||||
filesCreated: 0,
|
||||
filesModified: 0,
|
||||
filesDeleted: 0,
|
||||
});
|
||||
}
|
||||
|
||||
moduleStartTime(moduleName: string): number {
|
||||
return Date.now();
|
||||
}
|
||||
|
||||
endModule(moduleName: string, startTime: number): void {
|
||||
const moduleStats = this.stats.moduleStats.get(moduleName);
|
||||
if (moduleStats) {
|
||||
moduleStats.executionTime = Date.now() - startTime;
|
||||
}
|
||||
}
|
||||
|
||||
recordFileOperation(
|
||||
moduleName: string,
|
||||
operation: 'create' | 'modify' | 'delete',
|
||||
success: boolean = true,
|
||||
): void {
|
||||
const moduleStats = this.stats.moduleStats.get(moduleName);
|
||||
if (!moduleStats) return;
|
||||
|
||||
moduleStats.filesProcessed++;
|
||||
|
||||
if (success) {
|
||||
moduleStats.successes++;
|
||||
this.stats.overallStats.totalFiles++;
|
||||
|
||||
switch (operation) {
|
||||
case 'create':
|
||||
moduleStats.filesCreated++;
|
||||
this.stats.overallStats.totalCreated++;
|
||||
break;
|
||||
case 'modify':
|
||||
moduleStats.filesModified++;
|
||||
this.stats.overallStats.totalModified++;
|
||||
break;
|
||||
case 'delete':
|
||||
moduleStats.filesDeleted++;
|
||||
this.stats.overallStats.totalDeleted++;
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
moduleStats.errors++;
|
||||
this.stats.overallStats.totalErrors++;
|
||||
}
|
||||
}
|
||||
|
||||
recordCacheHit(): void {
|
||||
this.stats.overallStats.cacheHits++;
|
||||
}
|
||||
|
||||
recordCacheMiss(): void {
|
||||
this.stats.overallStats.cacheMisses++;
|
||||
}
|
||||
|
||||
finish(): void {
|
||||
this.stats.endTime = Date.now();
|
||||
this.stats.totalExecutionTime = this.stats.endTime - this.stats.startTime;
|
||||
}
|
||||
|
||||
displayStats(): void {
|
||||
console.log('\n📊 Format Operation Statistics:');
|
||||
console.log('═'.repeat(50));
|
||||
|
||||
// Overall stats
|
||||
console.log('\nOverall Summary:');
|
||||
console.log(
|
||||
` Total Execution Time: ${this.formatDuration(this.stats.totalExecutionTime)}`,
|
||||
);
|
||||
console.log(` Files Processed: ${this.stats.overallStats.totalFiles}`);
|
||||
console.log(` • Created: ${this.stats.overallStats.totalCreated}`);
|
||||
console.log(` • Modified: ${this.stats.overallStats.totalModified}`);
|
||||
console.log(` • Deleted: ${this.stats.overallStats.totalDeleted}`);
|
||||
console.log(` Errors: ${this.stats.overallStats.totalErrors}`);
|
||||
|
||||
if (
|
||||
this.stats.overallStats.cacheHits > 0 ||
|
||||
this.stats.overallStats.cacheMisses > 0
|
||||
) {
|
||||
const cacheHitRate =
|
||||
(this.stats.overallStats.cacheHits /
|
||||
(this.stats.overallStats.cacheHits +
|
||||
this.stats.overallStats.cacheMisses)) *
|
||||
100;
|
||||
console.log(` Cache Hit Rate: ${cacheHitRate.toFixed(1)}%`);
|
||||
console.log(` • Hits: ${this.stats.overallStats.cacheHits}`);
|
||||
console.log(` • Misses: ${this.stats.overallStats.cacheMisses}`);
|
||||
}
|
||||
|
||||
// Module stats
|
||||
console.log('\nModule Breakdown:');
|
||||
console.log('─'.repeat(50));
|
||||
|
||||
const sortedModules = Array.from(this.stats.moduleStats.values()).sort(
|
||||
(a, b) => b.filesProcessed - a.filesProcessed,
|
||||
);
|
||||
|
||||
for (const moduleStats of sortedModules) {
|
||||
console.log(
|
||||
`\n${this.getModuleIcon(moduleStats.name)} ${moduleStats.name}:`,
|
||||
);
|
||||
console.log(
|
||||
` Execution Time: ${this.formatDuration(moduleStats.executionTime)}`,
|
||||
);
|
||||
console.log(` Files Processed: ${moduleStats.filesProcessed}`);
|
||||
|
||||
if (moduleStats.filesCreated > 0) {
|
||||
console.log(` • Created: ${moduleStats.filesCreated}`);
|
||||
}
|
||||
if (moduleStats.filesModified > 0) {
|
||||
console.log(` • Modified: ${moduleStats.filesModified}`);
|
||||
}
|
||||
if (moduleStats.filesDeleted > 0) {
|
||||
console.log(` • Deleted: ${moduleStats.filesDeleted}`);
|
||||
}
|
||||
|
||||
if (moduleStats.errors > 0) {
|
||||
console.log(` ❌ Errors: ${moduleStats.errors}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log('\n' + '═'.repeat(50));
|
||||
}
|
||||
|
||||
async saveReport(outputPath: string): Promise<void> {
|
||||
const report = {
|
||||
timestamp: new Date().toISOString(),
|
||||
executionTime: this.stats.totalExecutionTime,
|
||||
overallStats: this.stats.overallStats,
|
||||
moduleStats: Array.from(this.stats.moduleStats.values()),
|
||||
};
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(report, null, 2),
|
||||
outputPath,
|
||||
);
|
||||
logger.log('info', `Statistics report saved to ${outputPath}`);
|
||||
}
|
||||
|
||||
private formatDuration(ms: number): string {
|
||||
if (ms < 1000) {
|
||||
return `${ms}ms`;
|
||||
} else if (ms < 60000) {
|
||||
return `${(ms / 1000).toFixed(1)}s`;
|
||||
} else {
|
||||
const minutes = Math.floor(ms / 60000);
|
||||
const seconds = Math.floor((ms % 60000) / 1000);
|
||||
return `${minutes}m ${seconds}s`;
|
||||
}
|
||||
}
|
||||
|
||||
private getModuleIcon(module: string): string {
|
||||
const icons: Record<string, string> = {
|
||||
packagejson: '📦',
|
||||
license: '📝',
|
||||
tsconfig: '🔧',
|
||||
cleanup: '🚮',
|
||||
gitignore: '🔒',
|
||||
prettier: '✨',
|
||||
readme: '📖',
|
||||
templates: '📄',
|
||||
npmextra: '⚙️',
|
||||
copy: '📋',
|
||||
};
|
||||
return icons[module] || '📁';
|
||||
}
|
||||
}
|
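For orientation, a minimal usage sketch of the statistics tracker defined above; `stats` stands for an instance of that class, and the surrounding formatter wiring is assumed rather than shown in this diff:
// Illustrative sketch only - `stats` is an instance of the statistics class above.
stats.startModule('prettier');
const startTime = stats.moduleStartTime('prettier');
stats.recordFileOperation('prettier', 'modify', true);   // one file reformatted
stats.recordFileOperation('prettier', 'modify', false);  // one file failed
stats.endModule('prettier', startTime);
stats.finish();
stats.displayStats();
await stats.saveReport('.nogit/format-stats.json');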
318
ts/mod_format/classes.rollbackmanager.ts
Normal file
@@ -0,0 +1,318 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as paths from '../paths.js';
|
||||
import type { IFormatOperation } from './interfaces.format.js';
|
||||
|
||||
export class RollbackManager {
|
||||
private backupDir: string;
|
||||
private manifestPath: string;
|
||||
|
||||
constructor() {
|
||||
this.backupDir = plugins.path.join(paths.cwd, '.nogit', 'gitzone-backups');
|
||||
this.manifestPath = plugins.path.join(this.backupDir, 'manifest.json');
|
||||
}
|
||||
|
||||
async createOperation(): Promise<IFormatOperation> {
|
||||
await this.ensureBackupDir();
|
||||
|
||||
const operation: IFormatOperation = {
|
||||
id: this.generateOperationId(),
|
||||
timestamp: Date.now(),
|
||||
files: [],
|
||||
status: 'pending',
|
||||
};
|
||||
|
||||
await this.updateManifest(operation);
|
||||
return operation;
|
||||
}
|
||||
|
||||
async backupFile(filepath: string, operationId: string): Promise<void> {
|
||||
const operation = await this.getOperation(operationId);
|
||||
if (!operation) {
|
||||
throw new Error(`Operation ${operationId} not found`);
|
||||
}
|
||||
|
||||
const absolutePath = plugins.path.isAbsolute(filepath)
|
||||
? filepath
|
||||
: plugins.path.join(paths.cwd, filepath);
|
||||
|
||||
// Check if file exists
|
||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
||||
if (!exists) {
|
||||
// File doesn't exist yet (will be created), so we skip backup
|
||||
return;
|
||||
}
|
||||
|
||||
// Read file content and metadata
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
// Create backup
|
||||
const backupPath = this.getBackupPath(operationId, filepath);
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(backupPath));
|
||||
await plugins.smartfile.memory.toFs(content, backupPath);
|
||||
|
||||
// Update operation
|
||||
operation.files.push({
|
||||
path: filepath,
|
||||
originalContent: content,
|
||||
checksum,
|
||||
permissions: stats.mode.toString(8),
|
||||
});
|
||||
|
||||
await this.updateManifest(operation);
|
||||
}
|
||||
|
||||
async rollback(operationId: string): Promise<void> {
|
||||
const operation = await this.getOperation(operationId);
|
||||
if (!operation) {
|
||||
// Operation doesn't exist, might have already been rolled back or never created
|
||||
console.warn(`Operation ${operationId} not found for rollback, skipping`);
|
||||
return;
|
||||
}
|
||||
|
||||
if (operation.status === 'rolled-back') {
|
||||
throw new Error(`Operation ${operationId} has already been rolled back`);
|
||||
}
|
||||
|
||||
// Restore files in reverse order
|
||||
for (let i = operation.files.length - 1; i >= 0; i--) {
|
||||
const file = operation.files[i];
|
||||
const absolutePath = plugins.path.isAbsolute(file.path)
|
||||
? file.path
|
||||
: plugins.path.join(paths.cwd, file.path);
|
||||
|
||||
// Verify backup integrity
|
||||
const backupPath = this.getBackupPath(operationId, file.path);
|
||||
const backupContent = plugins.smartfile.fs.toStringSync(backupPath);
|
||||
const backupChecksum = this.calculateChecksum(backupContent);
|
||||
|
||||
if (backupChecksum !== file.checksum) {
|
||||
throw new Error(`Backup integrity check failed for ${file.path}`);
|
||||
}
|
||||
|
||||
// Restore file
|
||||
await plugins.smartfile.memory.toFs(file.originalContent, absolutePath);
|
||||
|
||||
// Restore permissions
|
||||
const mode = parseInt(file.permissions, 8);
|
||||
// Note: Permissions restoration may not work on all platforms
|
||||
}
|
||||
|
||||
// Update operation status
|
||||
operation.status = 'rolled-back';
|
||||
await this.updateManifest(operation);
|
||||
}
|
||||
|
||||
async markComplete(operationId: string): Promise<void> {
|
||||
const operation = await this.getOperation(operationId);
|
||||
if (!operation) {
|
||||
throw new Error(`Operation ${operationId} not found`);
|
||||
}
|
||||
|
||||
operation.status = 'completed';
|
||||
await this.updateManifest(operation);
|
||||
}
|
||||
|
||||
async cleanOldBackups(retentionDays: number): Promise<void> {
|
||||
const manifest = await this.getManifest();
|
||||
const cutoffTime = Date.now() - retentionDays * 24 * 60 * 60 * 1000;
|
||||
|
||||
const operationsToDelete = manifest.operations.filter(
|
||||
(op) => op.timestamp < cutoffTime && op.status === 'completed',
|
||||
);
|
||||
|
||||
for (const operation of operationsToDelete) {
|
||||
// Remove backup files
|
||||
const operationDir = plugins.path.join(
|
||||
this.backupDir,
|
||||
'operations',
|
||||
operation.id,
|
||||
);
|
||||
await plugins.smartfile.fs.remove(operationDir);
|
||||
|
||||
// Remove from manifest
|
||||
manifest.operations = manifest.operations.filter(
|
||||
(op) => op.id !== operation.id,
|
||||
);
|
||||
}
|
||||
|
||||
await this.saveManifest(manifest);
|
||||
}
|
||||
|
||||
async verifyBackup(operationId: string): Promise<boolean> {
|
||||
const operation = await this.getOperation(operationId);
|
||||
if (!operation) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (const file of operation.files) {
|
||||
const backupPath = this.getBackupPath(operationId, file.path);
|
||||
const exists = await plugins.smartfile.fs.fileExists(backupPath);
|
||||
|
||||
if (!exists) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(backupPath);
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
if (checksum !== file.checksum) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
async listBackups(): Promise<IFormatOperation[]> {
|
||||
const manifest = await this.getManifest();
|
||||
return manifest.operations;
|
||||
}
|
||||
|
||||
private async ensureBackupDir(): Promise<void> {
|
||||
await plugins.smartfile.fs.ensureDir(this.backupDir);
|
||||
await plugins.smartfile.fs.ensureDir(
|
||||
plugins.path.join(this.backupDir, 'operations'),
|
||||
);
|
||||
}
|
||||
|
||||
private generateOperationId(): string {
|
||||
const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
|
||||
const random = Math.random().toString(36).substring(2, 8);
|
||||
return `${timestamp}-${random}`;
|
||||
}
|
||||
|
||||
private getBackupPath(operationId: string, filepath: string): string {
|
||||
const filename = plugins.path.basename(filepath);
|
||||
const dir = plugins.path.dirname(filepath);
|
||||
const safeDir = dir.replace(/[/\\]/g, '__');
|
||||
return plugins.path.join(
|
||||
this.backupDir,
|
||||
'operations',
|
||||
operationId,
|
||||
'files',
|
||||
safeDir,
|
||||
`${filename}.backup`,
|
||||
);
|
||||
}
|
||||
|
||||
private calculateChecksum(content: string | Buffer): string {
|
||||
return plugins.crypto.createHash('sha256').update(content).digest('hex');
|
||||
}
|
||||
|
||||
private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
|
||||
const defaultManifest = { operations: [] };
|
||||
|
||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
||||
if (!exists) {
|
||||
return defaultManifest;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
||||
const manifest = JSON.parse(content);
|
||||
|
||||
// Validate the manifest structure
|
||||
if (this.isValidManifest(manifest)) {
|
||||
return manifest;
|
||||
} else {
|
||||
console.warn(
|
||||
'Invalid rollback manifest structure, returning default manifest',
|
||||
);
|
||||
return defaultManifest;
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn(
|
||||
`Failed to read rollback manifest: ${error.message}, returning default manifest`,
|
||||
);
|
||||
// Try to delete the corrupted file
|
||||
try {
|
||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
||||
} catch (removeError) {
|
||||
// Ignore removal errors
|
||||
}
|
||||
return defaultManifest;
|
||||
}
|
||||
}
|
||||
|
||||
private async saveManifest(manifest: {
|
||||
operations: IFormatOperation[];
|
||||
}): Promise<void> {
|
||||
// Validate before saving
|
||||
if (!this.isValidManifest(manifest)) {
|
||||
throw new Error('Invalid rollback manifest structure, cannot save');
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
await this.ensureBackupDir();
|
||||
|
||||
// Write directly with proper JSON stringification
|
||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
||||
}
|
||||
|
||||
private async getOperation(
|
||||
operationId: string,
|
||||
): Promise<IFormatOperation | null> {
|
||||
const manifest = await this.getManifest();
|
||||
return manifest.operations.find((op) => op.id === operationId) || null;
|
||||
}
|
||||
|
||||
private async updateManifest(operation: IFormatOperation): Promise<void> {
|
||||
const manifest = await this.getManifest();
|
||||
const existingIndex = manifest.operations.findIndex(
|
||||
(op) => op.id === operation.id,
|
||||
);
|
||||
|
||||
if (existingIndex !== -1) {
|
||||
manifest.operations[existingIndex] = operation;
|
||||
} else {
|
||||
manifest.operations.push(operation);
|
||||
}
|
||||
|
||||
await this.saveManifest(manifest);
|
||||
}
|
||||
|
||||
private isValidManifest(
|
||||
manifest: any,
|
||||
): manifest is { operations: IFormatOperation[] } {
|
||||
// Check if manifest has the required structure
|
||||
if (!manifest || typeof manifest !== 'object') {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check required fields
|
||||
if (!Array.isArray(manifest.operations)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check each operation entry
|
||||
for (const operation of manifest.operations) {
|
||||
if (
|
||||
!operation ||
|
||||
typeof operation !== 'object' ||
|
||||
typeof operation.id !== 'string' ||
|
||||
typeof operation.timestamp !== 'number' ||
|
||||
typeof operation.status !== 'string' ||
|
||||
!Array.isArray(operation.files)
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Check each file in the operation
|
||||
for (const file of operation.files) {
|
||||
if (
|
||||
!file ||
|
||||
typeof file !== 'object' ||
|
||||
typeof file.path !== 'string' ||
|
||||
typeof file.checksum !== 'string'
|
||||
) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
}
|
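A minimal sketch of how the RollbackManager above is meant to be driven; the calling code is illustrative and not part of this commit:
// Illustrative sketch only.
const rollbackManager = new RollbackManager();
const operation = await rollbackManager.createOperation();
try {
  await rollbackManager.backupFile('package.json', operation.id);
  // ... apply formatting changes to package.json here ...
  await rollbackManager.markComplete(operation.id);
} catch (error) {
  // Restore the backed-up files if anything goes wrong.
  await rollbackManager.rollback(operation.id);
  throw error;
}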
@@ -4,14 +4,21 @@ import * as paths from '../paths.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
import { Project } from '../classes.project.js';
|
||||
|
||||
const filesToDelete = ['defaults.yml', 'yarn.lock', 'package-lock.json', 'tslint.json'];
|
||||
const filesToDelete = [
|
||||
'defaults.yml',
|
||||
'yarn.lock',
|
||||
'package-lock.json',
|
||||
'tslint.json',
|
||||
];
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
for (const relativeFilePath of filesToDelete) {
|
||||
const fileExists = plugins.smartfile.fs.fileExistsSync(relativeFilePath);
|
||||
if (fileExists) {
|
||||
logger.log('info', `Found ${relativeFilePath}! Removing it!`);
|
||||
plugins.smartfile.fs.removeSync(plugins.path.join(paths.cwd, relativeFilePath));
|
||||
plugins.smartfile.fs.removeSync(
|
||||
plugins.path.join(paths.cwd, relativeFilePath),
|
||||
);
|
||||
} else {
|
||||
logger.log('info', `Project is free of ${relativeFilePath}`);
|
||||
}
|
||||
|
@@ -1,6 +1,85 @@
|
||||
import type { Project } from '../classes.project.js';
|
||||
import * as plugins from '../plugins.js';
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const gitzoneConfig = await projectArg.gitzoneConfig;
|
||||
|
||||
// Get copy configuration from npmextra.json
|
||||
const npmextraConfig = new plugins.npmextra.Npmextra();
|
||||
const copyConfig = npmextraConfig.dataFor<any>('gitzone.format.copy', {
|
||||
patterns: [],
|
||||
});
|
||||
|
||||
if (!copyConfig.patterns || copyConfig.patterns.length === 0) {
|
||||
logger.log('info', 'No copy patterns configured in npmextra.json');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const pattern of copyConfig.patterns) {
|
||||
if (!pattern.from || !pattern.to) {
|
||||
logger.log('warn', 'Invalid copy pattern - missing "from" or "to" field');
|
||||
continue;
|
||||
}
|
||||
|
||||
try {
|
||||
// Handle glob patterns
|
||||
const files = await plugins.smartfile.fs.listFileTree('.', pattern.from);
|
||||
|
||||
for (const file of files) {
|
||||
const sourcePath = file;
|
||||
let destPath = pattern.to;
|
||||
|
||||
// If destination is a directory, preserve filename
|
||||
if (pattern.to.endsWith('/')) {
|
||||
const filename = plugins.path.basename(file);
|
||||
destPath = plugins.path.join(pattern.to, filename);
|
||||
}
|
||||
|
||||
// Handle template variables in destination path
|
||||
if (pattern.preservePath) {
|
||||
const relativePath = plugins.path.relative(
|
||||
plugins.path.dirname(pattern.from.replace(/\*/g, '')),
|
||||
file,
|
||||
);
|
||||
destPath = plugins.path.join(pattern.to, relativePath);
|
||||
}
|
||||
|
||||
// Ensure destination directory exists
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(destPath));
|
||||
|
||||
// Copy file
|
||||
await plugins.smartfile.fs.copy(sourcePath, destPath);
|
||||
logger.log('info', `Copied ${sourcePath} to ${destPath}`);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.log(
|
||||
'error',
|
||||
`Failed to copy pattern ${pattern.from}: ${error.message}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Example npmextra.json configuration:
|
||||
* {
|
||||
* "gitzone": {
|
||||
* "format": {
|
||||
* "copy": {
|
||||
* "patterns": [
|
||||
* {
|
||||
* "from": "src/assets/*",
|
||||
* "to": "dist/assets/",
|
||||
* "preservePath": true
|
||||
* },
|
||||
* {
|
||||
* "from": "config/*.json",
|
||||
* "to": "dist/"
|
||||
* }
|
||||
* ]
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
*/
|
||||
|
@@ -8,14 +8,40 @@ const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const gitignoreExists = await plugins.smartfile.fs.fileExists(gitignorePath);
|
||||
const templateModule = await import('../mod_template/index.js');
|
||||
const ciTemplate = await templateModule.getTemplate('gitignore');
|
||||
let customContent = '';
|
||||
|
||||
if (gitignoreExists) {
|
||||
// lets get the existing gitignore file
|
||||
const existingGitIgnoreString = plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
let customPart = existingGitIgnoreString.split('# custom\n')[1];
|
||||
customPart ? null : (customPart = '');
|
||||
const existingGitIgnoreString =
|
||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
|
||||
// Check for different custom section markers
|
||||
const customMarkers = ['#------# custom', '# custom'];
|
||||
for (const marker of customMarkers) {
|
||||
const splitResult = existingGitIgnoreString.split(marker);
|
||||
if (splitResult.length > 1) {
|
||||
// Get everything after the marker (excluding the marker itself)
|
||||
customContent = splitResult[1].trim();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Write the template
|
||||
const templateModule = await import('../mod_template/index.js');
|
||||
const ciTemplate = await templateModule.getTemplate('gitignore');
|
||||
await ciTemplate.writeToDisk(paths.cwd);
|
||||
|
||||
// Append the custom content if it exists
|
||||
if (customContent) {
|
||||
const newGitignoreContent =
|
||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
// The template already ends with "#------# custom", so just append the content
|
||||
const finalContent =
|
||||
newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
|
||||
await plugins.smartfile.fs.toFs(finalContent, gitignorePath);
|
||||
logger.log('info', 'Updated .gitignore while preserving custom section!');
|
||||
} else {
|
||||
logger.log('info', 'Added a .gitignore!');
|
||||
}
|
||||
ciTemplate.writeToDisk(paths.cwd);
|
||||
logger.log('info', 'Added a .gitignore!');
|
||||
};
|
||||
|
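For reference, a sketch of the .gitignore layout this marker handling expects; everything below the marker is treated as the preserved custom section, and the entries shown are purely illustrative:
# (template-managed entries above this line)
#------# custom
.my-local-cache/
scratch-notes.md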
@@ -24,7 +24,9 @@ export const run = async (projectArg: Project) => {
|
||||
} else {
|
||||
logger.log('error', 'Error -> licenses failed. Here is why:');
|
||||
for (const failedModule of licenseCheckResult.failingModules) {
|
||||
console.log(`${failedModule.name} fails with license ${failedModule.license}`);
|
||||
console.log(
|
||||
`${failedModule.name} fails with license ${failedModule.license}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
@@ -29,7 +29,12 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
const interactInstance = new plugins.smartinteract.SmartInteract();
|
||||
for (const expectedRepoInformationItem of expectedRepoInformation) {
|
||||
if (!plugins.smartobject.smartGet(npmextraJson.gitzone, expectedRepoInformationItem)) {
|
||||
if (
|
||||
!plugins.smartobject.smartGet(
|
||||
npmextraJson.gitzone,
|
||||
expectedRepoInformationItem,
|
||||
)
|
||||
) {
|
||||
interactInstance.addQuestions([
|
||||
{
|
||||
message: `What is the value of ${expectedRepoInformationItem}`,
|
||||
@@ -43,7 +48,9 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
const answerbucket = await interactInstance.runQueue();
|
||||
for (const expectedRepoInformationItem of expectedRepoInformation) {
|
||||
const cliProvidedValue = answerbucket.getAnswerFor(expectedRepoInformationItem);
|
||||
const cliProvidedValue = answerbucket.getAnswerFor(
|
||||
expectedRepoInformationItem,
|
||||
);
|
||||
if (cliProvidedValue) {
|
||||
plugins.smartobject.smartAdd(
|
||||
npmextraJson.gitzone,
|
||||
|
@@ -13,7 +13,61 @@ const ensureDependency = async (
|
||||
position: 'dep' | 'devDep' | 'everywhere',
|
||||
constraint: 'exclude' | 'include' | 'latest',
|
||||
dependencyArg: string,
|
||||
) => {};
|
||||
) => {
|
||||
const [packageName, version] = dependencyArg.includes('@')
|
||||
? dependencyArg.split('@').filter(Boolean)
|
||||
: [dependencyArg, 'latest'];
|
||||
|
||||
const targetSections: string[] = [];
|
||||
|
||||
switch (position) {
|
||||
case 'dep':
|
||||
targetSections.push('dependencies');
|
||||
break;
|
||||
case 'devDep':
|
||||
targetSections.push('devDependencies');
|
||||
break;
|
||||
case 'everywhere':
|
||||
targetSections.push('dependencies', 'devDependencies');
|
||||
break;
|
||||
}
|
||||
|
||||
for (const section of targetSections) {
|
||||
if (!packageJsonObjectArg[section]) {
|
||||
packageJsonObjectArg[section] = {};
|
||||
}
|
||||
|
||||
switch (constraint) {
|
||||
case 'exclude':
|
||||
delete packageJsonObjectArg[section][packageName];
|
||||
break;
|
||||
case 'include':
|
||||
if (!packageJsonObjectArg[section][packageName]) {
|
||||
packageJsonObjectArg[section][packageName] =
|
||||
version === 'latest' ? '^1.0.0' : version;
|
||||
}
|
||||
break;
|
||||
case 'latest':
|
||||
// Fetch latest version from npm
|
||||
try {
|
||||
const registry = new plugins.smartnpm.NpmRegistry();
|
||||
const packageInfo = await registry.getPackageInfo(packageName);
|
||||
const latestVersion = packageInfo['dist-tags'].latest;
|
||||
packageJsonObjectArg[section][packageName] = `^${latestVersion}`;
|
||||
} catch (error) {
|
||||
logger.log(
|
||||
'warn',
|
||||
`Could not fetch latest version for ${packageName}, using existing or default`,
|
||||
);
|
||||
if (!packageJsonObjectArg[section][packageName]) {
|
||||
packageJsonObjectArg[section][packageName] =
|
||||
version === 'latest' ? '^1.0.0' : version;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const formatStreamWrapper = new plugins.smartstream.StreamWrapper([
|
||||
@@ -29,10 +83,10 @@ export const run = async (projectArg: Project) => {
|
||||
type: 'git',
|
||||
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}.git`,
|
||||
};
|
||||
(packageJson.bugs = {
|
||||
((packageJson.bugs = {
|
||||
url: `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}/issues`,
|
||||
}),
|
||||
(packageJson.homepage = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}#readme`);
|
||||
(packageJson.homepage = `https://${gitzoneData.module.githost}/${gitzoneData.module.gitscope}/${gitzoneData.module.gitrepo}#readme`));
|
||||
|
||||
// Check for module type
|
||||
if (!packageJson.type) {
|
||||
@@ -42,9 +96,15 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
// Check for private or public
|
||||
if (packageJson.private !== undefined) {
|
||||
logger.log('info', 'Success -> found private/public info in package.json!');
|
||||
logger.log(
|
||||
'info',
|
||||
'Success -> found private/public info in package.json!',
|
||||
);
|
||||
} else {
|
||||
logger.log('error', 'found no private boolean! Setting it to private for now!');
|
||||
logger.log(
|
||||
'error',
|
||||
'found no private boolean! Setting it to private for now!',
|
||||
);
|
||||
packageJson.private = true;
|
||||
}
|
||||
|
||||
@@ -52,7 +112,10 @@ export const run = async (projectArg: Project) => {
|
||||
if (packageJson.license) {
|
||||
logger.log('info', 'Success -> found license in package.json!');
|
||||
} else {
|
||||
logger.log('error', 'found no license! Setting it to UNLICENSED for now!');
|
||||
logger.log(
|
||||
'error',
|
||||
'found no license! Setting it to UNLICENSED for now!',
|
||||
);
|
||||
packageJson.license = 'UNLICENSED';
|
||||
}
|
||||
|
||||
@@ -60,13 +123,19 @@ export const run = async (projectArg: Project) => {
|
||||
if (packageJson.scripts.build) {
|
||||
logger.log('info', 'Success -> found build script in package.json!');
|
||||
} else {
|
||||
logger.log('error', 'found no build script! Putting a placeholder there for now!');
|
||||
logger.log(
|
||||
'error',
|
||||
'found no build script! Putting a placeholder there for now!',
|
||||
);
|
||||
packageJson.scripts.build = `echo "Not needed for now"`;
|
||||
}
|
||||
|
||||
// Check for buildDocs script
|
||||
if (!packageJson.scripts.buildDocs) {
|
||||
logger.log('info', 'found no buildDocs script! Putting tsdoc script there now.');
|
||||
logger.log(
|
||||
'info',
|
||||
'found no buildDocs script! Putting tsdoc script there now.',
|
||||
);
|
||||
packageJson.scripts.buildDocs = `tsdoc`;
|
||||
}
|
||||
|
||||
@@ -85,9 +154,24 @@ export const run = async (projectArg: Project) => {
|
||||
];
|
||||
|
||||
// check for dependencies
|
||||
await ensureDependency(packageJson, 'devDep', 'latest', '@push.rocks/tapbundle');
|
||||
await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tstest');
|
||||
await ensureDependency(packageJson, 'devDep', 'latest', '@git.zone/tsbuild');
|
||||
await ensureDependency(
|
||||
packageJson,
|
||||
'devDep',
|
||||
'latest',
|
||||
'@push.rocks/tapbundle',
|
||||
);
|
||||
await ensureDependency(
|
||||
packageJson,
|
||||
'devDep',
|
||||
'latest',
|
||||
'@git.zone/tstest',
|
||||
);
|
||||
await ensureDependency(
|
||||
packageJson,
|
||||
'devDep',
|
||||
'latest',
|
||||
'@git.zone/tsbuild',
|
||||
);
|
||||
|
||||
// set overrides
|
||||
const overrides = plugins.smartfile.fs.toObjectSync(
|
||||
|
@@ -16,7 +16,12 @@ const prettierDefaultMarkdownConfig: prettier.Options = {
|
||||
parser: 'markdown',
|
||||
};
|
||||
|
||||
const filesToFormat = [`ts/**/*.ts`, `test/**/*.ts`, `readme.md`, `docs/**/*.md`];
|
||||
const filesToFormat = [
|
||||
`ts/**/*.ts`,
|
||||
`test/**/*.ts`,
|
||||
`readme.md`,
|
||||
`docs/**/*.md`,
|
||||
];
|
||||
|
||||
const choosePrettierConfig = (fileArg: plugins.smartfile.SmartFile) => {
|
||||
switch (fileArg.parsedPath.ext) {
|
||||
@@ -39,7 +44,10 @@ const prettierTypeScriptPipestop = plugins.through2.obj(
|
||||
cb(null);
|
||||
} else {
|
||||
logger.log('info', `${fileArg.path} is being reformated!`);
|
||||
const formatedFileString = await prettier.format(fileString, chosenConfig);
|
||||
const formatedFileString = await prettier.format(
|
||||
fileString,
|
||||
chosenConfig,
|
||||
);
|
||||
fileArg.setContentsFromString(formatedFileString);
|
||||
cb(null, fileArg);
|
||||
}
|
||||
|
@@ -18,7 +18,8 @@ export const run = async () => {
|
||||
}
|
||||
|
||||
// Check and initialize readme.hints.md if it doesn't exist
|
||||
const readmeHintsExists = await plugins.smartfile.fs.fileExists(readmeHintsPath);
|
||||
const readmeHintsExists =
|
||||
await plugins.smartfile.fs.fileExists(readmeHintsPath);
|
||||
if (!readmeHintsExists) {
|
||||
await plugins.smartfile.fs.toFs(
|
||||
'# Project Readme Hints\n\nThis is the initial readme hints file.',
|
||||
|
@@ -26,10 +26,12 @@ export const run = async (project: Project) => {
|
||||
case 'npm':
|
||||
case 'wcc':
|
||||
if (project.gitzoneConfig.data.npmciOptions.npmAccessLevel === 'public') {
|
||||
const ciTemplateDefault = await templateModule.getTemplate('ci_default');
|
||||
const ciTemplateDefault =
|
||||
await templateModule.getTemplate('ci_default');
|
||||
ciTemplateDefault.writeToDisk(paths.cwd);
|
||||
} else {
|
||||
const ciTemplateDefault = await templateModule.getTemplate('ci_default_private');
|
||||
const ciTemplateDefault =
|
||||
await templateModule.getTemplate('ci_default_private');
|
||||
ciTemplateDefault.writeToDisk(paths.cwd);
|
||||
}
|
||||
logger.log('info', 'Updated .gitlabci.yml!');
|
||||
@@ -41,7 +43,8 @@ export const run = async (project: Project) => {
|
||||
logger.log('info', 'Updated CI/CD config files!');
|
||||
|
||||
// lets care about docker
|
||||
const dockerTemplate = await templateModule.getTemplate('dockerfile_service');
|
||||
const dockerTemplate =
|
||||
await templateModule.getTemplate('dockerfile_service');
|
||||
dockerTemplate.writeToDisk(paths.cwd);
|
||||
logger.log('info', 'Updated Dockerfile!');
|
||||
|
||||
@@ -56,17 +59,22 @@ export const run = async (project: Project) => {
|
||||
|
||||
// update html
|
||||
if (project.gitzoneConfig.data.projectType === 'website') {
|
||||
const websiteUpdateTemplate = await templateModule.getTemplate('website_update');
|
||||
const variables ={
|
||||
const websiteUpdateTemplate =
|
||||
await templateModule.getTemplate('website_update');
|
||||
const variables = {
|
||||
assetbrokerUrl: project.gitzoneConfig.data.module.assetbrokerUrl,
|
||||
legalUrl: project.gitzoneConfig.data.module.legalUrl,
|
||||
};
|
||||
console.log('updating website template with variables\n', JSON.stringify(variables, null, 2));
|
||||
console.log(
|
||||
'updating website template with variables\n',
|
||||
JSON.stringify(variables, null, 2),
|
||||
);
|
||||
websiteUpdateTemplate.supplyVariables(variables);
|
||||
await websiteUpdateTemplate.writeToDisk(paths.cwd);
|
||||
logger.log('info', `Updated html for website!`);
|
||||
} else if (project.gitzoneConfig.data.projectType === 'service') {
|
||||
const websiteUpdateTemplate = await templateModule.getTemplate('service_update');
|
||||
const websiteUpdateTemplate =
|
||||
await templateModule.getTemplate('service_update');
|
||||
await websiteUpdateTemplate.writeToDisk(paths.cwd);
|
||||
logger.log('info', `Updated html for element template!`);
|
||||
} else if (project.gitzoneConfig.data.projectType === 'wcc') {
|
||||
|
@@ -19,8 +19,12 @@ export const run = async (projectArg: Project) => {
|
||||
const publishModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
|
||||
for (const publishModule of Object.keys(publishModules)) {
|
||||
const publishConfig = publishModules[publishModule];
|
||||
tsconfigObject.compilerOptions.paths[`${publishConfig.name}`] = [`./${publishModule}/index.js`];
|
||||
tsconfigObject.compilerOptions.paths[`${publishConfig.name}`] = [
|
||||
`./${publishModule}/index.js`,
|
||||
];
|
||||
}
|
||||
tsconfigSmartfile.setContentsFromString(JSON.stringify(tsconfigObject, null, 2));
|
||||
tsconfigSmartfile.setContentsFromString(
|
||||
JSON.stringify(tsconfigObject, null, 2),
|
||||
);
|
||||
await tsconfigSmartfile.write();
|
||||
};
|
||||
|
44
ts/mod_format/formatters/cleanup.formatter.ts
Normal file
@@ -0,0 +1,44 @@
|
||||
import { BaseFormatter } from '../classes.baseformatter.js';
|
||||
import type { IPlannedChange } from '../interfaces.format.js';
|
||||
import * as plugins from '../mod.plugins.js';
|
||||
import * as cleanupFormatter from '../format.cleanup.js';
|
||||
|
||||
export class CleanupFormatter extends BaseFormatter {
|
||||
get name(): string {
|
||||
return 'cleanup';
|
||||
}
|
||||
|
||||
async analyze(): Promise<IPlannedChange[]> {
|
||||
const changes: IPlannedChange[] = [];
|
||||
|
||||
// List of files to remove
|
||||
const filesToRemove = [
|
||||
'yarn.lock',
|
||||
'package-lock.json',
|
||||
'tslint.json',
|
||||
'defaults.yml',
|
||||
];
|
||||
|
||||
for (const file of filesToRemove) {
|
||||
const exists = await plugins.smartfile.fs.fileExists(file);
|
||||
if (exists) {
|
||||
changes.push({
|
||||
type: 'delete',
|
||||
path: file,
|
||||
module: this.name,
|
||||
description: `Remove obsolete file`,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return changes;
|
||||
}
|
||||
|
||||
async applyChange(change: IPlannedChange): Promise<void> {
|
||||
switch (change.type) {
|
||||
case 'delete':
|
||||
await this.deleteFile(change.path);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
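The analyze()/applyChange() pair above is the contract each formatter exposes; roughly, it is consumed like this (wiring assumed, simplified from the planner described further below):
// Illustrative sketch only - `context` and `project` are assumed to exist.
const cleanupFormatter = new CleanupFormatter(context, project);
const plannedChanges = await cleanupFormatter.analyze();
for (const change of plannedChanges) {
  await cleanupFormatter.applyChange(change);
}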
8
ts/mod_format/formatters/copy.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatCopy from '../format.copy.js';
|
||||
|
||||
export class CopyFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'copy', formatCopy);
|
||||
}
|
||||
}
|
8
ts/mod_format/formatters/gitignore.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatGitignore from '../format.gitignore.js';
|
||||
|
||||
export class GitignoreFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'gitignore', formatGitignore);
|
||||
}
|
||||
}
|
43
ts/mod_format/formatters/legacy.formatter.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { BaseFormatter } from '../classes.baseformatter.js';
|
||||
import type { IPlannedChange } from '../interfaces.format.js';
|
||||
import { Project } from '../../classes.project.js';
|
||||
import * as plugins from '../mod.plugins.js';
|
||||
|
||||
// This is a wrapper for existing format modules
|
||||
export class LegacyFormatter extends BaseFormatter {
|
||||
private moduleName: string;
|
||||
private formatModule: any;
|
||||
|
||||
constructor(
|
||||
context: any,
|
||||
project: Project,
|
||||
moduleName: string,
|
||||
formatModule: any,
|
||||
) {
|
||||
super(context, project);
|
||||
this.moduleName = moduleName;
|
||||
this.formatModule = formatModule;
|
||||
}
|
||||
|
||||
get name(): string {
|
||||
return this.moduleName;
|
||||
}
|
||||
|
||||
async analyze(): Promise<IPlannedChange[]> {
|
||||
// For legacy modules, we can't easily predict changes
|
||||
// So we'll return a generic change that indicates the module will run
|
||||
return [
|
||||
{
|
||||
type: 'modify',
|
||||
path: '<various files>',
|
||||
module: this.name,
|
||||
description: `Run ${this.name} formatter`,
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
async applyChange(change: IPlannedChange): Promise<void> {
|
||||
// Run the legacy format module
|
||||
await this.formatModule.run(this.project);
|
||||
}
|
||||
}
|
8
ts/mod_format/formatters/license.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatLicense from '../format.license.js';
|
||||
|
||||
export class LicenseFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'license', formatLicense);
|
||||
}
|
||||
}
|
8
ts/mod_format/formatters/npmextra.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatNpmextra from '../format.npmextra.js';
|
||||
|
||||
export class NpmextraFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'npmextra', formatNpmextra);
|
||||
}
|
||||
}
|
8
ts/mod_format/formatters/packagejson.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatPackageJson from '../format.packagejson.js';
|
||||
|
||||
export class PackageJsonFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'packagejson', formatPackageJson);
|
||||
}
|
||||
}
|
223
ts/mod_format/formatters/prettier.formatter.ts
Normal file
@@ -0,0 +1,223 @@
|
||||
import { BaseFormatter } from '../classes.baseformatter.js';
|
||||
import type { IPlannedChange } from '../interfaces.format.js';
|
||||
import * as plugins from '../mod.plugins.js';
|
||||
import { logger, logVerbose } from '../../gitzone.logging.js';
|
||||
|
||||
export class PrettierFormatter extends BaseFormatter {
|
||||
get name(): string {
|
||||
return 'prettier';
|
||||
}
|
||||
|
||||
async analyze(): Promise<IPlannedChange[]> {
|
||||
const changes: IPlannedChange[] = [];
|
||||
|
||||
// Define directories to format (TypeScript directories by default)
|
||||
const includeDirs = ['ts', 'ts_*', 'test', 'tests'];
|
||||
|
||||
// File extensions to format
|
||||
const extensions = '{ts,tsx,js,jsx,json,md,css,scss,html,xml,yaml,yml}';
|
||||
|
||||
// Also format root-level config files
|
||||
const rootConfigFiles = [
|
||||
'package.json',
|
||||
'tsconfig.json',
|
||||
'npmextra.json',
|
||||
'.prettierrc',
|
||||
'.prettierrc.json',
|
||||
'.prettierrc.js',
|
||||
'readme.md',
|
||||
'README.md',
|
||||
'changelog.md',
|
||||
'CHANGELOG.md',
|
||||
// Skip files without extensions as prettier can't infer parser
|
||||
// 'license',
|
||||
// 'LICENSE',
|
||||
'*.md',
|
||||
];
|
||||
|
||||
// Collect all files to format
|
||||
const allFiles: string[] = [];
|
||||
|
||||
// Add files from TypeScript directories
|
||||
for (const dir of includeDirs) {
|
||||
const globPattern = `${dir}/**/*.${extensions}`;
|
||||
const dirFiles = await plugins.smartfile.fs.listFileTree(
|
||||
'.',
|
||||
globPattern,
|
||||
);
|
||||
allFiles.push(...dirFiles);
|
||||
}
|
||||
|
||||
// Add root config files
|
||||
for (const pattern of rootConfigFiles) {
|
||||
const rootFiles = await plugins.smartfile.fs.listFileTree('.', pattern);
|
||||
// Only include files at root level (no slashes in path)
|
||||
const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
|
||||
allFiles.push(...rootLevelFiles);
|
||||
}
|
||||
|
||||
// Remove duplicates
|
||||
const uniqueFiles = [...new Set(allFiles)];
|
||||
|
||||
// Get all files that match the pattern
|
||||
const files = uniqueFiles;
|
||||
|
||||
// Ensure we only process actual files (not directories)
|
||||
const validFiles: string[] = [];
|
||||
for (const file of files) {
|
||||
try {
|
||||
const stats = await plugins.smartfile.fs.stat(file);
|
||||
if (!stats.isDirectory()) {
|
||||
validFiles.push(file);
|
||||
}
|
||||
} catch (error) {
|
||||
// Skip files that can't be accessed
|
||||
logVerbose(`Skipping ${file} - cannot access: ${error.message}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Check which files need formatting
|
||||
for (const file of validFiles) {
|
||||
// Skip files that haven't changed
|
||||
if (!(await this.shouldProcessFile(file))) {
|
||||
logVerbose(`Skipping ${file} - no changes detected`);
|
||||
continue;
|
||||
}
|
||||
|
||||
changes.push({
|
||||
type: 'modify',
|
||||
path: file,
|
||||
module: this.name,
|
||||
description: 'Format with Prettier',
|
||||
});
|
||||
}
|
||||
|
||||
logger.log('info', `Found ${changes.length} files to format with Prettier`);
|
||||
return changes;
|
||||
}
|
||||
|
||||
async execute(changes: IPlannedChange[]): Promise<void> {
|
||||
const startTime = this.stats.moduleStartTime(this.name);
|
||||
this.stats.startModule(this.name);
|
||||
|
||||
try {
|
||||
await this.preExecute();
|
||||
|
||||
logVerbose(`Processing ${changes.length} files sequentially`);
|
||||
|
||||
// Process files sequentially to avoid prettier cache/state issues
|
||||
for (let i = 0; i < changes.length; i++) {
|
||||
const change = changes[i];
|
||||
logVerbose(
|
||||
`Processing file ${i + 1}/${changes.length}: ${change.path}`,
|
||||
);
|
||||
|
||||
try {
|
||||
await this.applyChange(change);
|
||||
this.stats.recordFileOperation(this.name, change.type, true);
|
||||
} catch (error) {
|
||||
this.stats.recordFileOperation(this.name, change.type, false);
|
||||
logger.log(
|
||||
'error',
|
||||
`Failed to format ${change.path}: ${error.message}`,
|
||||
);
|
||||
// Don't throw - continue with other files
|
||||
}
|
||||
}
|
||||
|
||||
await this.postExecute();
|
||||
} catch (error) {
|
||||
// Rollback removed - no longer tracking operations
|
||||
throw error;
|
||||
} finally {
|
||||
this.stats.endModule(this.name, startTime);
|
||||
}
|
||||
}
|
||||
|
||||
async applyChange(change: IPlannedChange): Promise<void> {
|
||||
if (change.type !== 'modify') return;
|
||||
|
||||
try {
|
||||
// Validate the path before processing
|
||||
if (!change.path || change.path.trim() === '') {
|
||||
logger.log(
|
||||
'error',
|
||||
`Invalid empty path in change: ${JSON.stringify(change)}`,
|
||||
);
|
||||
throw new Error('Invalid empty path');
|
||||
}
|
||||
|
||||
// Read current content
|
||||
const content = plugins.smartfile.fs.toStringSync(change.path);
|
||||
|
||||
// Format with prettier
|
||||
const prettier = await import('prettier');
|
||||
|
||||
// Skip files that prettier can't parse without explicit parser
|
||||
const fileExt = plugins.path.extname(change.path).toLowerCase();
|
||||
if (!fileExt || fileExt === '') {
|
||||
// Files without extensions need explicit parser
|
||||
logVerbose(
|
||||
`Skipping ${change.path} - no file extension for parser inference`,
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const formatted = await prettier.format(content, {
|
||||
filepath: change.path,
|
||||
...(await this.getPrettierConfig()),
|
||||
});
|
||||
|
||||
// Only write if content actually changed
|
||||
if (formatted !== content) {
|
||||
// Debug: log the path being written
|
||||
logVerbose(`Writing formatted content to: ${change.path}`);
|
||||
await this.modifyFile(change.path, formatted);
|
||||
logVerbose(`Formatted ${change.path}`);
|
||||
} else {
|
||||
logVerbose(`No formatting changes for ${change.path}`);
|
||||
}
|
||||
} catch (prettierError) {
|
||||
// Check if it's a parser error
|
||||
if (
|
||||
prettierError.message &&
|
||||
prettierError.message.includes('No parser could be inferred')
|
||||
) {
|
||||
logVerbose(`Skipping ${change.path} - ${prettierError.message}`);
|
||||
return; // Skip this file silently
|
||||
}
|
||||
throw prettierError;
|
||||
}
|
||||
} catch (error) {
|
||||
// Log the full error stack for debugging mkdir issues
|
||||
if (error.message && error.message.includes('mkdir')) {
|
||||
logger.log(
|
||||
'error',
|
||||
`Failed to format ${change.path}: ${error.message}`,
|
||||
);
|
||||
logger.log('error', `Error stack: ${error.stack}`);
|
||||
} else {
|
||||
logger.log(
|
||||
'error',
|
||||
`Failed to format ${change.path}: ${error.message}`,
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async getPrettierConfig(): Promise<any> {
|
||||
// Try to load prettier config from the project
|
||||
const prettierConfig = new plugins.npmextra.Npmextra();
|
||||
return prettierConfig.dataFor('prettier', {
|
||||
// Default prettier config
|
||||
singleQuote: true,
|
||||
trailingComma: 'all',
|
||||
printWidth: 80,
|
||||
tabWidth: 2,
|
||||
semi: true,
|
||||
arrowParens: 'always',
|
||||
});
|
||||
}
|
||||
}
|
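Since getPrettierConfig() reads the 'prettier' key through npmextra, a project can override the defaults above with an entry along these lines in npmextra.json (the key layout is assumed here, and the values are illustrative):
{
  "prettier": {
    "singleQuote": true,
    "trailingComma": "all",
    "printWidth": 100
  }
}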
24
ts/mod_format/formatters/readme.formatter.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import { BaseFormatter } from '../classes.baseformatter.js';
|
||||
import type { IPlannedChange } from '../interfaces.format.js';
|
||||
import * as formatReadme from '../format.readme.js';
|
||||
|
||||
export class ReadmeFormatter extends BaseFormatter {
|
||||
get name(): string {
|
||||
return 'readme';
|
||||
}
|
||||
|
||||
async analyze(): Promise<IPlannedChange[]> {
|
||||
return [
|
||||
{
|
||||
type: 'modify',
|
||||
path: 'readme.md',
|
||||
module: this.name,
|
||||
description: 'Ensure readme files exist',
|
||||
},
|
||||
];
|
||||
}
|
||||
|
||||
async applyChange(change: IPlannedChange): Promise<void> {
|
||||
await formatReadme.run();
|
||||
}
|
||||
}
|
8
ts/mod_format/formatters/templates.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatTemplates from '../format.templates.js';
|
||||
|
||||
export class TemplatesFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'templates', formatTemplates);
|
||||
}
|
||||
}
|
8
ts/mod_format/formatters/tsconfig.formatter.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import { LegacyFormatter } from './legacy.formatter.js';
|
||||
import * as formatTsconfig from '../format.tsconfig.js';
|
||||
|
||||
export class TsconfigFormatter extends LegacyFormatter {
|
||||
constructor(context: any, project: any) {
|
||||
super(context, project, 'tsconfig', formatTsconfig);
|
||||
}
|
||||
}
|
@@ -1,40 +1,192 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import { Project } from '../classes.project.js';
|
||||
import { FormatContext } from './classes.formatcontext.js';
|
||||
import { FormatPlanner } from './classes.formatplanner.js';
|
||||
import { logger, setVerboseMode } from '../gitzone.logging.js';
|
||||
|
||||
// Import wrapper classes for formatters
|
||||
import { CleanupFormatter } from './formatters/cleanup.formatter.js';
|
||||
import { NpmextraFormatter } from './formatters/npmextra.formatter.js';
|
||||
import { LicenseFormatter } from './formatters/license.formatter.js';
|
||||
import { PackageJsonFormatter } from './formatters/packagejson.formatter.js';
|
||||
import { TemplatesFormatter } from './formatters/templates.formatter.js';
|
||||
import { GitignoreFormatter } from './formatters/gitignore.formatter.js';
|
||||
import { TsconfigFormatter } from './formatters/tsconfig.formatter.js';
|
||||
import { PrettierFormatter } from './formatters/prettier.formatter.js';
|
||||
import { ReadmeFormatter } from './formatters/readme.formatter.js';
|
||||
import { CopyFormatter } from './formatters/copy.formatter.js';
|
||||
|
||||
export let run = async (
|
||||
options: {
|
||||
dryRun?: boolean;
|
||||
yes?: boolean;
|
||||
planOnly?: boolean;
|
||||
savePlan?: string;
|
||||
fromPlan?: string;
|
||||
detailed?: boolean;
|
||||
interactive?: boolean;
|
||||
parallel?: boolean;
|
||||
verbose?: boolean;
|
||||
} = {},
|
||||
): Promise<any> => {
|
||||
// Set verbose mode if requested
|
||||
if (options.verbose) {
|
||||
setVerboseMode(true);
|
||||
}
|
||||
|
||||
export let run = async (writeArg: boolean = true): Promise<any> => {
|
||||
const project = await Project.fromCwd();
|
||||
const context = new FormatContext();
|
||||
// Cache system removed - no longer needed
|
||||
const planner = new FormatPlanner();
|
||||
|
||||
// cleanup
|
||||
const formatCleanup = await import('./format.cleanup.js');
|
||||
await formatCleanup.run(project);
|
||||
// Get configuration from npmextra
|
||||
const npmextraConfig = new plugins.npmextra.Npmextra();
|
||||
const formatConfig = npmextraConfig.dataFor<any>('gitzone.format', {
|
||||
interactive: true,
|
||||
showDiffs: false,
|
||||
autoApprove: false,
|
||||
planTimeout: 30000,
|
||||
rollback: {
|
||||
enabled: true,
|
||||
autoRollbackOnError: true,
|
||||
backupRetentionDays: 7,
|
||||
maxBackupSize: '100MB',
|
||||
excludePatterns: ['node_modules/**', '.git/**'],
|
||||
},
|
||||
modules: {
|
||||
skip: [],
|
||||
only: [],
|
||||
order: [],
|
||||
},
|
||||
parallel: true,
|
||||
cache: {
|
||||
enabled: true,
|
||||
clean: true, // Clean invalid entries from cache
|
||||
},
|
||||
});
|
||||
|
||||
// npmextra
|
||||
const formatNpmextra = await import('./format.npmextra.js');
|
||||
await formatNpmextra.run(project);
|
||||
// Cache cleaning removed - no longer using cache system
|
||||
|
||||
// license
|
||||
const formatLicense = await import('./format.license.js');
|
||||
await formatLicense.run(project);
|
||||
// Override config with command options
|
||||
const interactive = options.interactive ?? formatConfig.interactive;
|
||||
const autoApprove = options.yes ?? formatConfig.autoApprove;
|
||||
const parallel = options.parallel ?? formatConfig.parallel;
|
||||
|
||||
// format package.json
|
||||
const formatPackageJson = await import('./format.packagejson.js');
|
||||
await formatPackageJson.run(project);
|
||||
try {
|
||||
// Initialize formatters
|
||||
const formatters = [
|
||||
new CleanupFormatter(context, project),
|
||||
new NpmextraFormatter(context, project),
|
||||
new LicenseFormatter(context, project),
|
||||
new PackageJsonFormatter(context, project),
|
||||
new TemplatesFormatter(context, project),
|
||||
new GitignoreFormatter(context, project),
|
||||
new TsconfigFormatter(context, project),
|
||||
new PrettierFormatter(context, project),
|
||||
new ReadmeFormatter(context, project),
|
||||
new CopyFormatter(context, project),
|
||||
];
|
||||
|
||||
// format .gitlab-ci.yml
|
||||
const formatTemplates = await import('./format.templates.js');
|
||||
await formatTemplates.run(project);
|
||||
// Filter formatters based on configuration
|
||||
const activeFormatters = formatters.filter((formatter) => {
|
||||
if (formatConfig.modules.only.length > 0) {
|
||||
return formatConfig.modules.only.includes(formatter.name);
|
||||
}
|
||||
if (formatConfig.modules.skip.includes(formatter.name)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
});
|
||||
|
||||
// format .gitignore
|
||||
const formatGitignore = await import('./format.gitignore.js');
|
||||
await formatGitignore.run(project);
|
||||
// Plan phase
|
||||
logger.log('info', 'Analyzing project for format operations...');
|
||||
let plan = options.fromPlan
|
||||
? JSON.parse(await plugins.smartfile.fs.toStringSync(options.fromPlan))
|
||||
: await planner.planFormat(activeFormatters);
|
||||
|
||||
// format TypeScript
|
||||
const formatTsConfig = await import('./format.tsconfig.js');
|
||||
await formatTsConfig.run(project);
|
||||
const formatPrettier = await import('./format.prettier.js');
|
||||
await formatPrettier.run(project);
|
||||
// Display plan
|
||||
await planner.displayPlan(plan, options.detailed);
|
||||
|
||||
// format readme.md
|
||||
const formatReadme = await import('./format.readme.js');
|
||||
await formatReadme.run();
|
||||
// Save plan if requested
|
||||
if (options.savePlan) {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(plan, null, 2),
|
||||
options.savePlan,
|
||||
);
|
||||
logger.log('info', `Plan saved to ${options.savePlan}`);
|
||||
}
|
||||
|
||||
// Exit if plan-only mode
|
||||
if (options.planOnly) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Dry-run mode
|
||||
if (options.dryRun) {
|
||||
logger.log('info', 'Dry-run mode - no changes will be made');
|
||||
return;
|
||||
}
|
||||
|
||||
// Interactive confirmation
|
||||
if (interactive && !autoApprove) {
|
||||
const interactInstance = new plugins.smartinteract.SmartInteract();
|
||||
const response = await interactInstance.askQuestion({
|
||||
type: 'confirm',
|
||||
name: 'proceed',
|
||||
message: 'Proceed with formatting?',
|
||||
default: true,
|
||||
});
|
||||
|
||||
if (!(response as any).value) {
|
||||
logger.log('info', 'Format operation cancelled by user');
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Execute phase
|
||||
logger.log(
|
||||
'info',
|
||||
`Executing format operations${parallel ? ' in parallel' : ' sequentially'}...`,
|
||||
);
|
||||
await planner.executePlan(plan, activeFormatters, context, parallel);
|
||||
|
||||
// Finish statistics tracking
|
||||
context.getFormatStats().finish();
|
||||
|
||||
// Display statistics
|
||||
const showStats = npmextraConfig.dataFor('gitzone.format.showStats', true);
|
||||
if (showStats) {
|
||||
context.getFormatStats().displayStats();
|
||||
}
|
||||
|
||||
// Save stats if requested
|
||||
if (options.detailed) {
|
||||
const statsPath = `.nogit/format-stats-${Date.now()}.json`;
|
||||
await context.getFormatStats().saveReport(statsPath);
|
||||
}
|
||||
|
||||
logger.log('success', 'Format operations completed successfully!');
|
||||
} catch (error) {
|
||||
logger.log('error', `Format operation failed: ${error.message}`);
|
||||
|
||||
// Rollback system has been removed for stability
|
||||
|
||||
throw error;
|
||||
}
|
||||
};
|
||||
|
||||
// Export CLI command handlers
|
||||
export const handleRollback = async (operationId?: string): Promise<void> => {
|
||||
logger.log('info', 'Rollback system has been disabled for stability');
|
||||
};
|
||||
|
||||
export const handleListBackups = async (): Promise<void> => {
|
||||
logger.log('info', 'Backup system has been disabled for stability');
|
||||
};
|
||||
|
||||
export const handleCleanBackups = async (): Promise<void> => {
|
||||
logger.log(
|
||||
'info',
|
||||
'Backup cleaning has been disabled - backup system removed',
|
||||
);
|
||||
};
|
||||
|
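A minimal sketch of calling this entry point programmatically with the options typed above; the module path and surrounding wiring are assumptions, not part of this commit:
// Illustrative sketch only.
import * as modFormat from './ts/mod_format/index.js';

// Preview the plan without touching any files.
await modFormat.run({ dryRun: true, detailed: true, verbose: true });

// Apply all formatters without the interactive confirmation.
await modFormat.run({ yes: true });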
45
ts/mod_format/interfaces.format.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
export type IFormatOperation = {
|
||||
id: string;
|
||||
timestamp: number;
|
||||
files: Array<{
|
||||
path: string;
|
||||
originalContent: string;
|
||||
checksum: string;
|
||||
permissions: string;
|
||||
}>;
|
||||
status: 'pending' | 'in-progress' | 'completed' | 'failed' | 'rolled-back';
|
||||
error?: Error;
|
||||
};
|
||||
|
||||
export type IFormatPlan = {
|
||||
summary: {
|
||||
totalFiles: number;
|
||||
filesAdded: number;
|
||||
filesModified: number;
|
||||
filesRemoved: number;
|
||||
estimatedTime: number;
|
||||
};
|
||||
changes: Array<{
|
||||
type: 'create' | 'modify' | 'delete';
|
||||
path: string;
|
||||
module: string;
|
||||
description: string;
|
||||
diff?: string;
|
||||
size?: number;
|
||||
}>;
|
||||
warnings: Array<{
|
||||
level: 'info' | 'warning' | 'error';
|
||||
message: string;
|
||||
module: string;
|
||||
}>;
|
||||
};
|
||||
|
||||
export type IPlannedChange = {
|
||||
type: 'create' | 'modify' | 'delete';
|
||||
path: string;
|
||||
module: string;
|
||||
description: string;
|
||||
content?: string; // For create/modify operations
|
||||
diff?: string;
|
||||
size?: number;
|
||||
};
|
@@ -1,5 +1,7 @@
|
||||
export * from '../plugins.js';
|
||||
|
||||
import * as crypto from 'crypto';
|
||||
import * as path from 'path';
|
||||
import * as lik from '@push.rocks/lik';
|
||||
import * as smartfile from '@push.rocks/smartfile';
|
||||
import * as smartgulp from '@push.rocks/smartgulp';
|
||||
@@ -9,8 +11,12 @@ import * as smartobject from '@push.rocks/smartobject';
|
||||
import * as smartnpm from '@push.rocks/smartnpm';
|
||||
import * as smartstream from '@push.rocks/smartstream';
|
||||
import * as through2 from 'through2';
|
||||
import * as npmextra from '@push.rocks/npmextra';
|
||||
import * as smartdiff from '@push.rocks/smartdiff';
|
||||
|
||||
export {
|
||||
crypto,
|
||||
path,
|
||||
lik,
|
||||
smartfile,
|
||||
smartgulp,
|
||||
@@ -20,4 +26,6 @@ export {
|
||||
smartnpm,
|
||||
smartstream,
|
||||
through2,
|
||||
npmextra,
|
||||
smartdiff,
|
||||
};
|
||||
|
@@ -35,7 +35,10 @@ export class Meta {
   * sorts the metaRepoData
   */
  public async sortMetaRepoData() {
-    const stringifiedMetadata = plugins.smartjson.stringify(this.metaRepoData, []);
+    const stringifiedMetadata = plugins.smartjson.stringify(
+      this.metaRepoData,
+      [],
+    );
    this.metaRepoData = plugins.smartjson.parse(stringifiedMetadata);
  }

@@ -45,11 +48,15 @@ export class Meta {
  public async readDirectory() {
    await this.syncToRemote(true);
    logger.log('info', `reading directory`);
-    const metaFileExists = plugins.smartfile.fs.fileExistsSync(this.filePaths.metaJson);
+    const metaFileExists = plugins.smartfile.fs.fileExistsSync(
+      this.filePaths.metaJson,
+    );
    if (!metaFileExists) {
      throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
    }
-    this.metaRepoData = plugins.smartfile.fs.toObjectSync(this.filePaths.metaJson);
+    this.metaRepoData = plugins.smartfile.fs.toObjectSync(
+      this.filePaths.metaJson,
+    );
  }

  /**
@@ -76,7 +83,10 @@ export class Meta {
      this.filePaths.metaJson,
    );
    // write .gitignore to disk
-    plugins.smartfile.memory.toFsSync(await this.generateGitignore(), this.filePaths.gitIgnore);
+    plugins.smartfile.memory.toFsSync(
+      await this.generateGitignore(),
+      this.filePaths.gitIgnore,
+    );
  }

  /**
@@ -84,13 +94,17 @@ export class Meta {
   */
  public async syncToRemote(gitCleanArg = false) {
    logger.log('info', `syncing from origin master`);
-    await this.smartshellInstance.exec(`cd ${this.cwd} && git pull origin master`);
+    await this.smartshellInstance.exec(
+      `cd ${this.cwd} && git pull origin master`,
+    );
    if (gitCleanArg) {
      logger.log('info', `cleaning the repository from old directories`);
      await this.smartshellInstance.exec(`cd ${this.cwd} && git clean -fd`);
    }
    logger.log('info', `syncing to remote origin master`);
-    await this.smartshellInstance.exec(`cd ${this.cwd} && git push origin master`);
+    await this.smartshellInstance.exec(
+      `cd ${this.cwd} && git push origin master`,
+    );
  }

  /**
@@ -98,7 +112,9 @@ export class Meta {
   */
  public async updateLocalRepos() {
    await this.syncToRemote();
-    const projects = plugins.smartfile.fs.toObjectSync(this.filePaths.metaJson).projects;
+    const projects = plugins.smartfile.fs.toObjectSync(
+      this.filePaths.metaJson,
+    ).projects;
    const preExistingFolders = plugins.smartfile.fs.listFoldersSync(this.cwd);
    for (const preExistingFolderArg of preExistingFolders) {
      if (
@@ -107,14 +123,18 @@ export class Meta {
          projectFolder.startsWith(preExistingFolderArg),
        )
      ) {
-        const response = await plugins.smartinteraction.SmartInteract.getCliConfirmation(
-          `Do you want to delete superfluous directory >>${preExistingFolderArg}<< ?`,
-          true,
-        );
+        const response =
+          await plugins.smartinteraction.SmartInteract.getCliConfirmation(
+            `Do you want to delete superfluous directory >>${preExistingFolderArg}<< ?`,
+            true,
+          );
        if (response) {
          logger.log('warn', `Deleting >>${preExistingFolderArg}<<!`);
        } else {
-          logger.log('warn', `Not deleting ${preExistingFolderArg} by request!`);
+          logger.log(
+            'warn',
+            `Not deleting ${preExistingFolderArg} by request!`,
+          );
        }
      }
    }
@@ -160,7 +180,9 @@ export class Meta {
   */
  public async initProject() {
    await this.syncToRemote(true);
-    const fileExists = await plugins.smartfile.fs.fileExists(this.filePaths.metaJson);
+    const fileExists = await plugins.smartfile.fs.fileExists(
+      this.filePaths.metaJson,
+    );
    if (!fileExists) {
      await plugins.smartfile.memory.toFs(
        JSON.stringify({
@@ -168,7 +190,10 @@ export class Meta {
        }),
        this.filePaths.metaJson,
      );
-      logger.log(`success`, `created a new .meta.json in directory ${this.cwd}`);
+      logger.log(
+        `success`,
+        `created a new .meta.json in directory ${this.cwd}`,
+      );
      await plugins.smartfile.memory.toFs(
        JSON.stringify({
          name: this.dirName,
@@ -176,9 +201,15 @@ export class Meta {
        }),
        this.filePaths.packageJson,
      );
-      logger.log(`success`, `created a new package.json in directory ${this.cwd}`);
+      logger.log(
+        `success`,
+        `created a new package.json in directory ${this.cwd}`,
+      );
    } else {
-      logger.log(`error`, `directory ${this.cwd} already has a .metaJson file. Doing nothing.`);
+      logger.log(
+        `error`,
+        `directory ${this.cwd} already has a .metaJson file. Doing nothing.`,
+      );
    }
    await this.smartshellInstance.exec(
      `cd ${this.cwd} && git add -A && git commit -m "feat(project): init meta project for ${this.dirName}"`,
@@ -195,7 +226,9 @@ export class Meta {
    const existingProject = this.metaRepoData.projects[projectNameArg];

    if (existingProject) {
-      throw new Error('Project already exists! Please remove it first before adding it again.');
+      throw new Error(
+        'Project already exists! Please remove it first before adding it again.',
+      );
    }

    this.metaRepoData.projects[projectNameArg] = gitUrlArg;
@@ -217,7 +250,10 @@ export class Meta {
    const existingProject = this.metaRepoData.projects[projectNameArg];

    if (!existingProject) {
-      logger.log('error', `Project ${projectNameArg} does not exist! So it cannot be removed`);
+      logger.log(
+        'error',
+        `Project ${projectNameArg} does not exist! So it cannot be removed`,
+      );
      return;
    }

@@ -228,7 +264,9 @@ export class Meta {
    await this.writeToDisk();

    logger.log('info', 'removing directory from cwd');
-    await plugins.smartfile.fs.remove(plugins.path.join(paths.cwd, projectNameArg));
+    await plugins.smartfile.fs.remove(
+      plugins.path.join(paths.cwd, projectNameArg),
+    );
    await this.updateLocalRepos();
  }
}
ts/mod_services/classes.dockercontainer.ts (new file, 227 lines)
@@ -0,0 +1,227 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export type ContainerStatus = 'running' | 'stopped' | 'not_exists';
|
||||
|
||||
export interface IDockerRunOptions {
|
||||
name: string;
|
||||
image: string;
|
||||
ports?: { [key: string]: string };
|
||||
volumes?: { [key: string]: string };
|
||||
environment?: { [key: string]: string };
|
||||
restart?: string;
|
||||
command?: string;
|
||||
}
|
||||
|
||||
export class DockerContainer {
|
||||
private smartshell: plugins.smartshell.Smartshell;
|
||||
|
||||
constructor() {
|
||||
this.smartshell = new plugins.smartshell.Smartshell({
|
||||
executor: 'bash',
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if Docker is installed and available
|
||||
*/
|
||||
public async checkDocker(): Promise<boolean> {
|
||||
try {
|
||||
const result = await this.smartshell.exec('docker --version');
|
||||
return result.exitCode === 0;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get container status
|
||||
*/
|
||||
public async getStatus(containerName: string): Promise<ContainerStatus> {
|
||||
try {
|
||||
// Check if running
|
||||
const runningResult = await this.smartshell.exec(
|
||||
`docker ps --format '{{.Names}}' | grep -q "^${containerName}$"`
|
||||
);
|
||||
|
||||
if (runningResult.exitCode === 0) {
|
||||
return 'running';
|
||||
}
|
||||
|
||||
// Check if exists but stopped
|
||||
const existsResult = await this.smartshell.exec(
|
||||
`docker ps -a --format '{{.Names}}' | grep -q "^${containerName}$"`
|
||||
);
|
||||
|
||||
if (existsResult.exitCode === 0) {
|
||||
return 'stopped';
|
||||
}
|
||||
|
||||
return 'not_exists';
|
||||
} catch (error) {
|
||||
return 'not_exists';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a container
|
||||
*/
|
||||
public async start(containerName: string): Promise<boolean> {
|
||||
try {
|
||||
const result = await this.smartshell.exec(`docker start ${containerName}`);
|
||||
return result.exitCode === 0;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop a container
|
||||
*/
|
||||
public async stop(containerName: string): Promise<boolean> {
|
||||
try {
|
||||
const result = await this.smartshell.exec(`docker stop ${containerName}`);
|
||||
return result.exitCode === 0;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a container
|
||||
*/
|
||||
public async remove(containerName: string, force: boolean = false): Promise<boolean> {
|
||||
try {
|
||||
const forceFlag = force ? '-f' : '';
|
||||
const result = await this.smartshell.exec(`docker rm ${forceFlag} ${containerName}`);
|
||||
return result.exitCode === 0;
|
||||
} catch (error) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a new container
|
||||
*/
|
||||
public async run(options: IDockerRunOptions): Promise<boolean> {
|
||||
let command = 'docker run -d';
|
||||
|
||||
// Add name
|
||||
command += ` --name ${options.name}`;
|
||||
|
||||
// Add ports
|
||||
if (options.ports) {
|
||||
for (const [hostPort, containerPort] of Object.entries(options.ports)) {
|
||||
command += ` -p ${hostPort}:${containerPort}`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add volumes
|
||||
if (options.volumes) {
|
||||
for (const [hostPath, containerPath] of Object.entries(options.volumes)) {
|
||||
command += ` -v "${hostPath}:${containerPath}"`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add environment variables
|
||||
if (options.environment) {
|
||||
for (const [key, value] of Object.entries(options.environment)) {
|
||||
command += ` -e ${key}="${value}"`;
|
||||
}
|
||||
}
|
||||
|
||||
// Add restart policy
|
||||
if (options.restart) {
|
||||
command += ` --restart ${options.restart}`;
|
||||
}
|
||||
|
||||
// Add image
|
||||
command += ` ${options.image}`;
|
||||
|
||||
// Add command if provided
|
||||
if (options.command) {
|
||||
command += ` ${options.command}`;
|
||||
}
|
||||
|
||||
try {
|
||||
const result = await this.smartshell.exec(command);
|
||||
return result.exitCode === 0;
|
||||
} catch (error) {
|
||||
logger.log('error', `Failed to run container: ${error.message}`);
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute a command in a running container
|
||||
*/
|
||||
public async exec(containerName: string, command: string): Promise<string> {
|
||||
try {
|
||||
const result = await this.smartshell.exec(`docker exec ${containerName} ${command}`);
|
||||
if (result.exitCode === 0) {
|
||||
return result.stdout;
|
||||
}
|
||||
return '';
|
||||
} catch (error) {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get container logs
|
||||
*/
|
||||
public async logs(containerName: string, lines?: number): Promise<string> {
|
||||
try {
|
||||
const tailFlag = lines ? `--tail ${lines}` : '';
|
||||
const result = await this.smartshell.exec(`docker logs ${tailFlag} ${containerName}`);
|
||||
return result.stdout;
|
||||
} catch (error) {
|
||||
return `Error getting logs: ${error.message}`;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a container exists
|
||||
*/
|
||||
public async exists(containerName: string): Promise<boolean> {
|
||||
const status = await this.getStatus(containerName);
|
||||
return status !== 'not_exists';
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a container is running
|
||||
*/
|
||||
public async isRunning(containerName: string): Promise<boolean> {
|
||||
const status = await this.getStatus(containerName);
|
||||
return status === 'running';
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait for a container to be ready
|
||||
*/
|
||||
public async waitForReady(containerName: string, maxAttempts: number = 30): Promise<boolean> {
|
||||
for (let i = 0; i < maxAttempts; i++) {
|
||||
if (await this.isRunning(containerName)) {
|
||||
return true;
|
||||
}
|
||||
await plugins.smartdelay.delayFor(1000);
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get container information
|
||||
*/
|
||||
public async inspect(containerName: string): Promise<any> {
|
||||
try {
|
||||
const result = await this.smartshell.exec(`docker inspect ${containerName}`);
|
||||
if (result.exitCode === 0) {
|
||||
return JSON.parse(result.stdout);
|
||||
}
|
||||
return null;
|
||||
} catch (error) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
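For orientation, a minimal usage sketch of the DockerContainer wrapper introduced above; the container name below is a hypothetical example, while the image tag and every method call are taken from the class as shown:

import { DockerContainer } from './classes.dockercontainer.js';

const demo = async () => {
  const docker = new DockerContainer();
  // Bail out if Docker itself is not available on this machine.
  if (!(await docker.checkDocker())) return;

  // 'example-mongodb' is an illustrative name; real names come from ServiceConfiguration.getContainerNames().
  await docker.run({
    name: 'example-mongodb',
    image: 'mongo:7.0',
    ports: { '27017': '27017' },
    restart: 'unless-stopped',
  });
  await docker.waitForReady('example-mongodb');
  console.log(await docker.getStatus('example-mongodb')); // 'running' | 'stopped' | 'not_exists'
};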
ts/mod_services/classes.serviceconfiguration.ts (new file, 246 lines)
@@ -0,0 +1,246 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export interface IServiceConfig {
|
||||
PROJECT_NAME: string;
|
||||
MONGODB_HOST: string;
|
||||
MONGODB_NAME: string;
|
||||
MONGODB_PORT: string;
|
||||
MONGODB_USER: string;
|
||||
MONGODB_PASS: string;
|
||||
S3_HOST: string;
|
||||
S3_PORT: string;
|
||||
S3_CONSOLE_PORT: string;
|
||||
S3_USER: string;
|
||||
S3_PASS: string;
|
||||
S3_BUCKET: string;
|
||||
}
|
||||
|
||||
export class ServiceConfiguration {
|
||||
private configPath: string;
|
||||
private config: IServiceConfig;
|
||||
|
||||
constructor() {
|
||||
this.configPath = plugins.path.join(process.cwd(), '.nogit', 'env.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Load or create the configuration
|
||||
*/
|
||||
public async loadOrCreate(): Promise<IServiceConfig> {
|
||||
await this.ensureNogitDirectory();
|
||||
|
||||
if (await this.configExists()) {
|
||||
await this.loadConfig();
|
||||
await this.updateMissingFields();
|
||||
} else {
|
||||
await this.createDefaultConfig();
|
||||
}
|
||||
|
||||
return this.config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current configuration
|
||||
*/
|
||||
public getConfig(): IServiceConfig {
|
||||
return this.config;
|
||||
}
|
||||
|
||||
/**
|
||||
* Save the configuration to file
|
||||
*/
|
||||
public async saveConfig(): Promise<void> {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(this.config, null, 2),
|
||||
this.configPath
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure .nogit directory exists
|
||||
*/
|
||||
private async ensureNogitDirectory(): Promise<void> {
|
||||
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
|
||||
await plugins.smartfile.fs.ensureDir(nogitPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if configuration file exists
|
||||
*/
|
||||
private async configExists(): Promise<boolean> {
|
||||
return plugins.smartfile.fs.fileExists(this.configPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Load configuration from file
|
||||
*/
|
||||
private async loadConfig(): Promise<void> {
|
||||
const configContent = await plugins.smartfile.fs.toStringSync(this.configPath);
|
||||
this.config = JSON.parse(configContent);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create default configuration
|
||||
*/
|
||||
private async createDefaultConfig(): Promise<void> {
|
||||
const projectName = helpers.getProjectName();
|
||||
const mongoPort = await helpers.getRandomAvailablePort();
|
||||
const s3Port = await helpers.getRandomAvailablePort();
|
||||
let s3ConsolePort = s3Port + 1;
|
||||
|
||||
// Ensure console port is also available
|
||||
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||
s3ConsolePort++;
|
||||
}
|
||||
|
||||
this.config = {
|
||||
PROJECT_NAME: projectName,
|
||||
MONGODB_HOST: 'localhost',
|
||||
MONGODB_NAME: projectName,
|
||||
MONGODB_PORT: mongoPort.toString(),
|
||||
MONGODB_USER: 'defaultadmin',
|
||||
MONGODB_PASS: 'defaultpass',
|
||||
S3_HOST: 'localhost',
|
||||
S3_PORT: s3Port.toString(),
|
||||
S3_CONSOLE_PORT: s3ConsolePort.toString(),
|
||||
S3_USER: 'defaultadmin',
|
||||
S3_PASS: 'defaultpass',
|
||||
S3_BUCKET: `${projectName}-documents`
|
||||
};
|
||||
|
||||
await this.saveConfig();
|
||||
|
||||
logger.log('ok', '✅ Created .nogit/env.json with project defaults');
|
||||
logger.log('info', `📍 MongoDB port: ${mongoPort}`);
|
||||
logger.log('info', `📍 S3 API port: ${s3Port}`);
|
||||
logger.log('info', `📍 S3 Console port: ${s3ConsolePort}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update missing fields in existing configuration
|
||||
*/
|
||||
private async updateMissingFields(): Promise<void> {
|
||||
const projectName = helpers.getProjectName();
|
||||
let updated = false;
|
||||
const fieldsAdded: string[] = [];
|
||||
|
||||
// Check and add missing fields
|
||||
if (!this.config.PROJECT_NAME) {
|
||||
this.config.PROJECT_NAME = projectName;
|
||||
fieldsAdded.push('PROJECT_NAME');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.MONGODB_HOST) {
|
||||
this.config.MONGODB_HOST = 'localhost';
|
||||
fieldsAdded.push('MONGODB_HOST');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.MONGODB_NAME) {
|
||||
this.config.MONGODB_NAME = projectName;
|
||||
fieldsAdded.push('MONGODB_NAME');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.MONGODB_PORT) {
|
||||
const port = await helpers.getRandomAvailablePort();
|
||||
this.config.MONGODB_PORT = port.toString();
|
||||
fieldsAdded.push(`MONGODB_PORT(${port})`);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.MONGODB_USER) {
|
||||
this.config.MONGODB_USER = 'defaultadmin';
|
||||
fieldsAdded.push('MONGODB_USER');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.MONGODB_PASS) {
|
||||
this.config.MONGODB_PASS = 'defaultpass';
|
||||
fieldsAdded.push('MONGODB_PASS');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_HOST) {
|
||||
this.config.S3_HOST = 'localhost';
|
||||
fieldsAdded.push('S3_HOST');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_PORT) {
|
||||
const port = await helpers.getRandomAvailablePort();
|
||||
this.config.S3_PORT = port.toString();
|
||||
fieldsAdded.push(`S3_PORT(${port})`);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_CONSOLE_PORT) {
|
||||
const s3Port = parseInt(this.config.S3_PORT);
|
||||
let consolePort = s3Port + 1;
|
||||
|
||||
while (!(await helpers.isPortAvailable(consolePort))) {
|
||||
consolePort++;
|
||||
}
|
||||
|
||||
this.config.S3_CONSOLE_PORT = consolePort.toString();
|
||||
fieldsAdded.push(`S3_CONSOLE_PORT(${consolePort})`);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_USER) {
|
||||
this.config.S3_USER = 'defaultadmin';
|
||||
fieldsAdded.push('S3_USER');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_PASS) {
|
||||
this.config.S3_PASS = 'defaultpass';
|
||||
fieldsAdded.push('S3_PASS');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_BUCKET) {
|
||||
this.config.S3_BUCKET = `${projectName}-documents`;
|
||||
fieldsAdded.push('S3_BUCKET');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
await this.saveConfig();
|
||||
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
||||
} else {
|
||||
logger.log('ok', '✅ Configuration complete');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get MongoDB connection string
|
||||
*/
|
||||
public getMongoConnectionString(useNetworkIp: boolean = false): string {
|
||||
const host = useNetworkIp ? '${networkIp}' : this.config.MONGODB_HOST;
|
||||
return `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${host}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get container names
|
||||
*/
|
||||
public getContainerNames() {
|
||||
return {
|
||||
mongo: `${this.config.PROJECT_NAME}-mongodb`,
|
||||
minio: `${this.config.PROJECT_NAME}-minio`
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Get data directories
|
||||
*/
|
||||
public getDataDirectories() {
|
||||
return {
|
||||
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
|
||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata')
|
||||
};
|
||||
}
|
||||
}
|
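As a rough consumption sketch for the configuration class above, using only what the diff exposes (loadOrCreate, getConfig, getMongoConnectionString, getContainerNames):

import { ServiceConfiguration } from './classes.serviceconfiguration.js';

const demo = async () => {
  const configuration = new ServiceConfiguration();
  // First run creates .nogit/env.json with defaults; later runs fill in any missing fields.
  const config = await configuration.loadOrCreate();
  console.log(config.MONGODB_PORT);                      // random port in the 20000-30000 range
  console.log(configuration.getMongoConnectionString()); // mongodb://user:pass@localhost:PORT/db?authSource=admin
  console.log(configuration.getContainerNames());        // { mongo: '<project>-mongodb', minio: '<project>-minio' }
};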
ts/mod_services/classes.servicemanager.ts (new file, 423 lines)
@@ -0,0 +1,423 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { ServiceConfiguration } from './classes.serviceconfiguration.js';
|
||||
import { DockerContainer } from './classes.dockercontainer.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export class ServiceManager {
|
||||
private config: ServiceConfiguration;
|
||||
private docker: DockerContainer;
|
||||
|
||||
constructor() {
|
||||
this.config = new ServiceConfiguration();
|
||||
this.docker = new DockerContainer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize the service manager
|
||||
*/
|
||||
public async init(): Promise<void> {
|
||||
// Check Docker availability
|
||||
if (!(await this.docker.checkDocker())) {
|
||||
logger.log('error', 'Error: Docker is not installed. Please install Docker first.');
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Load or create configuration
|
||||
await this.config.loadOrCreate();
|
||||
logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start MongoDB service
|
||||
*/
|
||||
public async startMongoDB(): Promise<void> {
|
||||
logger.log('note', '📦 MongoDB:');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfile.fs.ensureDir(directories.mongo);
|
||||
|
||||
const status = await this.docker.getStatus(containers.mongo);
|
||||
|
||||
switch (status) {
|
||||
case 'running':
|
||||
logger.log('ok', ' Already running ✓');
|
||||
break;
|
||||
|
||||
case 'stopped':
|
||||
if (await this.docker.start(containers.mongo)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'not_exists':
|
||||
logger.log('note', ' Creating container...');
|
||||
|
||||
const success = await this.docker.run({
|
||||
name: containers.mongo,
|
||||
image: 'mongo:7.0',
|
||||
ports: {
|
||||
[`0.0.0.0:${config.MONGODB_PORT}`]: '27017'
|
||||
},
|
||||
volumes: {
|
||||
[directories.mongo]: '/data/db'
|
||||
},
|
||||
environment: {
|
||||
MONGO_INITDB_ROOT_USERNAME: config.MONGODB_USER,
|
||||
MONGO_INITDB_ROOT_PASSWORD: config.MONGODB_PASS,
|
||||
MONGO_INITDB_DATABASE: config.MONGODB_NAME
|
||||
},
|
||||
restart: 'unless-stopped'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Created and started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to create container');
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
logger.log('info', ` Container: ${containers.mongo}`);
|
||||
logger.log('info', ` Port: ${config.MONGODB_PORT}`);
|
||||
logger.log('info', ` Connection: ${this.config.getMongoConnectionString()}`);
|
||||
|
||||
// Show Compass connection string
|
||||
const networkIp = await helpers.getLocalNetworkIp();
|
||||
const compassString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
|
||||
logger.log('ok', ` Compass: ${compassString}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start MinIO service
|
||||
*/
|
||||
public async startMinIO(): Promise<void> {
|
||||
logger.log('note', '📦 S3/MinIO:');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfile.fs.ensureDir(directories.minio);
|
||||
|
||||
const status = await this.docker.getStatus(containers.minio);
|
||||
|
||||
switch (status) {
|
||||
case 'running':
|
||||
logger.log('ok', ' Already running ✓');
|
||||
break;
|
||||
|
||||
case 'stopped':
|
||||
if (await this.docker.start(containers.minio)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'not_exists':
|
||||
logger.log('note', ' Creating container...');
|
||||
|
||||
const success = await this.docker.run({
|
||||
name: containers.minio,
|
||||
image: 'minio/minio',
|
||||
ports: {
|
||||
[config.S3_PORT]: '9000',
|
||||
[config.S3_CONSOLE_PORT]: '9001'
|
||||
},
|
||||
volumes: {
|
||||
[directories.minio]: '/data'
|
||||
},
|
||||
environment: {
|
||||
MINIO_ROOT_USER: config.S3_USER,
|
||||
MINIO_ROOT_PASSWORD: config.S3_PASS
|
||||
},
|
||||
restart: 'unless-stopped',
|
||||
command: 'server /data --console-address ":9001"'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Created and started ✓');
|
||||
|
||||
// Wait for MinIO to be ready
|
||||
await plugins.smartdelay.delayFor(3000);
|
||||
|
||||
// Create default bucket
|
||||
await this.docker.exec(
|
||||
containers.minio,
|
||||
`mc alias set local http://localhost:9000 ${config.S3_USER} ${config.S3_PASS}`
|
||||
);
|
||||
|
||||
await this.docker.exec(
|
||||
containers.minio,
|
||||
`mc mb local/${config.S3_BUCKET}`
|
||||
);
|
||||
|
||||
logger.log('ok', ` Bucket '${config.S3_BUCKET}' created ✓`);
|
||||
} else {
|
||||
logger.log('error', ' Failed to create container');
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
logger.log('info', ` Container: ${containers.minio}`);
|
||||
logger.log('info', ` Port: ${config.S3_PORT}`);
|
||||
logger.log('info', ` Bucket: ${config.S3_BUCKET}`);
|
||||
logger.log('info', ` API: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||
logger.log('info', ` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_USER}/***)`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop MongoDB service
|
||||
*/
|
||||
public async stopMongoDB(): Promise<void> {
|
||||
logger.log('note', '📦 MongoDB:');
|
||||
|
||||
const containers = this.config.getContainerNames();
|
||||
const status = await this.docker.getStatus(containers.mongo);
|
||||
|
||||
if (status === 'running') {
|
||||
if (await this.docker.stop(containers.mongo)) {
|
||||
logger.log('ok', ' Stopped ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to stop');
|
||||
}
|
||||
} else {
|
||||
logger.log('note', ' Not running');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop MinIO service
|
||||
*/
|
||||
public async stopMinIO(): Promise<void> {
|
||||
logger.log('note', '📦 S3/MinIO:');
|
||||
|
||||
const containers = this.config.getContainerNames();
|
||||
const status = await this.docker.getStatus(containers.minio);
|
||||
|
||||
if (status === 'running') {
|
||||
if (await this.docker.stop(containers.minio)) {
|
||||
logger.log('ok', ' Stopped ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to stop');
|
||||
}
|
||||
} else {
|
||||
logger.log('note', ' Not running');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Show service status
|
||||
*/
|
||||
public async showStatus(): Promise<void> {
|
||||
helpers.printHeader('Service Status');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
|
||||
logger.log('info', `Project: ${config.PROJECT_NAME}`);
|
||||
console.log();
|
||||
|
||||
// MongoDB status
|
||||
const mongoStatus = await this.docker.getStatus(containers.mongo);
|
||||
switch (mongoStatus) {
|
||||
case 'running':
|
||||
logger.log('ok', '📦 MongoDB: 🟢 Running');
|
||||
logger.log('info', ` ├─ Container: ${containers.mongo}`);
|
||||
logger.log('info', ` ├─ Connection: ${this.config.getMongoConnectionString()}`);
|
||||
|
||||
// Show Compass connection string
|
||||
const networkIp = await helpers.getLocalNetworkIp();
|
||||
const compassString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
|
||||
logger.log('ok', ` └─ Compass: ${compassString}`);
|
||||
break;
|
||||
case 'stopped':
|
||||
logger.log('note', '📦 MongoDB: 🟡 Stopped');
|
||||
logger.log('info', ` └─ Container: ${containers.mongo}`);
|
||||
break;
|
||||
case 'not_exists':
|
||||
logger.log('info', '📦 MongoDB: ⚪ Not installed');
|
||||
break;
|
||||
}
|
||||
|
||||
// MinIO status
|
||||
const minioStatus = await this.docker.getStatus(containers.minio);
|
||||
switch (minioStatus) {
|
||||
case 'running':
|
||||
logger.log('ok', '📦 S3/MinIO: 🟢 Running');
|
||||
logger.log('info', ` ├─ Container: ${containers.minio}`);
|
||||
logger.log('info', ` ├─ API: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||
logger.log('info', ` ├─ Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
|
||||
logger.log('info', ` └─ Bucket: ${config.S3_BUCKET}`);
|
||||
break;
|
||||
case 'stopped':
|
||||
logger.log('note', '📦 S3/MinIO: 🟡 Stopped');
|
||||
logger.log('info', ` └─ Container: ${containers.minio}`);
|
||||
break;
|
||||
case 'not_exists':
|
||||
logger.log('info', '📦 S3/MinIO: ⚪ Not installed');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Show configuration
|
||||
*/
|
||||
public async showConfig(): Promise<void> {
|
||||
helpers.printHeader('Current Configuration');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
|
||||
logger.log('info', `Project: ${config.PROJECT_NAME}`);
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'MongoDB:');
|
||||
logger.log('info', ` Host: ${config.MONGODB_HOST}:${config.MONGODB_PORT}`);
|
||||
logger.log('info', ` Database: ${config.MONGODB_NAME}`);
|
||||
logger.log('info', ` User: ${config.MONGODB_USER}`);
|
||||
logger.log('info', ' Password: ***');
|
||||
logger.log('info', ` Container: ${this.config.getContainerNames().mongo}`);
|
||||
logger.log('info', ` Data: ${this.config.getDataDirectories().mongo}`);
|
||||
logger.log('info', ` Connection: ${this.config.getMongoConnectionString()}`);
|
||||
|
||||
console.log();
|
||||
logger.log('note', 'S3/MinIO:');
|
||||
logger.log('info', ` Host: ${config.S3_HOST}`);
|
||||
logger.log('info', ` API Port: ${config.S3_PORT}`);
|
||||
logger.log('info', ` Console Port: ${config.S3_CONSOLE_PORT}`);
|
||||
logger.log('info', ` User: ${config.S3_USER}`);
|
||||
logger.log('info', ' Password: ***');
|
||||
logger.log('info', ` Bucket: ${config.S3_BUCKET}`);
|
||||
logger.log('info', ` Container: ${this.config.getContainerNames().minio}`);
|
||||
logger.log('info', ` Data: ${this.config.getDataDirectories().minio}`);
|
||||
logger.log('info', ` API URL: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||
logger.log('info', ` Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Show MongoDB Compass connection string
|
||||
*/
|
||||
public async showCompassConnection(): Promise<void> {
|
||||
helpers.printHeader('MongoDB Compass Connection');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
const networkIp = await helpers.getLocalNetworkIp();
|
||||
|
||||
const connectionString = `mongodb://${config.MONGODB_USER}:${config.MONGODB_PASS}@${networkIp}:${config.MONGODB_PORT}/${config.MONGODB_NAME}?authSource=admin`;
|
||||
|
||||
logger.log('info', 'MongoDB Compass is a GUI tool for MongoDB. To connect:');
|
||||
console.log();
|
||||
logger.log('info', '1. Download MongoDB Compass from:');
|
||||
logger.log('info', ' https://www.mongodb.com/products/compass');
|
||||
console.log();
|
||||
logger.log('info', '2. Open Compass and paste this connection string:');
|
||||
logger.log('ok', ` ${connectionString}`);
|
||||
console.log();
|
||||
logger.log('note', 'Connection Details:');
|
||||
logger.log('info', ` Network IP: ${networkIp}`);
|
||||
logger.log('info', ` Port: ${config.MONGODB_PORT}`);
|
||||
logger.log('info', ` Database: ${config.MONGODB_NAME}`);
|
||||
logger.log('info', ` Username: ${config.MONGODB_USER}`);
|
||||
logger.log('info', ` Auth Source: admin`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Show logs for a service
|
||||
*/
|
||||
public async showLogs(service: string, lines: number = 20): Promise<void> {
|
||||
const containers = this.config.getContainerNames();
|
||||
|
||||
switch (service) {
|
||||
case 'mongo':
|
||||
case 'mongodb':
|
||||
if (await this.docker.isRunning(containers.mongo)) {
|
||||
helpers.printHeader(`MongoDB Logs (last ${lines} lines)`);
|
||||
const logs = await this.docker.logs(containers.mongo, lines);
|
||||
console.log(logs);
|
||||
} else {
|
||||
logger.log('note', 'MongoDB container is not running');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'minio':
|
||||
case 's3':
|
||||
if (await this.docker.isRunning(containers.minio)) {
|
||||
helpers.printHeader(`S3/MinIO Logs (last ${lines} lines)`);
|
||||
const logs = await this.docker.logs(containers.minio, lines);
|
||||
console.log(logs);
|
||||
} else {
|
||||
logger.log('note', 'S3/MinIO container is not running');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await this.showLogs('mongo', lines);
|
||||
console.log();
|
||||
await this.showLogs('minio', lines);
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('note', 'Usage: gitzone services logs [mongo|s3|all] [lines]');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove containers
|
||||
*/
|
||||
public async removeContainers(): Promise<void> {
|
||||
const containers = this.config.getContainerNames();
|
||||
let removed = false;
|
||||
|
||||
if (await this.docker.exists(containers.mongo)) {
|
||||
if (await this.docker.remove(containers.mongo, true)) {
|
||||
logger.log('ok', ' MongoDB container removed ✓');
|
||||
removed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (await this.docker.exists(containers.minio)) {
|
||||
if (await this.docker.remove(containers.minio, true)) {
|
||||
logger.log('ok', ' S3/MinIO container removed ✓');
|
||||
removed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!removed) {
|
||||
logger.log('note', ' No containers to remove');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean data directories
|
||||
*/
|
||||
public async cleanData(): Promise<void> {
|
||||
const directories = this.config.getDataDirectories();
|
||||
let cleaned = false;
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(directories.mongo)) {
|
||||
await plugins.smartfile.fs.remove(directories.mongo);
|
||||
logger.log('ok', ' MongoDB data removed ✓');
|
||||
cleaned = true;
|
||||
}
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(directories.minio)) {
|
||||
await plugins.smartfile.fs.remove(directories.minio);
|
||||
logger.log('ok', ' S3/MinIO data removed ✓');
|
||||
cleaned = true;
|
||||
}
|
||||
|
||||
if (!cleaned) {
|
||||
logger.log('note', ' No data to clean');
|
||||
}
|
||||
}
|
||||
}
|
ts/mod_services/helpers.ts (new file, 123 lines)
@@ -0,0 +1,123 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as net from 'net';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
/**
|
||||
* Check if a port is available
|
||||
*/
|
||||
export const isPortAvailable = async (port: number): Promise<boolean> => {
|
||||
return new Promise((resolve) => {
|
||||
const server = net.createServer();
|
||||
|
||||
server.once('error', () => {
|
||||
resolve(false);
|
||||
});
|
||||
|
||||
server.once('listening', () => {
|
||||
server.close();
|
||||
resolve(true);
|
||||
});
|
||||
|
||||
server.listen(port, '0.0.0.0');
|
||||
});
|
||||
};
|
||||
|
||||
/**
|
||||
* Get a random available port between 20000 and 30000
|
||||
*/
|
||||
export const getRandomAvailablePort = async (): Promise<number> => {
|
||||
const maxAttempts = 100;
|
||||
|
||||
for (let i = 0; i < maxAttempts; i++) {
|
||||
const port = Math.floor(Math.random() * 10001) + 20000;
|
||||
if (await isPortAvailable(port)) {
|
||||
return port;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: let the system assign a port
|
||||
return 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the project name from package.json or directory
|
||||
*/
|
||||
export const getProjectName = (): string => {
|
||||
try {
|
||||
const packageJsonPath = plugins.path.join(process.cwd(), 'package.json');
|
||||
if (plugins.smartfile.fs.fileExistsSync(packageJsonPath)) {
|
||||
const packageJson = plugins.smartfile.fs.toObjectSync(packageJsonPath);
|
||||
if (packageJson.name) {
|
||||
// Sanitize: @fin.cx/skr → fin-cx-skr
|
||||
return packageJson.name.replace(/@/g, '').replace(/[\/\.]/g, '-');
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
// Ignore errors and fall back to directory name
|
||||
}
|
||||
|
||||
return plugins.path.basename(process.cwd());
|
||||
};
|
||||
|
||||
/**
|
||||
* Print a header with decorative lines
|
||||
*/
|
||||
export const printHeader = (title: string) => {
|
||||
console.log();
|
||||
logger.log('info', '═══════════════════════════════════════════════════════════════');
|
||||
logger.log('info', ` ${title}`);
|
||||
logger.log('info', '═══════════════════════════════════════════════════════════════');
|
||||
console.log();
|
||||
};
|
||||
|
||||
/**
|
||||
* Format bytes to human readable string
|
||||
*/
|
||||
export const formatBytes = (bytes: number): string => {
|
||||
const units = ['B', 'KB', 'MB', 'GB', 'TB'];
|
||||
let size = bytes;
|
||||
let unitIndex = 0;
|
||||
|
||||
while (size >= 1024 && unitIndex < units.length - 1) {
|
||||
size /= 1024;
|
||||
unitIndex++;
|
||||
}
|
||||
|
||||
return `${size.toFixed(2)} ${units[unitIndex]}`;
|
||||
};
|
||||
|
||||
/**
|
||||
* Get the local network IP address
|
||||
*/
|
||||
export const getLocalNetworkIp = async (): Promise<string> => {
|
||||
const smartnetworkInstance = new plugins.smartnetwork.SmartNetwork();
|
||||
const gateways = await smartnetworkInstance.getGateways();
|
||||
|
||||
// Find the best local IP from network interfaces
|
||||
for (const interfaceName of Object.keys(gateways)) {
|
||||
const interfaces = gateways[interfaceName];
|
||||
for (const iface of interfaces) {
|
||||
// Skip loopback and internal interfaces
|
||||
if (!iface.internal && iface.family === 'IPv4') {
|
||||
const address = iface.address;
|
||||
// Prefer LAN IPs
|
||||
if (address.startsWith('192.168.') || address.startsWith('10.') || address.startsWith('172.')) {
|
||||
return address;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback: try to get any non-internal IPv4
|
||||
for (const interfaceName of Object.keys(gateways)) {
|
||||
const interfaces = gateways[interfaceName];
|
||||
for (const iface of interfaces) {
|
||||
if (!iface.internal && iface.family === 'IPv4') {
|
||||
return iface.address;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Last resort: localhost
|
||||
return 'localhost';
|
||||
};
|
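A short isolated sketch of the helpers above; nothing here goes beyond the exported functions shown in the diff:

import { isPortAvailable, getRandomAvailablePort, getLocalNetworkIp } from './helpers.js';

const demo = async () => {
  const port = await getRandomAvailablePort(); // 20000-30000, or 0 if no free port was found
  console.log(await isPortAvailable(port));    // usually true immediately after selection
  console.log(await getLocalNetworkIp());      // preferred LAN IPv4, falling back to 'localhost'
};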
ts/mod_services/index.ts (new file, 219 lines)
@@ -0,0 +1,219 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { ServiceManager } from './classes.servicemanager.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export const run = async (argvArg: any) => {
|
||||
const serviceManager = new ServiceManager();
|
||||
await serviceManager.init();
|
||||
|
||||
const command = argvArg._[1] || 'help';
|
||||
const service = argvArg._[2] || 'all';
|
||||
|
||||
switch (command) {
|
||||
case 'start':
|
||||
await handleStart(serviceManager, service);
|
||||
break;
|
||||
|
||||
case 'stop':
|
||||
await handleStop(serviceManager, service);
|
||||
break;
|
||||
|
||||
case 'restart':
|
||||
await handleRestart(serviceManager, service);
|
||||
break;
|
||||
|
||||
case 'status':
|
||||
await serviceManager.showStatus();
|
||||
break;
|
||||
|
||||
case 'config':
|
||||
await serviceManager.showConfig();
|
||||
break;
|
||||
|
||||
case 'compass':
|
||||
await serviceManager.showCompassConnection();
|
||||
break;
|
||||
|
||||
case 'logs':
|
||||
const lines = parseInt(argvArg._[3]) || 20;
|
||||
await serviceManager.showLogs(service, lines);
|
||||
break;
|
||||
|
||||
case 'remove':
|
||||
await handleRemove(serviceManager);
|
||||
break;
|
||||
|
||||
case 'clean':
|
||||
await handleClean(serviceManager);
|
||||
break;
|
||||
|
||||
case 'help':
|
||||
default:
|
||||
showHelp();
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
async function handleStart(serviceManager: ServiceManager, service: string) {
|
||||
helpers.printHeader('Starting Services');
|
||||
|
||||
switch (service) {
|
||||
case 'mongo':
|
||||
case 'mongodb':
|
||||
await serviceManager.startMongoDB();
|
||||
break;
|
||||
|
||||
case 'minio':
|
||||
case 's3':
|
||||
await serviceManager.startMinIO();
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await serviceManager.startMongoDB();
|
||||
console.log();
|
||||
await serviceManager.startMinIO();
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('error', `Unknown service: ${service}`);
|
||||
logger.log('note', 'Use: mongo, s3, or all');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleStop(serviceManager: ServiceManager, service: string) {
|
||||
helpers.printHeader('Stopping Services');
|
||||
|
||||
switch (service) {
|
||||
case 'mongo':
|
||||
case 'mongodb':
|
||||
await serviceManager.stopMongoDB();
|
||||
break;
|
||||
|
||||
case 'minio':
|
||||
case 's3':
|
||||
await serviceManager.stopMinIO();
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await serviceManager.stopMongoDB();
|
||||
console.log();
|
||||
await serviceManager.stopMinIO();
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('error', `Unknown service: ${service}`);
|
||||
logger.log('note', 'Use: mongo, s3, or all');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRestart(serviceManager: ServiceManager, service: string) {
|
||||
helpers.printHeader('Restarting Services');
|
||||
|
||||
switch (service) {
|
||||
case 'mongo':
|
||||
case 'mongodb':
|
||||
await serviceManager.stopMongoDB();
|
||||
await plugins.smartdelay.delayFor(2000);
|
||||
await serviceManager.startMongoDB();
|
||||
break;
|
||||
|
||||
case 'minio':
|
||||
case 's3':
|
||||
await serviceManager.stopMinIO();
|
||||
await plugins.smartdelay.delayFor(2000);
|
||||
await serviceManager.startMinIO();
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await serviceManager.stopMongoDB();
|
||||
await serviceManager.stopMinIO();
|
||||
await plugins.smartdelay.delayFor(2000);
|
||||
await serviceManager.startMongoDB();
|
||||
console.log();
|
||||
await serviceManager.startMinIO();
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('error', `Unknown service: ${service}`);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleRemove(serviceManager: ServiceManager) {
|
||||
helpers.printHeader('Removing Containers');
|
||||
logger.log('note', '⚠️ This will remove containers but preserve data');
|
||||
|
||||
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation('Continue?', false);
|
||||
|
||||
if (shouldContinue) {
|
||||
await serviceManager.removeContainers();
|
||||
} else {
|
||||
logger.log('note', 'Cancelled');
|
||||
}
|
||||
}
|
||||
|
||||
async function handleClean(serviceManager: ServiceManager) {
|
||||
helpers.printHeader('Clean All');
|
||||
logger.log('error', '⚠️ WARNING: This will remove all containers and data!');
|
||||
logger.log('error', 'This action cannot be undone!');
|
||||
|
||||
const smartinteraction = new plugins.smartinteract.SmartInteract();
|
||||
const confirmAnswer = await smartinteraction.askQuestion({
|
||||
name: 'confirm',
|
||||
type: 'input',
|
||||
message: 'Type "yes" to confirm:',
|
||||
default: 'no'
|
||||
});
|
||||
|
||||
if (confirmAnswer.value === 'yes') {
|
||||
await serviceManager.removeContainers();
|
||||
console.log();
|
||||
await serviceManager.cleanData();
|
||||
logger.log('ok', 'All cleaned ✓');
|
||||
} else {
|
||||
logger.log('note', 'Cancelled');
|
||||
}
|
||||
}
|
||||
|
||||
function showHelp() {
|
||||
helpers.printHeader('GitZone Services Manager');
|
||||
|
||||
logger.log('ok', 'Usage: gitzone services [command] [options]');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Commands:');
|
||||
logger.log('info', ' start [service] Start services (mongo|s3|all)');
|
||||
logger.log('info', ' stop [service] Stop services (mongo|s3|all)');
|
||||
logger.log('info', ' restart [service] Restart services (mongo|s3|all)');
|
||||
logger.log('info', ' status Show service status');
|
||||
logger.log('info', ' config Show current configuration');
|
||||
logger.log('info', ' compass Show MongoDB Compass connection string');
|
||||
logger.log('info', ' logs [service] Show logs (mongo|s3|all) [lines]');
|
||||
logger.log('info', ' remove Remove all containers');
|
||||
logger.log('info', ' clean Remove all containers and data ⚠️');
|
||||
logger.log('info', ' help Show this help message');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Features:');
|
||||
logger.log('info', ' • Auto-creates .nogit/env.json with smart defaults');
|
||||
logger.log('info', ' • Random ports (20000-30000) to avoid conflicts');
|
||||
logger.log('info', ' • Project-specific containers for multi-project support');
|
||||
logger.log('info', ' • Preserves custom configuration values');
|
||||
logger.log('info', ' • MongoDB Compass connection support');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Examples:');
|
||||
logger.log('info', ' gitzone services start # Start all services');
|
||||
logger.log('info', ' gitzone services start mongo # Start only MongoDB');
|
||||
logger.log('info', ' gitzone services stop # Stop all services');
|
||||
logger.log('info', ' gitzone services status # Check service status');
|
||||
logger.log('info', ' gitzone services config # Show configuration');
|
||||
logger.log('info', ' gitzone services compass # Get MongoDB Compass connection');
|
||||
logger.log('info', ' gitzone services logs mongo 50 # Show last 50 lines of MongoDB logs');
|
||||
}
|
ts/mod_services/mod.plugins.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
export * from '../plugins.js';

import * as smartshell from '@push.rocks/smartshell';
import * as smartfile from '@push.rocks/smartfile';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartnetwork from '@push.rocks/smartnetwork';
import * as smartdelay from '@push.rocks/smartdelay';

export { smartshell, smartfile, smartinteract, smartnetwork, smartdelay };
@@ -16,7 +16,9 @@ export let run = () => {
 * create a new project with 'gitzone template [template]'
 the following templates exist: ${(() => {
  let projects = `\n`;
-  for (const template of plugins.smartfile.fs.listFoldersSync(paths.templatesDir)) {
+  for (const template of plugins.smartfile.fs.listFoldersSync(
+    paths.templatesDir,
+  )) {
    projects += ` - ${template}\n`;
  }
  return projects;
@@ -15,7 +15,9 @@ export const run = async (argvArg: any) => {
  });

  await smartshellInstance.execStrict(`cd ${paths.cwd} && git checkout master`);
-  await smartshellInstance.execStrict(`cd ${paths.cwd} && git pull origin master`);
+  await smartshellInstance.execStrict(
+    `cd ${paths.cwd} && git pull origin master`,
+  );
  await smartshellInstance.execStrict(`cd ${paths.cwd} && npm ci`);

  await provideNoGitFiles();
@@ -16,7 +16,9 @@ export const isTemplate = async (templateNameArg: string) => {

export const getTemplate = async (templateNameArg: string) => {
  if (isTemplate(templateNameArg)) {
-    const localScafTemplate = new plugins.smartscaf.ScafTemplate(getTemplatePath(templateNameArg));
+    const localScafTemplate = new plugins.smartscaf.ScafTemplate(
+      getTemplatePath(templateNameArg),
+    );
    await localScafTemplate.readTemplateFromDir();
    return localScafTemplate;
  } else {
@@ -32,7 +34,8 @@ export const run = async (argvArg: any) => {
  const answerBucket = await smartinteract.askQuestion({
    type: 'list',
    default: 'npm',
-    message: 'What template do you want to scaffold? (Only showing mpost common options)',
+    message:
+      'What template do you want to scaffold? (Only showing mpost common options)',
    name: 'templateName',
    choices: ['npm', 'service', 'wcc', 'website'],
  });
@@ -7,6 +7,11 @@ import * as smartcli from '@push.rocks/smartcli';
 import * as smartpath from '@push.rocks/smartpath';
 import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartupdate from '@push.rocks/smartupdate';
+import * as smartshell from '@push.rocks/smartshell';
+import * as smartnetwork from '@push.rocks/smartnetwork';
+import * as smartfile from '@push.rocks/smartfile';
+import * as smartinteract from '@push.rocks/smartinteract';
+import * as smartdelay from '@push.rocks/smartdelay';

 export {
   smartlog,
@@ -18,4 +23,9 @@ export {
   smartpath,
   smartpromise,
   smartupdate,
+  smartshell,
+  smartnetwork,
+  smartfile,
+  smartinteract,
+  smartdelay,
 };
@@ -10,7 +10,5 @@
    "baseUrl": ".",
    "paths": {}
  },
-  "exclude": [
-    "dist_*/**/*.d.ts"
-  ]
-}
+  "exclude": ["dist_*/**/*.d.ts"]
+}