Compare commits
29 Commits
| SHA1 |
|---|
| 366c4a0bc2 |
| 0d3b10bd00 |
| a41e3d5d2c |
| c45cff89de |
| 7bb43ad478 |
| 8dcaf1c631 |
| 422761806d |
| 31360240a9 |
| e338ee584f |
| 31d2e18830 |
| a162ddabbb |
| 5dfa1d72aa |
| 7074a19a7f |
| 5774fb4da2 |
| be45ce765d |
| 2a250b8823 |
| 9a436cb4be |
| 86782c39dd |
| fba3e9d2b0 |
| cc37f70185 |
| dbc1a1ba18 |
| ff57f8a322 |
| 968e67330d |
| 935ee20e83 |
| c205180991 |
| 4a53bc4abc |
| a86fb3bb8e |
| b187000ae4 |
| c715adfd6c |
@@ -19,4 +19,8 @@ node_modules/
dist/
dist_*/

# AI
.claude/
.serena/

#------# custom
changelog.md (114 changed lines)
@@ -1,5 +1,119 @@
# Changelog

## 2025-10-23 - 1.19.0 - feat(mod_commit)

Add CLI UI helpers and improve commit workflow with progress, recommendations and summary

- Introduce ts/mod_commit/mod.ui.ts: reusable CLI UI helpers (pretty headers, sections, AI recommendation box, step printer, commit summary and helpers for consistent messaging).
- Refactor ts/mod_commit/index.ts: use new UI functions to display AI recommendations, show step-by-step progress for baking commit info, generating changelog, staging, committing, bumping version and optional push; include commit SHA in final summary.
- Enhance ts/mod_commit/mod.helpers.ts: bumpProjectVersion now accepts currentStep/totalSteps to report progress and returns a consistent newVersion after handling npm/deno/both cases.
- Add .claude/settings.local.json: local permissions configuration for development tooling.
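As a quick orientation before the file diffs below, this is roughly how the helpers introduced in this release are driven from the commit workflow (the real call sites are in the ts/mod_commit/index.ts diff further down; the literal values passed to printSummary here are illustrative):

```typescript
import * as ui from './mod.ui.js';

ui.printHeader('✨ Creating Semantic Commit');

const totalSteps = 5; // 6 when a push to origin is requested
let currentStep = 0;

currentStep++;
ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'in-progress');
// ... work for this step ...
ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'done');

// Illustrative values; index.ts fills these from the answer bucket and git.
ui.printSummary({
  projectType: 'npm',
  branch: 'master',
  commitType: 'feat',
  commitScope: 'mod_commit',
  commitMessage: 'add CLI UI helpers',
  newVersion: '1.19.0',
  commitSha: 'abc1234',
  pushed: false,
});
```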
## 2025-10-23 - 1.18.9 - fix(mod_commit)

Stage and commit deno.json when bumping/syncing versions and create/update git tags

- bumpDenoVersion now creates a Smartshell instance and runs git add deno.json, git commit -m "v<newVersion>", and git tag v<newVersion> to persist the version bump
- syncVersionToDenoJson now stages deno.json, amends the npm version commit with --no-edit, and recreates the tag with -fa to keep package.json and deno.json in sync
- Added informative logger messages after creating commits and tags
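The command sequence behind the second bullet is easiest to read in order; this condensed sketch mirrors the syncVersionToDenoJson implementation that appears in the mod.helpers.ts diff below (smartshell and newVersion are assumed to be in scope):

```typescript
// After `npm version` has created the version commit and tag for package.json:
await smartshell.exec('git add deno.json');                              // stage the synced deno.json
await smartshell.exec('git commit --amend --no-edit');                   // fold it into the npm version commit
await smartshell.exec(`git tag -fa v${newVersion} -m "v${newVersion}"`); // move the tag onto the amended commit
```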
## 2025-10-23 - 1.18.8 - fix(mod_commit)

Improve commit workflow: detect project type and current branch; add robust version bump helpers for npm/deno

- Add mod_commit/mod.helpers.ts with utilities: detectCurrentBranch(), detectProjectType(), bumpProjectVersion(), bumpDenoVersion(), bumpNpmVersion(), syncVersionToDenoJson(), and calculateNewVersion()
- Refactor ts/mod_commit/index.ts to use the new helpers: bumpProjectVersion(projectType, ...) instead of a hard npm version call and push the actual current branch instead of hardcoding 'master'
- Support bumping versions for npm-only, deno-only, and hybrid (both) projects and synchronize versions from package.json to deno.json when applicable
- Improve branch detection with a fallback to 'master' and informative logging on detection failures
- Add local Claude settings file (.claude/settings.local.json) (editor/CI config) — no code behavior change but included in diff

## 2025-09-07 - 1.18.7 - fix(claude)

Add .claude local settings to whitelist dev tool permissions

- Add .claude/settings.local.json to configure allowed permissions for local AI/tooling helpers (Bash commands, WebFetch, and mcp_serena actions).
- Disable enableAllProjectMcpServers (set to false) to limit automatic project MCP server usage.

## 2025-09-07 - 1.18.6 - fix(deps)

Bump dependency versions and add local Claude settings

- Updated devDependencies: @git.zone/tsbuild ^2.6.4 → ^2.6.8, @git.zone/tstest ^2.3.4 → ^2.3.6, @push.rocks/smartfile ^11.2.5 → ^11.2.7
- Updated dependencies: @git.zone/tsdoc ^1.5.1 → ^1.5.2, @git.zone/tspublish ^1.10.1 → ^1.10.3, @push.rocks/smartlog ^3.1.8 → ^3.1.9, @push.rocks/smartnpm ^2.0.4 → ^2.0.6, @push.rocks/smartscaf ^4.0.17 → ^4.0.19
- Added .claude/settings.local.json to configure local Claude permissions/settings

## 2025-08-17 - 1.18.5 - fix(dependencies)

Bump smartshell and smartscaf versions; add .claude local settings

- Update @push.rocks/smartshell from ^3.2.4 to ^3.3.0 in package.json
- Update @push.rocks/smartscaf from ^4.0.16 to ^4.0.17 in package.json
- Add .claude/settings.local.json for local assistant permissions/configuration

## 2025-08-17 - 1.18.4 - fix(cli)

Update dependencies, add local Claude settings, and update gitignore template

- Bump several dependencies: @git.zone/tsbuild -> ^2.6.4, @git.zone/tspublish -> ^1.10.1, @git.zone/tstest -> ^2.3.4, @push.rocks/smartfile -> ^11.2.5, @push.rocks/npmextra -> ^5.3.3, @push.rocks/smartchok -> ^1.1.1, @push.rocks/smartlog -> ^3.1.8, @push.rocks/smartpath -> ^6.0.0, prettier -> ^3.6.2
- Add .claude/settings.local.json with local permissions configuration for AI tooling
- Update assets/templates/gitignore to ignore .claude/ and .serena/ directories
- Add pnpm onlyBuiltDependencies entries: esbuild and mongodb-memory-server

## 2025-08-16 - 1.18.3 - fix(services)

Simplify S3 endpoint handling in ServiceConfiguration to store host only

- S3_ENDPOINT now stores the raw host (e.g. 'localhost') instead of a full URL with protocol and port.
- Default .nogit/env.json creation uses the host-only S3_ENDPOINT.
- Sync/update logic (when syncing with Docker or reconfiguring ports) sets S3_ENDPOINT to the host only.
- Consumers that previously relied on S3_ENDPOINT containing protocol and port should now construct the full endpoint URL using S3_USESSL, S3_HOST and S3_PORT.
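The last bullet of the 1.18.3 entry shifts a little work to consumers. A minimal sketch of that construction, using the field names from IServiceConfig later in this diff (the helper name is hypothetical):

```typescript
// Hypothetical consumer-side helper: S3_ENDPOINT now carries only the host,
// so the full URL is rebuilt from the SSL flag, host and port.
function buildS3Url(config: { S3_USESSL: boolean; S3_HOST: string; S3_PORT: string }): string {
  const protocol = config.S3_USESSL ? 'https' : 'http';
  return `${protocol}://${config.S3_HOST}:${config.S3_PORT}`;
}

// buildS3Url({ S3_USESSL: false, S3_HOST: 'localhost', S3_PORT: '9000' })
// -> 'http://localhost:9000'
```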
## 2025-08-16 - 1.18.1 - fix(services)

Improve services and commit flow: stop AiDoc, use silent docker inspect, sync ports with logging, fix config loading, and bump deps

- Ensure AiDoc is stopped after building commit recommendation to avoid resource leaks
- Use execSilent for `docker inspect` in DockerContainer to avoid shell noise and improve JSON parsing
- Sync Docker-exposed ports into service configuration with explicit notes (logs) when MongoDB / S3 ports are updated
- Fix synchronous config loading by removing an unnecessary await in ServiceConfiguration.loadConfig
- Bump dependencies: @push.rocks/smartshell -> ^3.2.4, @git.zone/tsdoc -> ^1.5.1
- Add pnpm.onlyBuiltDependencies for puppeteer and sharp to package.json
- Add local Claude settings file (.claude/settings.local.json) with development permissions

## 2025-08-16 - 1.18.0 - feat(services)

Add Docker port mapping sync and reconfigure workflow for local services

- Add getPortMappings to DockerContainer to extract port bindings from docker inspect output
- Sync existing container port mappings into .nogit/env.json when loading/creating service configuration
- Validate and automatically update ports only when containers are not present; preserve container ports when containers exist
- Recreate containers automatically if detected container port mappings differ from configuration (MongoDB and MinIO)
- Add reconfigure method and new CLI command to reassign ports and optionally restart services
- Improve status output to show configured ports and port availability information
- Minor helpers and imports updated (DockerContainer injected into ServiceConfiguration)
- Add .claude/settings.local.json (local permissions config) to repository
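To make the port-sync behaviour of 1.18.0 concrete, here is a small fragment in the spirit of ServiceConfiguration.syncPortsFromDocker (shown in full later in this compare); `docker` is assumed to be a DockerContainer instance, and the container name and ports are illustrative:

```typescript
// getPortMappings returns a containerPort -> hostPort map, e.g. { '27017': '27018' }
const mappings = await docker.getPortMappings('myproject-mongodb');
if (mappings && mappings['27017'] && config.MONGODB_PORT !== mappings['27017']) {
  // An existing container wins: adopt its published host port instead of fighting it
  config.MONGODB_PORT = mappings['27017'];
}
```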
## 2025-08-15 - 1.17.5 - fix(services)

Update S3 credentials naming and add S3_ENDPOINT/S3_USESSL support for improved MinIO integration

- Replaced S3_USER/S3_PASS with S3_ACCESSKEY/S3_SECRETKEY in ServiceConfiguration
- Added S3_ENDPOINT field with automatic protocol selection based on S3_USESSL
- Introduced S3_USESSL boolean field for SSL/TLS configuration
- Updated ServiceManager logging to display new S3_USESSL configuration
- Added .claude/settings.local.json for local permission settings

## 2025-08-15 - 1.17.4 - fix(services)

Update S3 credentials naming and add S3_ENDPOINT/S3_USESSL support for improved MinIO integration

- Replaced S3_USER/S3_PASS with S3_ACCESSKEY/S3_SECRETKEY in ServiceConfiguration
- Added S3_ENDPOINT field with automatic protocol selection based on S3_USESSL
- Added S3_USESSL boolean field for SSL/TLS configuration support
- Updated ServiceManager to use new credential names in container setup and logging

## 2025-08-15 - 1.17.3 - fix(serviceconfig)

Update service configuration to include dynamic MongoDB connection string and add local permissions settings

- Added .claude/settings.local.json for local permissions configuration
- Updated ServiceConfiguration to compute and update MONGODB_URL based on current config values
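For reference, the connection string computed by that update has the following shape (copied from the ServiceConfiguration diff further down):

```typescript
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
```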
## 2025-08-15 - 1.17.2 - fix(ci-test-services)

Update CI/CD configurations, test settings, and Docker service for MongoDB.

- Add .claude/settings.local.json with updated permission settings
- Introduce new GitLab CI, VSCode launch and settings, and updated test configuration files (.gitignore, .npmrc, npmextra.json, package.json, qenv.yml, readme.md)
- Update test scripts in test/test and test/ts to improve project validation
- Fix MongoDB Docker container command by adding '--bind_ip_all' for proper network binding

## 2025-08-15 - 1.17.1 - fix(services)

Improve services module logging and enhance MongoDB Compass integration
package.json (36 changed lines)
@@ -1,7 +1,7 @@
{
  "name": "@git.zone/cli",
  "private": false,
  "version": "1.17.1",
  "version": "1.19.0",
  "description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
  "main": "dist_ts/index.ts",
  "typings": "dist_ts/index.d.ts",
@@ -57,45 +57,45 @@
  },
  "homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
  "devDependencies": {
    "@git.zone/tsbuild": "^2.3.2",
    "@git.zone/tsbuild": "^2.6.8",
    "@git.zone/tsrun": "^1.3.3",
    "@git.zone/tstest": "^1.0.96",
    "@git.zone/tstest": "^2.3.6",
    "@push.rocks/smartdelay": "^3.0.5",
    "@push.rocks/smartfile": "^11.2.0",
    "@push.rocks/smartfile": "^11.2.7",
    "@push.rocks/smartinteract": "^2.0.16",
    "@push.rocks/smartnetwork": "^4.1.2",
    "@push.rocks/smartshell": "^3.2.3",
    "@push.rocks/smartshell": "^3.3.0",
    "@types/node": "^22.15.18"
  },
  "dependencies": {
    "@git.zone/tsdoc": "^1.5.0",
    "@git.zone/tspublish": "^1.9.1",
    "@git.zone/tsdoc": "^1.5.2",
    "@git.zone/tspublish": "^1.10.3",
    "@push.rocks/commitinfo": "^1.0.12",
    "@push.rocks/early": "^4.0.4",
    "@push.rocks/gulp-function": "^3.0.7",
    "@push.rocks/lik": "^6.2.2",
    "@push.rocks/npmextra": "^5.1.2",
    "@push.rocks/npmextra": "^5.3.3",
    "@push.rocks/projectinfo": "^5.0.2",
    "@push.rocks/smartchok": "^1.0.34",
    "@push.rocks/smartchok": "^1.1.1",
    "@push.rocks/smartcli": "^4.0.11",
    "@push.rocks/smartdiff": "^1.0.3",
    "@push.rocks/smartgulp": "^3.0.4",
    "@push.rocks/smartjson": "^5.0.20",
    "@push.rocks/smartlegal": "^1.0.27",
    "@push.rocks/smartlog": "^3.0.9",
    "@push.rocks/smartlog": "^3.1.9",
    "@push.rocks/smartlog-destination-local": "^9.0.2",
    "@push.rocks/smartmustache": "^3.0.2",
    "@push.rocks/smartnpm": "^2.0.4",
    "@push.rocks/smartnpm": "^2.0.6",
    "@push.rocks/smartobject": "^1.0.12",
    "@push.rocks/smartopen": "^2.0.0",
    "@push.rocks/smartpath": "^5.0.18",
    "@push.rocks/smartpath": "^6.0.0",
    "@push.rocks/smartpromise": "^4.2.3",
    "@push.rocks/smartscaf": "^4.0.16",
    "@push.rocks/smartscaf": "^4.0.19",
    "@push.rocks/smartstream": "^3.2.5",
    "@push.rocks/smartunique": "^3.0.9",
    "@push.rocks/smartupdate": "^2.0.6",
    "@types/through2": "^2.0.41",
    "prettier": "^3.5.3",
    "prettier": "^3.6.2",
    "through2": "^4.0.2"
  },
  "files": [
@@ -114,7 +114,13 @@
    "last 1 chrome versions"
  ],
  "pnpm": {
    "overrides": {}
    "overrides": {},
    "onlyBuiltDependencies": [
      "esbuild",
      "mongodb-memory-server",
      "puppeteer",
      "sharp"
    ]
  },
  "packageManager": "pnpm@10.7.0+sha512.6b865ad4b62a1d9842b61d674a393903b871d9244954f652b8842c2b553c72176b278f64c463e52d40fff8aba385c235c8c9ecf5cc7de4fd78b8bb6d49633ab6"
}
pnpm-lock.yaml (generated, 8069 changed lines): file diff suppressed because it is too large.
Submodule test added at 0b89443584
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@git.zone/cli',
  version: '1.16.10',
  version: '1.19.0',
  description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
}
@@ -3,6 +3,8 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import * as helpers from './mod.helpers.js';
import * as ui from './mod.ui.js';

export const run = async (argvArg: any) => {
  if (argvArg.format) {
@@ -10,22 +12,21 @@ export const run = async (argvArg: any) => {
    await formatMod.run();
  }

  logger.log('info', `gathering facts...`);
  ui.printHeader('🔍 Analyzing repository changes...');

  const aidoc = new plugins.tsdoc.AiDoc();
  await aidoc.start();

  const nextCommitObject = await aidoc.buildNextCommitObject(paths.cwd);

  logger.log(
    'info',
    `---------
Next recommended commit would be:
===========
-> ${nextCommitObject.recommendedNextVersion}:
-> ${nextCommitObject.recommendedNextVersionLevel}(${nextCommitObject.recommendedNextVersionScope}): ${nextCommitObject.recommendedNextVersionMessage}
===========
`,
  );
  await aidoc.stop();

  ui.printRecommendation({
    recommendedNextVersion: nextCommitObject.recommendedNextVersion,
    recommendedNextVersionLevel: nextCommitObject.recommendedNextVersionLevel,
    recommendedNextVersionScope: nextCommitObject.recommendedNextVersionScope,
    recommendedNextVersionMessage: nextCommitObject.recommendedNextVersionMessage,
  });
  const commitInteract = new plugins.smartinteract.SmartInteract();
  commitInteract.addQuestions([
    {
@@ -67,20 +68,30 @@ export const run = async (argvArg: any) => {
    }
  })();

  logger.log('info', `OK! Creating commit with message '${commitString}'`);
  ui.printHeader('✨ Creating Semantic Commit');
  ui.printCommitMessage(commitString);
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    sourceFilePaths: [],
  });

  logger.log('info', `Baking commitinfo into code ...`);
  // Determine total steps (6 if pushing, 5 if not)
  const totalSteps = answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true') ? 6 : 5;
  let currentStep = 0;

  // Step 1: Baking commitinfo
  currentStep++;
  ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'in-progress');
  const commitInfo = new plugins.commitinfo.CommitInfo(
    paths.cwd,
    commitVersionType,
  );
  await commitInfo.writeIntoPotentialDirs();
  ui.printStep(currentStep, totalSteps, '🔧 Baking commit info into code', 'done');

  logger.log('info', `Writing changelog.md ...`);
  // Step 2: Writing changelog
  currentStep++;
  ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'in-progress');
  let changelog = nextCommitObject.changelog;
  changelog = changelog.replaceAll(
    '{{nextVersion}}',
@@ -107,17 +118,54 @@ export const run = async (argvArg: any) => {
    changelog,
    plugins.path.join(paths.cwd, `changelog.md`),
  );
  ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'done');

  logger.log('info', `Staging files for commit:`);
  // Step 3: Staging files
  currentStep++;
  ui.printStep(currentStep, totalSteps, '📦 Staging files', 'in-progress');
  await smartshellInstance.exec(`git add -A`);
  ui.printStep(currentStep, totalSteps, '📦 Staging files', 'done');

  // Step 4: Creating commit
  currentStep++;
  ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'in-progress');
  await smartshellInstance.exec(`git commit -m "${commitString}"`);
  await smartshellInstance.exec(`npm version ${commitVersionType}`);
  ui.printStep(currentStep, totalSteps, '💾 Creating git commit', 'done');

  // Step 5: Bumping version
  currentStep++;
  const projectType = await helpers.detectProjectType();
  const newVersion = await helpers.bumpProjectVersion(projectType, commitVersionType, currentStep, totalSteps);

  // Step 6: Push to remote (optional)
  const currentBranch = await helpers.detectCurrentBranch();
  if (
    answerBucket.getAnswerFor('pushToOrigin') &&
    !(process.env.CI === 'true')
  ) {
    await smartshellInstance.exec(`git push origin master --follow-tags`);
    currentStep++;
    ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'in-progress');
    await smartshellInstance.exec(`git push origin ${currentBranch} --follow-tags`);
    ui.printStep(currentStep, totalSteps, `🚀 Pushing to origin/${currentBranch}`, 'done');
  }

  console.log(''); // Add spacing before summary

  // Get commit SHA for summary
  const commitShaResult = await smartshellInstance.exec('git rev-parse --short HEAD');
  const commitSha = commitShaResult.stdout.trim();

  // Print final summary
  ui.printSummary({
    projectType,
    branch: currentBranch,
    commitType: answerBucket.getAnswerFor('commitType'),
    commitScope: answerBucket.getAnswerFor('commitScope'),
    commitMessage: answerBucket.getAnswerFor('commitDescription'),
    newVersion: newVersion,
    commitSha: commitSha,
    pushed: answerBucket.getAnswerFor('pushToOrigin') && !(process.env.CI === 'true'),
  });
};

const createCommitStringFromAnswerBucket = (
ts/mod_commit/mod.helpers.ts (new file, 256 lines)
@@ -0,0 +1,256 @@
import * as plugins from './mod.plugins.js';
import * as paths from '../paths.js';
import { logger } from '../gitzone.logging.js';
import * as ui from './mod.ui.js';

export type ProjectType = 'npm' | 'deno' | 'both' | 'none';
export type VersionType = 'patch' | 'minor' | 'major';

/**
 * Detects the current git branch
 * @returns The current branch name, defaults to 'master' if detection fails
 */
export async function detectCurrentBranch(): Promise<string> {
  try {
    const smartshellInstance = new plugins.smartshell.Smartshell({
      executor: 'bash',
      sourceFilePaths: [],
    });
    const result = await smartshellInstance.exec('git branch --show-current');
    const branchName = result.stdout.trim();

    if (!branchName) {
      logger.log('warn', 'Could not detect current branch, falling back to "master"');
      return 'master';
    }

    logger.log('info', `Detected current branch: ${branchName}`);
    return branchName;
  } catch (error) {
    logger.log('warn', `Failed to detect branch: ${error.message}, falling back to "master"`);
    return 'master';
  }
}

/**
 * Detects the project type based on presence of package.json and/or deno.json
 * @returns The project type
 */
export async function detectProjectType(): Promise<ProjectType> {
  const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
  const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');

  const hasPackageJson = await plugins.smartfile.fs.fileExists(packageJsonPath);
  const hasDenoJson = await plugins.smartfile.fs.fileExists(denoJsonPath);

  if (hasPackageJson && hasDenoJson) {
    logger.log('info', 'Detected dual project (npm + deno)');
    return 'both';
  } else if (hasPackageJson) {
    logger.log('info', 'Detected npm project');
    return 'npm';
  } else if (hasDenoJson) {
    logger.log('info', 'Detected deno project');
    return 'deno';
  } else {
    throw new Error('No package.json or deno.json found in current directory');
  }
}

/**
 * Parses a semantic version string and bumps it according to the version type
 * @param currentVersion Current version string (e.g., "1.2.3")
 * @param versionType Type of version bump
 * @returns New version string
 */
function calculateNewVersion(currentVersion: string, versionType: VersionType): string {
  const versionMatch = currentVersion.match(/^(\d+)\.(\d+)\.(\d+)/);

  if (!versionMatch) {
    throw new Error(`Invalid version format: ${currentVersion}`);
  }

  let [, major, minor, patch] = versionMatch.map(Number);

  switch (versionType) {
    case 'major':
      major += 1;
      minor = 0;
      patch = 0;
      break;
    case 'minor':
      minor += 1;
      patch = 0;
      break;
    case 'patch':
      patch += 1;
      break;
  }

  return `${major}.${minor}.${patch}`;
}
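A few concrete input/output pairs for the bump logic above (the function is module-private, so these are illustrative rather than test code):

```typescript
// calculateNewVersion('1.2.3', 'patch')      -> '1.2.4'
// calculateNewVersion('1.2.3', 'minor')      -> '1.3.0'
// calculateNewVersion('1.2.3', 'major')      -> '2.0.0'
// calculateNewVersion('1.2.3-rc.1', 'minor') -> '1.3.0'  (the anchored regex drops prerelease/build suffixes)
// calculateNewVersion('v1.2.3', 'patch')     -> throws 'Invalid version format: v1.2.3'
```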
/**
 * Bumps the version in deno.json, commits the change, and creates a tag
 * @param versionType Type of version bump
 * @returns The new version string
 */
export async function bumpDenoVersion(versionType: VersionType): Promise<string> {
  const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    sourceFilePaths: [],
  });

  try {
    // Read deno.json
    const denoConfig = plugins.smartfile.fs.toObjectSync(
      denoJsonPath
    ) as { version?: string };

    if (!denoConfig.version) {
      throw new Error('deno.json does not contain a version field');
    }

    const currentVersion = denoConfig.version;
    const newVersion = calculateNewVersion(currentVersion, versionType);

    logger.log('info', `Bumping deno.json version: ${currentVersion} → ${newVersion}`);

    // Update version
    denoConfig.version = newVersion;

    // Write back to disk
    await plugins.smartfile.memory.toFs(
      JSON.stringify(denoConfig, null, 2) + '\n',
      denoJsonPath
    );

    // Stage the deno.json file
    await smartshellInstance.exec('git add deno.json');

    // Commit the version bump
    await smartshellInstance.exec(`git commit -m "v${newVersion}"`);

    // Create the version tag
    await smartshellInstance.exec(`git tag v${newVersion} -m "v${newVersion}"`);

    logger.log('info', `Created commit and tag v${newVersion}`);

    return newVersion;
  } catch (error) {
    throw new Error(`Failed to bump deno.json version: ${error.message}`);
  }
}

/**
 * Bumps the version in package.json using npm version command
 * @param versionType Type of version bump
 * @returns The new version string
 */
async function bumpNpmVersion(versionType: VersionType): Promise<string> {
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    sourceFilePaths: [],
  });

  logger.log('info', `Bumping package.json version using npm version ${versionType}`);
  const result = await smartshellInstance.exec(`npm version ${versionType}`);

  // npm version returns the new version with a 'v' prefix, e.g., "v1.2.3"
  const newVersion = result.stdout.trim().replace(/^v/, '');
  return newVersion;
}

/**
 * Syncs the version from package.json to deno.json and amends the npm commit
 * @param version The version to sync
 */
async function syncVersionToDenoJson(version: string): Promise<void> {
  const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
  const smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    sourceFilePaths: [],
  });

  try {
    const denoConfig = plugins.smartfile.fs.toObjectSync(
      denoJsonPath
    ) as { version?: string };

    logger.log('info', `Syncing version to deno.json: ${version}`);
    denoConfig.version = version;

    await plugins.smartfile.memory.toFs(
      JSON.stringify(denoConfig, null, 2) + '\n',
      denoJsonPath
    );

    // Stage the deno.json file
    await smartshellInstance.exec('git add deno.json');

    // Amend the npm version commit to include deno.json
    await smartshellInstance.exec('git commit --amend --no-edit');

    // Re-create the tag with force to update it
    await smartshellInstance.exec(`git tag -fa v${version} -m "v${version}"`);

    logger.log('info', `Amended commit to include deno.json and updated tag v${version}`);
  } catch (error) {
    throw new Error(`Failed to sync version to deno.json: ${error.message}`);
  }
}

/**
 * Bumps the project version based on project type
 * @param projectType The detected project type
 * @param versionType The type of version bump
 * @param currentStep The current step number for progress display
 * @param totalSteps The total number of steps for progress display
 * @returns The new version string
 */
export async function bumpProjectVersion(
  projectType: ProjectType,
  versionType: VersionType,
  currentStep?: number,
  totalSteps?: number
): Promise<string> {
  const projectEmoji = projectType === 'npm' ? '📦' : projectType === 'deno' ? '🦕' : '🔀';
  const description = `🏷️ Bumping version (${projectEmoji} ${projectType})`;

  if (currentStep && totalSteps) {
    ui.printStep(currentStep, totalSteps, description, 'in-progress');
  }

  let newVersion: string;

  switch (projectType) {
    case 'npm':
      newVersion = await bumpNpmVersion(versionType);
      break;

    case 'deno':
      newVersion = await bumpDenoVersion(versionType);
      break;

    case 'both': {
      // Bump npm version first (it handles git tags)
      newVersion = await bumpNpmVersion(versionType);
      // Then sync to deno.json
      await syncVersionToDenoJson(newVersion);
      break;
    }

    case 'none':
      throw new Error('Cannot bump version: no package.json or deno.json found');

    default:
      throw new Error(`Unknown project type: ${projectType}`);
  }

  if (currentStep && totalSteps) {
    ui.printStep(currentStep, totalSteps, description, 'done');
  }

  return newVersion;
}
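Taken together, the helpers above are meant to be composed the way the refactored ts/mod_commit/index.ts (shown earlier in this compare) composes them; a condensed sketch:

```typescript
import * as helpers from './mod.helpers.js';

const projectType = await helpers.detectProjectType();        // 'npm' | 'deno' | 'both', throws when neither file exists
const newVersion = await helpers.bumpProjectVersion(projectType, 'patch', 5, 6); // reports progress as step 5/6
const branch = await helpers.detectCurrentBranch();            // falls back to 'master' on failure
// the caller then runs: git push origin ${branch} --follow-tags
```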
ts/mod_commit/mod.ui.ts (new file, 196 lines)
@@ -0,0 +1,196 @@
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
/**
|
||||
* UI helper module for beautiful CLI output
|
||||
*/
|
||||
|
||||
interface ICommitSummary {
|
||||
projectType: string;
|
||||
branch: string;
|
||||
commitType: string;
|
||||
commitScope: string;
|
||||
commitMessage: string;
|
||||
newVersion: string;
|
||||
commitSha?: string;
|
||||
pushed: boolean;
|
||||
repoUrl?: string;
|
||||
}
|
||||
|
||||
interface IRecommendation {
|
||||
recommendedNextVersion: string;
|
||||
recommendedNextVersionLevel: string;
|
||||
recommendedNextVersionScope: string;
|
||||
recommendedNextVersionMessage: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Print a header with a box around it
|
||||
*/
|
||||
export function printHeader(title: string): void {
|
||||
const width = 57;
|
||||
const padding = Math.max(0, width - title.length - 2);
|
||||
const leftPad = Math.floor(padding / 2);
|
||||
const rightPad = padding - leftPad;
|
||||
|
||||
console.log('');
|
||||
console.log('╭─' + '─'.repeat(width) + '─╮');
|
||||
console.log('│ ' + title + ' '.repeat(rightPad + leftPad) + ' │');
|
||||
console.log('╰─' + '─'.repeat(width) + '─╯');
|
||||
console.log('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Print a section with a border
|
||||
*/
|
||||
export function printSection(title: string, lines: string[]): void {
|
||||
const width = 59;
|
||||
|
||||
console.log('┌─ ' + title + ' ' + '─'.repeat(Math.max(0, width - title.length - 3)) + '┐');
|
||||
console.log('│' + ' '.repeat(width) + '│');
|
||||
|
||||
for (const line of lines) {
|
||||
const padding = width - line.length;
|
||||
console.log('│ ' + line + ' '.repeat(Math.max(0, padding - 2)) + '│');
|
||||
}
|
||||
|
||||
console.log('│' + ' '.repeat(width) + '│');
|
||||
console.log('└─' + '─'.repeat(width) + '─┘');
|
||||
console.log('');
|
||||
}
|
||||
|
||||
/**
|
||||
* Print AI recommendations in a nice box
|
||||
*/
|
||||
export function printRecommendation(recommendation: IRecommendation): void {
|
||||
const lines = [
|
||||
`Suggested Version: v${recommendation.recommendedNextVersion}`,
|
||||
`Suggested Type: ${recommendation.recommendedNextVersionLevel}`,
|
||||
`Suggested Scope: ${recommendation.recommendedNextVersionScope}`,
|
||||
`Suggested Message: ${recommendation.recommendedNextVersionMessage}`,
|
||||
];
|
||||
|
||||
printSection('📊 AI Recommendations', lines);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print a progress step
|
||||
*/
|
||||
export function printStep(
|
||||
current: number,
|
||||
total: number,
|
||||
description: string,
|
||||
status: 'in-progress' | 'done' | 'error'
|
||||
): void {
|
||||
const statusIcon = status === 'done' ? '✓' : status === 'error' ? '✗' : '⏳';
|
||||
const dots = '.'.repeat(Math.max(0, 40 - description.length));
|
||||
|
||||
console.log(` [${current}/${total}] ${description}${dots} ${statusIcon}`);
|
||||
|
||||
// Clear the line on next update if in progress
|
||||
if (status === 'in-progress') {
|
||||
process.stdout.write('\x1b[1A'); // Move cursor up one line
|
||||
}
|
||||
}
|
||||
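printStep is designed to be called twice per step, once with 'in-progress' and once with 'done', so the cursor-up escape above lets the second call overwrite the pending line. Roughly, as used from index.ts (the step text and shell command are taken from that diff):

```typescript
ui.printStep(3, 5, '📦 Staging files', 'in-progress'); // prints the line, then moves the cursor back up
await smartshellInstance.exec(`git add -A`);            // the long-running work for this step
ui.printStep(3, 5, '📦 Staging files', 'done');         // reprints the same line with a ✓ in place
```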
|
||||
/**
|
||||
* Get emoji for project type
|
||||
*/
|
||||
function getProjectTypeEmoji(projectType: string): string {
|
||||
switch (projectType) {
|
||||
case 'npm':
|
||||
return '📦 npm';
|
||||
case 'deno':
|
||||
return '🦕 Deno';
|
||||
case 'both':
|
||||
return '🔀 npm + Deno';
|
||||
default:
|
||||
return '❓ Unknown';
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get emoji for commit type
|
||||
*/
|
||||
function getCommitTypeEmoji(commitType: string): string {
|
||||
switch (commitType) {
|
||||
case 'fix':
|
||||
return '🔧 fix';
|
||||
case 'feat':
|
||||
return '✨ feat';
|
||||
case 'BREAKING CHANGE':
|
||||
return '💥 BREAKING CHANGE';
|
||||
default:
|
||||
return commitType;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Print final commit summary
|
||||
*/
|
||||
export function printSummary(summary: ICommitSummary): void {
|
||||
const lines = [
|
||||
`Project Type: ${getProjectTypeEmoji(summary.projectType)}`,
|
||||
`Branch: 🌿 ${summary.branch}`,
|
||||
`Commit Type: ${getCommitTypeEmoji(summary.commitType)}`,
|
||||
`Scope: 📍 ${summary.commitScope}`,
|
||||
`New Version: 🏷️ v${summary.newVersion}`,
|
||||
];
|
||||
|
||||
if (summary.commitSha) {
|
||||
lines.push(`Commit SHA: 📌 ${summary.commitSha}`);
|
||||
}
|
||||
|
||||
if (summary.pushed) {
|
||||
lines.push(`Remote: ✓ Pushed successfully`);
|
||||
} else {
|
||||
lines.push(`Remote: ⊘ Not pushed (local only)`);
|
||||
}
|
||||
|
||||
if (summary.repoUrl && summary.commitSha) {
|
||||
lines.push('');
|
||||
lines.push(`View at: ${summary.repoUrl}/commit/${summary.commitSha}`);
|
||||
}
|
||||
|
||||
printSection('✅ Commit Summary', lines);
|
||||
|
||||
if (summary.pushed) {
|
||||
console.log('🎉 All done! Your changes are committed and pushed.\n');
|
||||
} else {
|
||||
console.log('✓ Commit created successfully.\n');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Print an info message with consistent formatting
|
||||
*/
|
||||
export function printInfo(message: string): void {
|
||||
console.log(` ℹ️ ${message}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print a success message
|
||||
*/
|
||||
export function printSuccess(message: string): void {
|
||||
console.log(` ✓ ${message}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print a warning message
|
||||
*/
|
||||
export function printWarning(message: string): void {
|
||||
logger.log('warn', `⚠️ ${message}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print an error message
|
||||
*/
|
||||
export function printError(message: string): void {
|
||||
logger.log('error', `✗ ${message}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Print commit message being created
|
||||
*/
|
||||
export function printCommitMessage(commitString: string): void {
|
||||
console.log(`\n 📝 Commit: ${commitString}\n`);
|
||||
}
|
||||
@@ -215,7 +215,7 @@ export class DockerContainer {
   */
  public async inspect(containerName: string): Promise<any> {
    try {
      const result = await this.smartshell.exec(`docker inspect ${containerName}`);
      const result = await this.smartshell.execSilent(`docker inspect ${containerName}`);
      if (result.exitCode === 0) {
        return JSON.parse(result.stdout);
      }
@@ -224,4 +224,38 @@
      return null;
    }
  }

  /**
   * Get port mappings for a container
   */
  public async getPortMappings(containerName: string): Promise<{ [key: string]: string } | null> {
    try {
      // Use docker inspect without format to get full JSON, then extract PortBindings
      const result = await this.smartshell.execSilent(`docker inspect ${containerName}`);

      if (result.exitCode === 0 && result.stdout) {
        const inspectData = JSON.parse(result.stdout);
        if (inspectData && inspectData[0] && inspectData[0].HostConfig && inspectData[0].HostConfig.PortBindings) {
          const portBindings = inspectData[0].HostConfig.PortBindings;
          const mappings: { [key: string]: string } = {};

          // Convert Docker's port binding format to simple host:container mapping
          for (const [containerPort, hostBindings] of Object.entries(portBindings)) {
            if (Array.isArray(hostBindings) && hostBindings.length > 0) {
              const hostPort = (hostBindings[0] as any).HostPort;
              if (hostPort) {
                mappings[containerPort.replace('/tcp', '').replace('/udp', '')] = hostPort;
              }
            }
          }

          return mappings;
        }
      }
      return null;
    } catch (error) {
      // Silently fail - container might not exist
      return null;
    }
  }
}
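For reference, the HostConfig.PortBindings structure that getPortMappings walks looks roughly like this in `docker inspect` output (port values are illustrative), and collapses to a flat containerPort -> hostPort map:

```typescript
// Illustrative shape of inspectData[0].HostConfig.PortBindings:
const portBindings = {
  '27017/tcp': [{ HostIp: '0.0.0.0', HostPort: '27018' }],
  '9000/tcp': [{ HostIp: '', HostPort: '9000' }],
};
// getPortMappings reduces this to: { '27017': '27018', '9000': '9000' }
```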
@@ -1,6 +1,7 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
import { DockerContainer } from './classes.dockercontainer.js';
|
||||
|
||||
export interface IServiceConfig {
|
||||
PROJECT_NAME: string;
|
||||
@@ -9,20 +10,25 @@ export interface IServiceConfig {
|
||||
MONGODB_PORT: string;
|
||||
MONGODB_USER: string;
|
||||
MONGODB_PASS: string;
|
||||
MONGODB_URL: string;
|
||||
S3_HOST: string;
|
||||
S3_PORT: string;
|
||||
S3_CONSOLE_PORT: string;
|
||||
S3_USER: string;
|
||||
S3_PASS: string;
|
||||
S3_ACCESSKEY: string;
|
||||
S3_SECRETKEY: string;
|
||||
S3_BUCKET: string;
|
||||
S3_ENDPOINT: string;
|
||||
S3_USESSL: boolean;
|
||||
}
|
||||
|
||||
export class ServiceConfiguration {
|
||||
private configPath: string;
|
||||
private config: IServiceConfig;
|
||||
private docker: DockerContainer;
|
||||
|
||||
constructor() {
|
||||
this.configPath = plugins.path.join(process.cwd(), '.nogit', 'env.json');
|
||||
this.docker = new DockerContainer();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -38,6 +44,9 @@ export class ServiceConfiguration {
|
||||
await this.createDefaultConfig();
|
||||
}
|
||||
|
||||
// Sync ports from existing Docker containers if they exist
|
||||
await this.syncPortsFromDocker();
|
||||
|
||||
return this.config;
|
||||
}
|
||||
|
||||
@@ -77,7 +86,7 @@ export class ServiceConfiguration {
|
||||
* Load configuration from file
|
||||
*/
|
||||
private async loadConfig(): Promise<void> {
|
||||
const configContent = await plugins.smartfile.fs.toStringSync(this.configPath);
|
||||
const configContent = plugins.smartfile.fs.toStringSync(this.configPath);
|
||||
this.config = JSON.parse(configContent);
|
||||
}
|
||||
|
||||
@@ -95,19 +104,30 @@ export class ServiceConfiguration {
|
||||
s3ConsolePort++;
|
||||
}
|
||||
|
||||
const mongoUser = 'defaultadmin';
|
||||
const mongoPass = 'defaultpass';
|
||||
const mongoHost = 'localhost';
|
||||
const mongoName = projectName;
|
||||
const mongoPortStr = mongoPort.toString();
|
||||
const s3Host = 'localhost';
|
||||
const s3PortStr = s3Port.toString();
|
||||
|
||||
this.config = {
|
||||
PROJECT_NAME: projectName,
|
||||
MONGODB_HOST: 'localhost',
|
||||
MONGODB_NAME: projectName,
|
||||
MONGODB_PORT: mongoPort.toString(),
|
||||
MONGODB_USER: 'defaultadmin',
|
||||
MONGODB_PASS: 'defaultpass',
|
||||
S3_HOST: 'localhost',
|
||||
S3_PORT: s3Port.toString(),
|
||||
MONGODB_HOST: mongoHost,
|
||||
MONGODB_NAME: mongoName,
|
||||
MONGODB_PORT: mongoPortStr,
|
||||
MONGODB_USER: mongoUser,
|
||||
MONGODB_PASS: mongoPass,
|
||||
MONGODB_URL: `mongodb://${mongoUser}:${mongoPass}@${mongoHost}:${mongoPortStr}/${mongoName}?authSource=admin`,
|
||||
S3_HOST: s3Host,
|
||||
S3_PORT: s3PortStr,
|
||||
S3_CONSOLE_PORT: s3ConsolePort.toString(),
|
||||
S3_USER: 'defaultadmin',
|
||||
S3_PASS: 'defaultpass',
|
||||
S3_BUCKET: `${projectName}-documents`
|
||||
S3_ACCESSKEY: 'defaultadmin',
|
||||
S3_SECRETKEY: 'defaultpass',
|
||||
S3_BUCKET: `${projectName}-documents`,
|
||||
S3_ENDPOINT: s3Host,
|
||||
S3_USESSL: false
|
||||
};
|
||||
|
||||
await this.saveConfig();
|
||||
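Putting the defaults above together, the generated .nogit/env.json ends up with roughly this shape (ports are picked at random when the file is created, so the numbers here are illustrative):

```typescript
// Illustrative result of createDefaultConfig() for a project named 'myproject':
const exampleConfig: IServiceConfig = {
  PROJECT_NAME: 'myproject',
  MONGODB_HOST: 'localhost',
  MONGODB_NAME: 'myproject',
  MONGODB_PORT: '27018',
  MONGODB_USER: 'defaultadmin',
  MONGODB_PASS: 'defaultpass',
  MONGODB_URL: 'mongodb://defaultadmin:defaultpass@localhost:27018/myproject?authSource=admin',
  S3_HOST: 'localhost',
  S3_PORT: '9000',
  S3_CONSOLE_PORT: '9001',
  S3_ACCESSKEY: 'defaultadmin',
  S3_SECRETKEY: 'defaultpass',
  S3_BUCKET: 'myproject-documents',
  S3_ENDPOINT: 'localhost',
  S3_USESSL: false,
};
```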
@@ -164,6 +184,14 @@ export class ServiceConfiguration {
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// Always update MONGODB_URL based on current settings
|
||||
const oldUrl = this.config.MONGODB_URL;
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
if (oldUrl !== this.config.MONGODB_URL) {
|
||||
fieldsAdded.push('MONGODB_URL');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_HOST) {
|
||||
this.config.S3_HOST = 'localhost';
|
||||
fieldsAdded.push('S3_HOST');
|
||||
@@ -190,15 +218,15 @@ export class ServiceConfiguration {
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_USER) {
|
||||
this.config.S3_USER = 'defaultadmin';
|
||||
fieldsAdded.push('S3_USER');
|
||||
if (!this.config.S3_ACCESSKEY) {
|
||||
this.config.S3_ACCESSKEY = 'defaultadmin';
|
||||
fieldsAdded.push('S3_ACCESSKEY');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_PASS) {
|
||||
this.config.S3_PASS = 'defaultpass';
|
||||
fieldsAdded.push('S3_PASS');
|
||||
if (!this.config.S3_SECRETKEY) {
|
||||
this.config.S3_SECRETKEY = 'defaultpass';
|
||||
fieldsAdded.push('S3_SECRETKEY');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
@@ -208,6 +236,20 @@ export class ServiceConfiguration {
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.S3_USESSL) {
|
||||
this.config.S3_USESSL = false;
|
||||
fieldsAdded.push('S3_USESSL');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// Always update S3_ENDPOINT based on current settings
|
||||
const oldEndpoint = this.config.S3_ENDPOINT;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
if (oldEndpoint !== this.config.S3_ENDPOINT) {
|
||||
fieldsAdded.push('S3_ENDPOINT');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
await this.saveConfig();
|
||||
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
||||
@@ -243,4 +285,148 @@ export class ServiceConfiguration {
|
||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata')
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Sync port configuration from existing Docker containers
|
||||
*/
|
||||
private async syncPortsFromDocker(): Promise<void> {
|
||||
const containers = this.getContainerNames();
|
||||
let updated = false;
|
||||
|
||||
// Check MongoDB container
|
||||
const mongoStatus = await this.docker.getStatus(containers.mongo);
|
||||
if (mongoStatus !== 'not_exists') {
|
||||
const portMappings = await this.docker.getPortMappings(containers.mongo);
|
||||
if (portMappings && portMappings['27017']) {
|
||||
const dockerPort = portMappings['27017'];
|
||||
if (this.config.MONGODB_PORT !== dockerPort) {
|
||||
logger.log('note', `📍 Syncing MongoDB port from Docker: ${dockerPort}`);
|
||||
this.config.MONGODB_PORT = dockerPort;
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check MinIO container
|
||||
const minioStatus = await this.docker.getStatus(containers.minio);
|
||||
if (minioStatus !== 'not_exists') {
|
||||
const portMappings = await this.docker.getPortMappings(containers.minio);
|
||||
if (portMappings) {
|
||||
if (portMappings['9000']) {
|
||||
const dockerPort = portMappings['9000'];
|
||||
if (this.config.S3_PORT !== dockerPort) {
|
||||
logger.log('note', `📍 Syncing S3 API port from Docker: ${dockerPort}`);
|
||||
this.config.S3_PORT = dockerPort;
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
if (portMappings['9001']) {
|
||||
const dockerPort = portMappings['9001'];
|
||||
if (this.config.S3_CONSOLE_PORT !== dockerPort) {
|
||||
logger.log('note', `📍 Syncing S3 Console port from Docker: ${dockerPort}`);
|
||||
this.config.S3_CONSOLE_PORT = dockerPort;
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
|
||||
await this.saveConfig();
|
||||
logger.log('ok', '✅ Configuration synced with Docker containers');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate and update ports if they're not available
|
||||
*/
|
||||
public async validateAndUpdatePorts(): Promise<boolean> {
|
||||
let updated = false;
|
||||
const containers = this.getContainerNames();
|
||||
|
||||
// Check if containers exist - if they do, ports are fine
|
||||
const mongoExists = await this.docker.exists(containers.mongo);
|
||||
const minioExists = await this.docker.exists(containers.minio);
|
||||
|
||||
// Only check port availability if containers don't exist
|
||||
if (!mongoExists) {
|
||||
const mongoPort = parseInt(this.config.MONGODB_PORT);
|
||||
if (!(await helpers.isPortAvailable(mongoPort))) {
|
||||
logger.log('note', `⚠️ MongoDB port ${mongoPort} is in use, finding new port...`);
|
||||
const newPort = await helpers.getRandomAvailablePort();
|
||||
this.config.MONGODB_PORT = newPort.toString();
|
||||
logger.log('ok', `✅ New MongoDB port: ${newPort}`);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!minioExists) {
|
||||
const s3Port = parseInt(this.config.S3_PORT);
|
||||
const s3ConsolePort = parseInt(this.config.S3_CONSOLE_PORT);
|
||||
|
||||
if (!(await helpers.isPortAvailable(s3Port))) {
|
||||
logger.log('note', `⚠️ S3 API port ${s3Port} is in use, finding new port...`);
|
||||
const newPort = await helpers.getRandomAvailablePort();
|
||||
this.config.S3_PORT = newPort.toString();
|
||||
logger.log('ok', `✅ New S3 API port: ${newPort}`);
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||
logger.log('note', `⚠️ S3 Console port ${s3ConsolePort} is in use, finding new port...`);
|
||||
let newPort = parseInt(this.config.S3_PORT) + 1;
|
||||
while (!(await helpers.isPortAvailable(newPort))) {
|
||||
newPort++;
|
||||
}
|
||||
this.config.S3_CONSOLE_PORT = newPort.toString();
|
||||
logger.log('ok', `✅ New S3 Console port: ${newPort}`);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
|
||||
await this.saveConfig();
|
||||
}
|
||||
|
||||
return updated;
|
||||
}
|
||||
|
||||
/**
|
||||
* Force reconfigure all ports with new available ones
|
||||
*/
|
||||
public async reconfigurePorts(): Promise<void> {
|
||||
logger.log('note', '🔄 Finding new available ports...');
|
||||
|
||||
const mongoPort = await helpers.getRandomAvailablePort();
|
||||
const s3Port = await helpers.getRandomAvailablePort();
|
||||
let s3ConsolePort = s3Port + 1;
|
||||
|
||||
// Ensure console port is also available
|
||||
while (!(await helpers.isPortAvailable(s3ConsolePort))) {
|
||||
s3ConsolePort++;
|
||||
}
|
||||
|
||||
this.config.MONGODB_PORT = mongoPort.toString();
|
||||
this.config.S3_PORT = s3Port.toString();
|
||||
this.config.S3_CONSOLE_PORT = s3ConsolePort.toString();
|
||||
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
|
||||
await this.saveConfig();
|
||||
|
||||
logger.log('ok', '✅ New port configuration:');
|
||||
logger.log('info', ` 📍 MongoDB: ${mongoPort}`);
|
||||
logger.log('info', ` 📍 S3 API: ${s3Port}`);
|
||||
logger.log('info', ` 📍 S3 Console: ${s3ConsolePort}`);
|
||||
}
|
||||
}
|
||||
@@ -26,6 +26,9 @@ export class ServiceManager {
|
||||
// Load or create configuration
|
||||
await this.config.loadOrCreate();
|
||||
logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);
|
||||
|
||||
// Validate and update ports if needed
|
||||
await this.config.validateAndUpdatePorts();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -49,10 +52,42 @@ export class ServiceManager {
|
||||
break;
|
||||
|
||||
case 'stopped':
|
||||
if (await this.docker.start(containers.mongo)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
// Check if port mapping matches config
|
||||
const mongoPortMappings = await this.docker.getPortMappings(containers.mongo);
|
||||
if (mongoPortMappings && mongoPortMappings['27017'] !== config.MONGODB_PORT) {
|
||||
logger.log('note', ' Port configuration changed, recreating container...');
|
||||
await this.docker.remove(containers.mongo, true);
|
||||
// Fall through to create new container
|
||||
const success = await this.docker.run({
|
||||
name: containers.mongo,
|
||||
image: 'mongo:7.0',
|
||||
ports: {
|
||||
[`0.0.0.0:${config.MONGODB_PORT}`]: '27017'
|
||||
},
|
||||
volumes: {
|
||||
[directories.mongo]: '/data/db'
|
||||
},
|
||||
environment: {
|
||||
MONGO_INITDB_ROOT_USERNAME: config.MONGODB_USER,
|
||||
MONGO_INITDB_ROOT_PASSWORD: config.MONGODB_PASS,
|
||||
MONGO_INITDB_DATABASE: config.MONGODB_NAME
|
||||
},
|
||||
restart: 'unless-stopped',
|
||||
command: '--bind_ip_all'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Recreated with new port ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to recreate container');
|
||||
}
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
// Ports match, just start the container
|
||||
if (await this.docker.start(containers.mongo)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
@@ -73,7 +108,8 @@ export class ServiceManager {
|
||||
MONGO_INITDB_ROOT_PASSWORD: config.MONGODB_PASS,
|
||||
MONGO_INITDB_DATABASE: config.MONGODB_NAME
|
||||
},
|
||||
restart: 'unless-stopped'
|
||||
restart: 'unless-stopped',
|
||||
command: '--bind_ip_all'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
@@ -115,10 +151,60 @@ export class ServiceManager {
|
||||
break;
|
||||
|
||||
case 'stopped':
|
||||
if (await this.docker.start(containers.minio)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
// Check if port mapping matches config
|
||||
const minioPortMappings = await this.docker.getPortMappings(containers.minio);
|
||||
if (minioPortMappings &&
|
||||
(minioPortMappings['9000'] !== config.S3_PORT ||
|
||||
minioPortMappings['9001'] !== config.S3_CONSOLE_PORT)) {
|
||||
logger.log('note', ' Port configuration changed, recreating container...');
|
||||
await this.docker.remove(containers.minio, true);
|
||||
// Fall through to create new container
|
||||
const success = await this.docker.run({
|
||||
name: containers.minio,
|
||||
image: 'minio/minio',
|
||||
ports: {
|
||||
[config.S3_PORT]: '9000',
|
||||
[config.S3_CONSOLE_PORT]: '9001'
|
||||
},
|
||||
volumes: {
|
||||
[directories.minio]: '/data'
|
||||
},
|
||||
environment: {
|
||||
MINIO_ROOT_USER: config.S3_ACCESSKEY,
|
||||
MINIO_ROOT_PASSWORD: config.S3_SECRETKEY
|
||||
},
|
||||
restart: 'unless-stopped',
|
||||
command: 'server /data --console-address ":9001"'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Recreated with new ports ✓');
|
||||
|
||||
// Wait for MinIO to be ready
|
||||
await plugins.smartdelay.delayFor(3000);
|
||||
|
||||
// Create default bucket
|
||||
await this.docker.exec(
|
||||
containers.minio,
|
||||
`mc alias set local http://localhost:9000 ${config.S3_ACCESSKEY} ${config.S3_SECRETKEY}`
|
||||
);
|
||||
|
||||
await this.docker.exec(
|
||||
containers.minio,
|
||||
`mc mb local/${config.S3_BUCKET}`
|
||||
);
|
||||
|
||||
logger.log('ok', ` Bucket '${config.S3_BUCKET}' created ✓`);
|
||||
} else {
|
||||
logger.log('error', ' Failed to recreate container');
|
||||
}
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
// Ports match, just start the container
|
||||
if (await this.docker.start(containers.minio)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
@@ -136,8 +222,8 @@ export class ServiceManager {
|
||||
[directories.minio]: '/data'
|
||||
},
|
||||
environment: {
|
||||
MINIO_ROOT_USER: config.S3_USER,
|
||||
MINIO_ROOT_PASSWORD: config.S3_PASS
|
||||
MINIO_ROOT_USER: config.S3_ACCESSKEY,
|
||||
MINIO_ROOT_PASSWORD: config.S3_SECRETKEY
|
||||
},
|
||||
restart: 'unless-stopped',
|
||||
command: 'server /data --console-address ":9001"'
|
||||
@@ -152,7 +238,7 @@ export class ServiceManager {
|
||||
// Create default bucket
|
||||
await this.docker.exec(
|
||||
containers.minio,
|
||||
`mc alias set local http://localhost:9000 ${config.S3_USER} ${config.S3_PASS}`
|
||||
`mc alias set local http://localhost:9000 ${config.S3_ACCESSKEY} ${config.S3_SECRETKEY}`
|
||||
);
|
||||
|
||||
await this.docker.exec(
|
||||
@@ -171,7 +257,7 @@ export class ServiceManager {
|
||||
logger.log('info', ` Port: ${config.S3_PORT}`);
|
||||
logger.log('info', ` Bucket: ${config.S3_BUCKET}`);
|
||||
logger.log('info', ` API: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||
logger.log('info', ` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_USER}/***)`);
|
||||
logger.log('info', ` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_ACCESSKEY}/***)`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -232,6 +318,7 @@ export class ServiceManager {
|
||||
case 'running':
|
||||
logger.log('ok', '📦 MongoDB: 🟢 Running');
|
||||
logger.log('info', ` ├─ Container: ${containers.mongo}`);
|
||||
logger.log('info', ` ├─ Port: ${config.MONGODB_PORT}`);
|
||||
logger.log('info', ` ├─ Connection: ${this.config.getMongoConnectionString()}`);
|
||||
|
||||
// Show Compass connection string
|
||||
@@ -241,10 +328,19 @@ export class ServiceManager {
|
||||
break;
|
||||
case 'stopped':
|
||||
logger.log('note', '📦 MongoDB: 🟡 Stopped');
|
||||
logger.log('info', ` └─ Container: ${containers.mongo}`);
|
||||
logger.log('info', ` ├─ Container: ${containers.mongo}`);
|
||||
logger.log('info', ` └─ Port: ${config.MONGODB_PORT}`);
|
||||
break;
|
||||
case 'not_exists':
|
||||
logger.log('info', '📦 MongoDB: ⚪ Not installed');
|
||||
// Check port availability
|
||||
const mongoPort = parseInt(config.MONGODB_PORT);
|
||||
const mongoAvailable = await helpers.isPortAvailable(mongoPort);
|
||||
if (!mongoAvailable) {
|
||||
logger.log('error', ` └─ ⚠️ Port ${mongoPort} is in use by another process`);
|
||||
} else {
|
||||
logger.log('info', ` └─ Port ${mongoPort} is available`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
@@ -260,10 +356,33 @@ export class ServiceManager {
|
||||
break;
|
||||
case 'stopped':
|
||||
logger.log('note', '📦 S3/MinIO: 🟡 Stopped');
|
||||
logger.log('info', ` └─ Container: ${containers.minio}`);
|
||||
logger.log('info', ` ├─ Container: ${containers.minio}`);
|
||||
logger.log('info', ` ├─ API Port: ${config.S3_PORT}`);
|
||||
logger.log('info', ` └─ Console Port: ${config.S3_CONSOLE_PORT}`);
|
||||
break;
|
||||
case 'not_exists':
|
||||
logger.log('info', '📦 S3/MinIO: ⚪ Not installed');
|
||||
// Check port availability
|
||||
const s3Port = parseInt(config.S3_PORT);
|
||||
const s3ConsolePort = parseInt(config.S3_CONSOLE_PORT);
|
||||
const s3Available = await helpers.isPortAvailable(s3Port);
|
||||
const consoleAvailable = await helpers.isPortAvailable(s3ConsolePort);
|
||||
|
||||
if (!s3Available || !consoleAvailable) {
|
||||
if (!s3Available) {
|
||||
logger.log('error', ` ├─ ⚠️ API Port ${s3Port} is in use`);
|
||||
} else {
|
||||
logger.log('info', ` ├─ API Port ${s3Port} is available`);
|
||||
}
|
||||
if (!consoleAvailable) {
|
||||
logger.log('error', ` └─ ⚠️ Console Port ${s3ConsolePort} is in use`);
|
||||
} else {
|
||||
logger.log('info', ` └─ Console Port ${s3ConsolePort} is available`);
|
||||
}
|
||||
} else {
|
||||
logger.log('info', ` ├─ API Port ${s3Port} is available`);
|
||||
logger.log('info', ` └─ Console Port ${s3ConsolePort} is available`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -293,12 +412,13 @@ export class ServiceManager {
|
||||
logger.log('info', ` Host: ${config.S3_HOST}`);
|
||||
logger.log('info', ` API Port: ${config.S3_PORT}`);
|
||||
logger.log('info', ` Console Port: ${config.S3_CONSOLE_PORT}`);
|
||||
logger.log('info', ` User: ${config.S3_USER}`);
|
||||
logger.log('info', ' Password: ***');
|
||||
logger.log('info', ` Access Key: ${config.S3_ACCESSKEY}`);
|
||||
logger.log('info', ' Secret Key: ***');
|
||||
logger.log('info', ` Bucket: ${config.S3_BUCKET}`);
|
||||
logger.log('info', ` Use SSL: ${config.S3_USESSL}`);
|
||||
logger.log('info', ` Container: ${this.config.getContainerNames().minio}`);
|
||||
logger.log('info', ` Data: ${this.config.getDataDirectories().minio}`);
|
||||
logger.log('info', ` API URL: http://${config.S3_HOST}:${config.S3_PORT}`);
|
||||
logger.log('info', ` Endpoint: ${config.S3_ENDPOINT}`);
|
||||
logger.log('info', ` Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
|
||||
}
|
||||
|
||||
@@ -420,4 +540,44 @@ export class ServiceManager {
|
||||
logger.log('note', ' No data to clean');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reconfigure services with new ports
|
||||
*/
|
||||
public async reconfigure(): Promise<void> {
|
||||
helpers.printHeader('Reconfiguring Services');
|
||||
|
||||
const containers = this.config.getContainerNames();
|
||||
|
||||
// Stop existing containers
|
||||
logger.log('note', '🛑 Stopping existing containers...');
|
||||
|
||||
if (await this.docker.exists(containers.mongo)) {
|
||||
await this.docker.stop(containers.mongo);
|
||||
logger.log('ok', ' MongoDB stopped ✓');
|
||||
}
|
||||
|
||||
if (await this.docker.exists(containers.minio)) {
|
||||
await this.docker.stop(containers.minio);
|
||||
logger.log('ok', ' S3/MinIO stopped ✓');
|
||||
}
|
||||
|
||||
// Reconfigure ports
|
||||
await this.config.reconfigurePorts();
|
||||
|
||||
// Ask if user wants to restart services
|
||||
const smartinteract = new plugins.smartinteract.SmartInteract();
|
||||
const response = await smartinteract.askQuestion({
|
||||
name: 'restart',
|
||||
type: 'confirm',
|
||||
message: 'Do you want to start services with new ports?',
|
||||
default: true
|
||||
});
|
||||
|
||||
if (response.value) {
|
||||
console.log();
|
||||
await this.startMongoDB();
|
||||
await this.startMinIO();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -48,6 +48,10 @@ export const run = async (argvArg: any) => {
|
||||
await handleClean(serviceManager);
|
||||
break;
|
||||
|
||||
case 'reconfigure':
|
||||
await serviceManager.reconfigure();
|
||||
break;
|
||||
|
||||
case 'help':
|
||||
default:
|
||||
showHelp();
|
||||
@@ -195,6 +199,7 @@ function showHelp() {
|
||||
logger.log('info', ' config Show current configuration');
|
||||
logger.log('info', ' compass Show MongoDB Compass connection string');
|
||||
logger.log('info', ' logs [service] Show logs (mongo|s3|all) [lines]');
|
||||
logger.log('info', ' reconfigure Reassign ports and restart services');
|
||||
logger.log('info', ' remove Remove all containers');
|
||||
logger.log('info', ' clean Remove all containers and data ⚠️');
|
||||
logger.log('info', ' help Show this help message');
|
||||
|
||||