Compare commits
8 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| b44624f2e7 | |||
| 847e679e92 | |||
| ddf5023ecb | |||
| e1d28bc10a | |||
| 2f3d67f9e3 | |||
| 6304953234 | |||
| 8d84620bc4 | |||
| efd6f04e63 |
@@ -1,8 +1,5 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"experimentalDecorators": true,
|
||||
"emitDecoratorMetadata": true,
|
||||
"useDefineForClassFields": false,
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
|
||||
33
changelog.md
33
changelog.md
@@ -1,5 +1,38 @@
|
||||
# Changelog
|
||||
|
||||
## 2025-11-29 - 2.1.0 - feat(mod_services)
|
||||
Add global service registry and global commands for managing project containers
|
||||
|
||||
- Introduce GlobalRegistry class to track registered projects, their containers, ports and last activity (ts/mod_services/classes.globalregistry.ts)
|
||||
- Add global CLI mode for services (use -g/--global) with commands: list, status, stop, cleanup (ts/mod_services/index.ts)
|
||||
- ServiceManager now registers the current project with the global registry when starting services and unregisters when all containers are removed (ts/mod_services/classes.servicemanager.ts)
|
||||
- Global handlers to list projects, show aggregated status, stop containers across projects and cleanup stale entries
|
||||
- Bump dependency @push.rocks/smartfile to ^13.1.0 in package.json
|
||||
|
||||
## 2025-11-27 - 2.0.0 - BREAKING CHANGE(core)
|
||||
Migrate filesystem to smartfs (async) and add Elasticsearch service support; refactor format/commit/meta modules
|
||||
|
||||
- Replace @push.rocks/smartfile usage with @push.rocks/smartfs across the codebase; all filesystem operations are now async (SmartFs.file(...).read()/write(), SmartFs.directory(...).list()/create()/delete(), etc.)
|
||||
- Convert formerly synchronous helpers and APIs to async (notable: detectProjectType, getProjectName, readCurrentVersion and related version bumping logic). Callers updated accordingly.
|
||||
- Add Elasticsearch support to services: new config fields (ELASTICSEARCH_*), Docker run/start/stop/logs/status handling, and ELASTICSEARCH_URL in service configuration.
|
||||
- Refactor formatting subsystem: cache and rollback/backup systems removed/disabled for stability, format planner execution simplified (sequential), diff/stats reporting updated to use smartfs.
|
||||
- Update package.json dependencies: bump @git.zone/tsbuild, tsrun, tstest; upgrade @push.rocks/smartfile to v13 and add @push.rocks/smartfs dependency; update @types/node.
|
||||
- Update commit flow and changelog generation to use smartfs for reading/writing files and to await version/branch detection where necessary.
|
||||
- Expose a SmartFs instance via plugins and adjust all mod.* plugin files to import/use smartfs where required.
|
||||
- Breaking change: Public and internal APIs that previously used synchronous smartfile APIs are now asynchronous. Consumers and scripts must await these functions and use the new smartfs API.
|
||||
|
||||
## 2025-11-17 - 1.21.5 - fix(tsconfig)
|
||||
Remove emitDecoratorMetadata from tsconfig template
|
||||
|
||||
- Removed the "emitDecoratorMetadata" compiler option from assets/templates/tsconfig_update/tsconfig.json
|
||||
- This updates the tsconfig template to avoid emitting decorator metadata when targeting ES2022
|
||||
|
||||
## 2025-11-17 - 1.21.4 - fix(tsconfig template)
|
||||
Remove experimentalDecorators and useDefineForClassFields from tsconfig template
|
||||
|
||||
- Removed experimentalDecorators option from assets/templates/tsconfig_update/tsconfig.json
|
||||
- Removed useDefineForClassFields option from assets/templates/tsconfig_update/tsconfig.json
|
||||
|
||||
## 2025-11-17 - 1.21.3 - fix(assets/templates/multienv)
|
||||
Remove unused Bun configuration template (assets/templates/multienv/bunfig.toml)
|
||||
|
||||
|
||||
13
package.json
13
package.json
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@git.zone/cli",
|
||||
"private": false,
|
||||
"version": "1.21.3",
|
||||
"version": "2.1.0",
|
||||
"description": "A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.",
|
||||
"main": "dist_ts/index.ts",
|
||||
"typings": "dist_ts/index.d.ts",
|
||||
@@ -57,15 +57,14 @@
|
||||
},
|
||||
"homepage": "https://gitlab.com/gitzone/private/gitzone#readme",
|
||||
"devDependencies": {
|
||||
"@git.zone/tsbuild": "^2.7.1",
|
||||
"@git.zone/tsrun": "^1.6.2",
|
||||
"@git.zone/tstest": "^2.7.0",
|
||||
"@git.zone/tsbuild": "^3.1.0",
|
||||
"@git.zone/tsrun": "^2.0.0",
|
||||
"@git.zone/tstest": "^3.1.3",
|
||||
"@push.rocks/smartdelay": "^3.0.5",
|
||||
"@push.rocks/smartfile": "^11.2.7",
|
||||
"@push.rocks/smartinteract": "^2.0.16",
|
||||
"@push.rocks/smartnetwork": "^4.4.0",
|
||||
"@push.rocks/smartshell": "^3.3.0",
|
||||
"@types/node": "^22.15.18"
|
||||
"@types/node": "^24.10.1"
|
||||
},
|
||||
"dependencies": {
|
||||
"@git.zone/tsdoc": "^1.9.2",
|
||||
@@ -79,6 +78,8 @@
|
||||
"@push.rocks/smartchok": "^1.1.1",
|
||||
"@push.rocks/smartcli": "^4.0.19",
|
||||
"@push.rocks/smartdiff": "^1.0.3",
|
||||
"@push.rocks/smartfile": "^13.1.0",
|
||||
"@push.rocks/smartfs": "^1.1.0",
|
||||
"@push.rocks/smartgulp": "^3.0.4",
|
||||
"@push.rocks/smartjson": "^5.2.0",
|
||||
"@push.rocks/smartlegal": "^1.0.27",
|
||||
|
||||
1974
pnpm-lock.yaml
generated
1974
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -243,7 +243,60 @@ gitzone format --clean-backups
|
||||
|
||||
## API Changes
|
||||
|
||||
- smartfile API updated to use fs._ and memory._ namespaces
|
||||
### Smartfile v13 Migration (Latest - Completed)
|
||||
|
||||
The project has been fully migrated from @push.rocks/smartfile v11 to v13, which introduced a major breaking change where filesystem operations were split into two separate packages:
|
||||
|
||||
**Packages:**
|
||||
- `@push.rocks/smartfile` v13.0.1 - File representation classes (SmartFile, StreamFile, VirtualDirectory)
|
||||
- `@push.rocks/smartfs` v1.1.0 - Filesystem operations (read, write, exists, stat, etc.)
|
||||
|
||||
**Key API Changes:**
|
||||
1. **File Reading**:
|
||||
- Old: `plugins.smartfile.fs.toStringSync(path)` or `plugins.smartfile.fs.toObjectSync(path)`
|
||||
- New: `await plugins.smartfs.file(path).encoding('utf8').read()` + JSON.parse if needed
|
||||
- Important: `read()` returns `string | Buffer` - use `as string` type assertion when encoding is set
|
||||
|
||||
2. **File Writing**:
|
||||
- Old: `plugins.smartfile.memory.toFs(content, path)` or `plugins.smartfile.memory.toFsSync(content, path)`
|
||||
- New: `await plugins.smartfs.file(path).encoding('utf8').write(content)`
|
||||
|
||||
3. **File Existence**:
|
||||
- Old: `plugins.smartfile.fs.fileExists(path)` or `plugins.smartfile.fs.fileExistsSync(path)`
|
||||
- New: `await plugins.smartfs.file(path).exists()`
|
||||
|
||||
4. **Directory Operations**:
|
||||
- Old: `plugins.smartfile.fs.ensureDir(path)`
|
||||
- New: `await plugins.smartfs.directory(path).recursive().create()`
|
||||
- Old: `plugins.smartfile.fs.remove(path)`
|
||||
- New: `await plugins.smartfs.directory(path).recursive().delete()` or `await plugins.smartfs.file(path).delete()`
|
||||
|
||||
5. **Directory Listing**:
|
||||
- Old: `plugins.smartfile.fs.listFolders(path)` or `plugins.smartfile.fs.listFoldersSync(path)`
|
||||
- New: `await plugins.smartfs.directory(path).list()` then filter by `stats.isDirectory`
|
||||
- Note: `list()` returns `IDirectoryEntry[]` with `path` and `name` properties - use `stat()` to check if directory
|
||||
|
||||
6. **File Stats**:
|
||||
- Old: `stats.isDirectory()` (method)
|
||||
- New: `stats.isDirectory` (boolean property)
|
||||
- Old: `stats.mtimeMs`
|
||||
- New: `stats.mtime.getTime()`
|
||||
|
||||
7. **SmartFile Factory**:
|
||||
- Old: Direct SmartFile instantiation
|
||||
- New: `plugins.smartfile.SmartFileFactory.nodeFs()` then factory methods
|
||||
|
||||
**Migration Pattern:**
|
||||
All sync methods must become async. Functions that were previously synchronous (like `getProjectName()`) now return `Promise<T>` and must be awaited.
|
||||
|
||||
**Affected Modules:**
|
||||
- ts/mod_format/* (largest area - 15+ files)
|
||||
- ts/mod_commit/* (version bumping)
|
||||
- ts/mod_services/* (configuration management)
|
||||
- ts/mod_meta/* (meta repository management)
|
||||
- ts/mod_standard/* (template listing)
|
||||
- ts/mod_template/* (template operations)
|
||||
|
||||
**Previous API Changes:**
|
||||
- smartnpm requires instance creation: `new NpmRegistry()`
|
||||
- All file operations now use updated APIs
|
||||
- Type imports use `import type` for proper verbatim module syntax
|
||||
|
||||
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@git.zone/cli',
|
||||
version: '1.21.3',
|
||||
version: '2.1.0',
|
||||
description: 'A comprehensive CLI tool for enhancing and managing local development workflows with gitzone utilities, focusing on project setup, version control, code formatting, and template management.'
|
||||
}
|
||||
|
||||
@@ -142,10 +142,10 @@ export const run = async (argvArg: any) => {
|
||||
changelog = changelog.replaceAll('\n{{nextVersionDetails}}', '');
|
||||
}
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
changelog,
|
||||
plugins.path.join(paths.cwd, `changelog.md`),
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(plugins.path.join(paths.cwd, `changelog.md`))
|
||||
.encoding('utf8')
|
||||
.write(changelog);
|
||||
ui.printStep(currentStep, totalSteps, '📄 Generating changelog.md', 'done');
|
||||
|
||||
// Step 3: Staging files
|
||||
|
||||
@@ -40,8 +40,8 @@ export async function detectProjectType(): Promise<ProjectType> {
|
||||
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
|
||||
const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
|
||||
|
||||
const hasPackageJson = await plugins.smartfile.fs.fileExists(packageJsonPath);
|
||||
const hasDenoJson = await plugins.smartfile.fs.fileExists(denoJsonPath);
|
||||
const hasPackageJson = await plugins.smartfs.file(packageJsonPath).exists();
|
||||
const hasDenoJson = await plugins.smartfs.file(denoJsonPath).exists();
|
||||
|
||||
if (hasPackageJson && hasDenoJson) {
|
||||
logger.log('info', 'Detected dual project (npm + deno)');
|
||||
@@ -95,10 +95,14 @@ function calculateNewVersion(currentVersion: string, versionType: VersionType):
|
||||
* @param projectType The project type to determine which file to read
|
||||
* @returns The current version string
|
||||
*/
|
||||
function readCurrentVersion(projectType: ProjectType): string {
|
||||
async function readCurrentVersion(projectType: ProjectType): Promise<string> {
|
||||
if (projectType === 'npm' || projectType === 'both') {
|
||||
const packageJsonPath = plugins.path.join(paths.cwd, 'package.json');
|
||||
const packageJson = plugins.smartfile.fs.toObjectSync(packageJsonPath) as { version?: string };
|
||||
const content = (await plugins.smartfs
|
||||
.file(packageJsonPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const packageJson = JSON.parse(content) as { version?: string };
|
||||
|
||||
if (!packageJson.version) {
|
||||
throw new Error('package.json does not contain a version field');
|
||||
@@ -106,7 +110,11 @@ function readCurrentVersion(projectType: ProjectType): string {
|
||||
return packageJson.version;
|
||||
} else {
|
||||
const denoJsonPath = plugins.path.join(paths.cwd, 'deno.json');
|
||||
const denoConfig = plugins.smartfile.fs.toObjectSync(denoJsonPath) as { version?: string };
|
||||
const content = (await plugins.smartfs
|
||||
.file(denoJsonPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const denoConfig = JSON.parse(content) as { version?: string };
|
||||
|
||||
if (!denoConfig.version) {
|
||||
throw new Error('deno.json does not contain a version field');
|
||||
@@ -121,12 +129,16 @@ function readCurrentVersion(projectType: ProjectType): string {
|
||||
* @param newVersion The new version to write
|
||||
*/
|
||||
async function updateVersionFile(filePath: string, newVersion: string): Promise<void> {
|
||||
const config = plugins.smartfile.fs.toObjectSync(filePath) as { version?: string };
|
||||
const content = (await plugins.smartfs
|
||||
.file(filePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const config = JSON.parse(content) as { version?: string };
|
||||
config.version = newVersion;
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(config, null, 2) + '\n',
|
||||
filePath
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(filePath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(config, null, 2) + '\n');
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -162,7 +174,7 @@ export async function bumpProjectVersion(
|
||||
|
||||
try {
|
||||
// 1. Read current version
|
||||
const currentVersion = readCurrentVersion(projectType);
|
||||
const currentVersion = await readCurrentVersion(projectType);
|
||||
|
||||
// 2. Calculate new version (reuse existing function!)
|
||||
const newVersion = calculateNewVersion(currentVersion, versionType);
|
||||
|
||||
@@ -65,15 +65,15 @@ export abstract class BaseFormatter {
|
||||
normalizedPath = './' + filepath;
|
||||
}
|
||||
|
||||
await plugins.smartfile.memory.toFs(content, normalizedPath);
|
||||
await plugins.smartfs.file(normalizedPath).encoding('utf8').write(content);
|
||||
}
|
||||
|
||||
protected async createFile(filepath: string, content: string): Promise<void> {
|
||||
await plugins.smartfile.memory.toFs(content, filepath);
|
||||
await plugins.smartfs.file(filepath).encoding('utf8').write(content);
|
||||
}
|
||||
|
||||
protected async deleteFile(filepath: string): Promise<void> {
|
||||
await plugins.smartfile.fs.remove(filepath);
|
||||
await plugins.smartfs.file(filepath).delete();
|
||||
}
|
||||
|
||||
protected async shouldProcessFile(filepath: string): Promise<boolean> {
|
||||
|
||||
@@ -25,7 +25,7 @@ export class ChangeCache {
|
||||
}
|
||||
|
||||
async initialize(): Promise<void> {
|
||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
||||
await plugins.smartfs.directory(this.cacheDir).recursive().create();
|
||||
}
|
||||
|
||||
async getManifest(): Promise<ICacheManifest> {
|
||||
@@ -35,13 +35,16 @@ export class ChangeCache {
|
||||
files: [],
|
||||
};
|
||||
|
||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
||||
const exists = await plugins.smartfs.file(this.manifestPath).exists();
|
||||
if (!exists) {
|
||||
return defaultManifest;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(this.manifestPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const manifest = JSON.parse(content);
|
||||
|
||||
// Validate the manifest structure
|
||||
@@ -57,7 +60,7 @@ export class ChangeCache {
|
||||
);
|
||||
// Try to delete the corrupted file
|
||||
try {
|
||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
||||
await plugins.smartfs.file(this.manifestPath).delete();
|
||||
} catch (removeError) {
|
||||
// Ignore removal errors
|
||||
}
|
||||
@@ -72,11 +75,14 @@ export class ChangeCache {
|
||||
}
|
||||
|
||||
// Ensure directory exists
|
||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
||||
await plugins.smartfs.directory(this.cacheDir).recursive().create();
|
||||
|
||||
// Write directly with proper JSON stringification
|
||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
||||
await plugins.smartfs
|
||||
.file(this.manifestPath)
|
||||
.encoding('utf8')
|
||||
.write(jsonContent);
|
||||
}
|
||||
|
||||
async hasFileChanged(filePath: string): Promise<boolean> {
|
||||
@@ -85,20 +91,23 @@ export class ChangeCache {
|
||||
: plugins.path.join(paths.cwd, filePath);
|
||||
|
||||
// Check if file exists
|
||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
||||
const exists = await plugins.smartfs.file(absolutePath).exists();
|
||||
if (!exists) {
|
||||
return true; // File doesn't exist, so it's "changed" (will be created)
|
||||
}
|
||||
|
||||
// Get current file stats
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||
|
||||
// Skip directories
|
||||
if (stats.isDirectory()) {
|
||||
if (stats.isDirectory) {
|
||||
return false; // Directories are not processed
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const currentChecksum = this.calculateChecksum(content);
|
||||
|
||||
// Get cached info
|
||||
@@ -113,7 +122,7 @@ export class ChangeCache {
|
||||
return (
|
||||
cachedFile.checksum !== currentChecksum ||
|
||||
cachedFile.size !== stats.size ||
|
||||
cachedFile.modified !== stats.mtimeMs
|
||||
cachedFile.modified !== stats.mtime.getTime()
|
||||
);
|
||||
}
|
||||
|
||||
@@ -123,14 +132,17 @@ export class ChangeCache {
|
||||
: plugins.path.join(paths.cwd, filePath);
|
||||
|
||||
// Get current file stats
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||
|
||||
// Skip directories
|
||||
if (stats.isDirectory()) {
|
||||
if (stats.isDirectory) {
|
||||
return; // Don't cache directories
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
// Update manifest
|
||||
@@ -140,7 +152,7 @@ export class ChangeCache {
|
||||
const cacheEntry: IFileCache = {
|
||||
path: filePath,
|
||||
checksum,
|
||||
modified: stats.mtimeMs,
|
||||
modified: stats.mtime.getTime(),
|
||||
size: stats.size,
|
||||
};
|
||||
|
||||
@@ -176,7 +188,7 @@ export class ChangeCache {
|
||||
? file.path
|
||||
: plugins.path.join(paths.cwd, file.path);
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(absolutePath)) {
|
||||
if (await plugins.smartfs.file(absolutePath).exists()) {
|
||||
validFiles.push(file);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,14 +21,15 @@ export class DiffReporter {
|
||||
}
|
||||
|
||||
try {
|
||||
const exists = await plugins.smartfile.fs.fileExists(change.path);
|
||||
const exists = await plugins.smartfs.file(change.path).exists();
|
||||
if (!exists) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const currentContent = await plugins.smartfile.fs.toStringSync(
|
||||
change.path,
|
||||
);
|
||||
const currentContent = (await plugins.smartfs
|
||||
.file(change.path)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
|
||||
// For planned changes, we need the new content
|
||||
if (!change.content) {
|
||||
@@ -107,10 +108,10 @@ export class DiffReporter {
|
||||
})),
|
||||
};
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(report, null, 2),
|
||||
outputPath,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(outputPath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(report, null, 2));
|
||||
logger.log('info', `Diff report saved to ${outputPath}`);
|
||||
}
|
||||
|
||||
|
||||
@@ -192,10 +192,10 @@ export class FormatStats {
|
||||
moduleStats: Array.from(this.stats.moduleStats.values()),
|
||||
};
|
||||
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(report, null, 2),
|
||||
outputPath,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(outputPath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(report, null, 2));
|
||||
logger.log('info', `Statistics report saved to ${outputPath}`);
|
||||
}
|
||||
|
||||
|
||||
@@ -36,21 +36,27 @@ export class RollbackManager {
|
||||
: plugins.path.join(paths.cwd, filepath);
|
||||
|
||||
// Check if file exists
|
||||
const exists = await plugins.smartfile.fs.fileExists(absolutePath);
|
||||
const exists = await plugins.smartfs.file(absolutePath).exists();
|
||||
if (!exists) {
|
||||
// File doesn't exist yet (will be created), so we skip backup
|
||||
return;
|
||||
}
|
||||
|
||||
// Read file content and metadata
|
||||
const content = plugins.smartfile.fs.toStringSync(absolutePath);
|
||||
const stats = await plugins.smartfile.fs.stat(absolutePath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const stats = await plugins.smartfs.file(absolutePath).stat();
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
// Create backup
|
||||
const backupPath = this.getBackupPath(operationId, filepath);
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(backupPath));
|
||||
await plugins.smartfile.memory.toFs(content, backupPath);
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.dirname(backupPath))
|
||||
.recursive()
|
||||
.create();
|
||||
await plugins.smartfs.file(backupPath).encoding('utf8').write(content);
|
||||
|
||||
// Update operation
|
||||
operation.files.push({
|
||||
@@ -84,7 +90,10 @@ export class RollbackManager {
|
||||
|
||||
// Verify backup integrity
|
||||
const backupPath = this.getBackupPath(operationId, file.path);
|
||||
const backupContent = plugins.smartfile.fs.toStringSync(backupPath);
|
||||
const backupContent = await plugins.smartfs
|
||||
.file(backupPath)
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
const backupChecksum = this.calculateChecksum(backupContent);
|
||||
|
||||
if (backupChecksum !== file.checksum) {
|
||||
@@ -92,7 +101,10 @@ export class RollbackManager {
|
||||
}
|
||||
|
||||
// Restore file
|
||||
await plugins.smartfile.memory.toFs(file.originalContent, absolutePath);
|
||||
await plugins.smartfs
|
||||
.file(absolutePath)
|
||||
.encoding('utf8')
|
||||
.write(file.originalContent);
|
||||
|
||||
// Restore permissions
|
||||
const mode = parseInt(file.permissions, 8);
|
||||
@@ -129,7 +141,7 @@ export class RollbackManager {
|
||||
'operations',
|
||||
operation.id,
|
||||
);
|
||||
await plugins.smartfile.fs.remove(operationDir);
|
||||
await plugins.smartfs.directory(operationDir).recursive().delete();
|
||||
|
||||
// Remove from manifest
|
||||
manifest.operations = manifest.operations.filter(
|
||||
@@ -148,13 +160,16 @@ export class RollbackManager {
|
||||
|
||||
for (const file of operation.files) {
|
||||
const backupPath = this.getBackupPath(operationId, file.path);
|
||||
const exists = await plugins.smartfile.fs.fileExists(backupPath);
|
||||
const exists = await plugins.smartfs.file(backupPath).exists();
|
||||
|
||||
if (!exists) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const content = plugins.smartfile.fs.toStringSync(backupPath);
|
||||
const content = await plugins.smartfs
|
||||
.file(backupPath)
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
const checksum = this.calculateChecksum(content);
|
||||
|
||||
if (checksum !== file.checksum) {
|
||||
@@ -171,10 +186,11 @@ export class RollbackManager {
|
||||
}
|
||||
|
||||
private async ensureBackupDir(): Promise<void> {
|
||||
await plugins.smartfile.fs.ensureDir(this.backupDir);
|
||||
await plugins.smartfile.fs.ensureDir(
|
||||
plugins.path.join(this.backupDir, 'operations'),
|
||||
);
|
||||
await plugins.smartfs.directory(this.backupDir).recursive().create();
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.join(this.backupDir, 'operations'))
|
||||
.recursive()
|
||||
.create();
|
||||
}
|
||||
|
||||
private generateOperationId(): string {
|
||||
@@ -204,13 +220,16 @@ export class RollbackManager {
|
||||
private async getManifest(): Promise<{ operations: IFormatOperation[] }> {
|
||||
const defaultManifest = { operations: [] };
|
||||
|
||||
const exists = await plugins.smartfile.fs.fileExists(this.manifestPath);
|
||||
const exists = await plugins.smartfs.file(this.manifestPath).exists();
|
||||
if (!exists) {
|
||||
return defaultManifest;
|
||||
}
|
||||
|
||||
try {
|
||||
const content = plugins.smartfile.fs.toStringSync(this.manifestPath);
|
||||
const content = (await plugins.smartfs
|
||||
.file(this.manifestPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const manifest = JSON.parse(content);
|
||||
|
||||
// Validate the manifest structure
|
||||
@@ -228,7 +247,7 @@ export class RollbackManager {
|
||||
);
|
||||
// Try to delete the corrupted file
|
||||
try {
|
||||
await plugins.smartfile.fs.remove(this.manifestPath);
|
||||
await plugins.smartfs.file(this.manifestPath).delete();
|
||||
} catch (removeError) {
|
||||
// Ignore removal errors
|
||||
}
|
||||
@@ -249,7 +268,10 @@ export class RollbackManager {
|
||||
|
||||
// Write directly with proper JSON stringification
|
||||
const jsonContent = JSON.stringify(manifest, null, 2);
|
||||
await plugins.smartfile.memory.toFs(jsonContent, this.manifestPath);
|
||||
await plugins.smartfs
|
||||
.file(this.manifestPath)
|
||||
.encoding('utf8')
|
||||
.write(jsonContent);
|
||||
}
|
||||
|
||||
private async getOperation(
|
||||
|
||||
@@ -13,12 +13,12 @@ const filesToDelete = [
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
for (const relativeFilePath of filesToDelete) {
|
||||
const fileExists = plugins.smartfile.fs.fileExistsSync(relativeFilePath);
|
||||
const fileExists = await plugins.smartfs.file(relativeFilePath).exists();
|
||||
if (fileExists) {
|
||||
logger.log('info', `Found ${relativeFilePath}! Removing it!`);
|
||||
plugins.smartfile.fs.removeSync(
|
||||
plugins.path.join(paths.cwd, relativeFilePath),
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(plugins.path.join(paths.cwd, relativeFilePath))
|
||||
.delete();
|
||||
} else {
|
||||
logger.log('info', `Project is free of ${relativeFilePath}`);
|
||||
}
|
||||
|
||||
@@ -24,7 +24,12 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
try {
|
||||
// Handle glob patterns
|
||||
const files = await plugins.smartfile.fs.listFileTree('.', pattern.from);
|
||||
const entries = await plugins.smartfs
|
||||
.directory('.')
|
||||
.recursive()
|
||||
.filter(pattern.from)
|
||||
.list();
|
||||
const files = entries.map((entry) => entry.path);
|
||||
|
||||
for (const file of files) {
|
||||
const sourcePath = file;
|
||||
@@ -46,10 +51,13 @@ export const run = async (projectArg: Project) => {
|
||||
}
|
||||
|
||||
// Ensure destination directory exists
|
||||
await plugins.smartfile.fs.ensureDir(plugins.path.dirname(destPath));
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.dirname(destPath))
|
||||
.recursive()
|
||||
.create();
|
||||
|
||||
// Copy file
|
||||
await plugins.smartfile.fs.copy(sourcePath, destPath);
|
||||
await plugins.smartfs.file(sourcePath).copy(destPath);
|
||||
logger.log('info', `Copied ${sourcePath} to ${destPath}`);
|
||||
}
|
||||
} catch (error) {
|
||||
|
||||
@@ -7,13 +7,15 @@ import { logger } from '../gitzone.logging.js';
|
||||
const gitignorePath = plugins.path.join(paths.cwd, './.gitignore');
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const gitignoreExists = await plugins.smartfile.fs.fileExists(gitignorePath);
|
||||
const gitignoreExists = await plugins.smartfs.file(gitignorePath).exists();
|
||||
let customContent = '';
|
||||
|
||||
if (gitignoreExists) {
|
||||
// lets get the existing gitignore file
|
||||
const existingGitIgnoreString =
|
||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
const existingGitIgnoreString = (await plugins.smartfs
|
||||
.file(gitignorePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
|
||||
// Check for different custom section markers
|
||||
const customMarkers = ['#------# custom', '# custom'];
|
||||
@@ -34,12 +36,17 @@ export const run = async (projectArg: Project) => {
|
||||
|
||||
// Append the custom content if it exists
|
||||
if (customContent) {
|
||||
const newGitignoreContent =
|
||||
plugins.smartfile.fs.toStringSync(gitignorePath);
|
||||
const newGitignoreContent = (await plugins.smartfs
|
||||
.file(gitignorePath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
// The template already ends with "#------# custom", so just append the content
|
||||
const finalContent =
|
||||
newGitignoreContent.trimEnd() + '\n' + customContent + '\n';
|
||||
await plugins.smartfile.fs.toFs(finalContent, gitignorePath);
|
||||
await plugins.smartfs
|
||||
.file(gitignorePath)
|
||||
.encoding('utf8')
|
||||
.write(finalContent);
|
||||
logger.log('info', 'Updated .gitignore while preserving custom section!');
|
||||
} else {
|
||||
logger.log('info', 'Added a .gitignore!');
|
||||
|
||||
@@ -7,9 +7,9 @@ import { logger } from '../gitzone.logging.js';
|
||||
const incompatibleLicenses: string[] = ['AGPL', 'GPL', 'SSPL'];
|
||||
|
||||
export const run = async (projectArg: Project) => {
|
||||
const nodeModulesInstalled = await plugins.smartfile.fs.isDirectory(
|
||||
plugins.path.join(paths.cwd, 'node_modules'),
|
||||
);
|
||||
const nodeModulesInstalled = await plugins.smartfs
|
||||
.directory(plugins.path.join(paths.cwd, 'node_modules'))
|
||||
.exists();
|
||||
if (!nodeModulesInstalled) {
|
||||
logger.log('warn', 'No node_modules found. Skipping license check');
|
||||
return;
|
||||
|
||||
@@ -174,9 +174,11 @@ export const run = async (projectArg: Project) => {
|
||||
);
|
||||
|
||||
// set overrides
|
||||
const overrides = plugins.smartfile.fs.toObjectSync(
|
||||
plugins.path.join(paths.assetsDir, 'overrides.json'),
|
||||
);
|
||||
const overridesContent = (await plugins.smartfs
|
||||
.file(plugins.path.join(paths.assetsDir, 'overrides.json'))
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const overrides = JSON.parse(overridesContent);
|
||||
packageJson.pnpm = packageJson.pnpm || {};
|
||||
packageJson.pnpm.overrides = overrides;
|
||||
|
||||
|
||||
@@ -6,25 +6,22 @@ export const run = async () => {
|
||||
const readmeHintsPath = plugins.path.join(paths.cwd, 'readme.hints.md');
|
||||
|
||||
// Check and initialize readme.md if it doesn't exist
|
||||
const readmeExists = await plugins.smartfile.fs.fileExists(readmePath);
|
||||
const readmeExists = await plugins.smartfs.file(readmePath).exists();
|
||||
if (!readmeExists) {
|
||||
await plugins.smartfile.fs.toFs(
|
||||
'# Project Readme\n\nThis is the initial readme file.',
|
||||
readmePath,
|
||||
);
|
||||
await plugins.smartfs.file(readmePath)
|
||||
.encoding('utf8')
|
||||
.write('# Project Readme\n\nThis is the initial readme file.');
|
||||
console.log('Initialized readme.md');
|
||||
} else {
|
||||
console.log('readme.md already exists');
|
||||
}
|
||||
|
||||
// Check and initialize readme.hints.md if it doesn't exist
|
||||
const readmeHintsExists =
|
||||
await plugins.smartfile.fs.fileExists(readmeHintsPath);
|
||||
const readmeHintsExists = await plugins.smartfs.file(readmeHintsPath).exists();
|
||||
if (!readmeHintsExists) {
|
||||
await plugins.smartfile.fs.toFs(
|
||||
'# Project Readme Hints\n\nThis is the initial readme hints file.',
|
||||
readmeHintsPath,
|
||||
);
|
||||
await plugins.smartfs.file(readmeHintsPath)
|
||||
.encoding('utf8')
|
||||
.write('# Project Readme Hints\n\nThis is the initial readme hints file.');
|
||||
console.log('Initialized readme.hints.md');
|
||||
} else {
|
||||
console.log('readme.hints.md already exists');
|
||||
|
||||
@@ -7,10 +7,11 @@ import { Project } from '../classes.project.js';
|
||||
export const run = async (projectArg: Project) => {
|
||||
// lets care about tsconfig.json
|
||||
logger.log('info', 'Formatting tsconfig.json...');
|
||||
const tsconfigSmartfile = await plugins.smartfile.SmartFile.fromFilePath(
|
||||
const factory = plugins.smartfile.SmartFileFactory.nodeFs();
|
||||
const tsconfigSmartfile = await factory.fromFilePath(
|
||||
plugins.path.join(paths.cwd, 'tsconfig.json'),
|
||||
);
|
||||
const tsconfigObject = JSON.parse(tsconfigSmartfile.contentBuffer.toString());
|
||||
const tsconfigObject = JSON.parse(tsconfigSmartfile.parseContentAsString());
|
||||
tsconfigObject.compilerOptions = tsconfigObject.compilerOptions || {};
|
||||
tsconfigObject.compilerOptions.baseUrl = '.';
|
||||
tsconfigObject.compilerOptions.paths = {};
|
||||
@@ -23,8 +24,8 @@ export const run = async (projectArg: Project) => {
|
||||
`./${publishModule}/index.js`,
|
||||
];
|
||||
}
|
||||
tsconfigSmartfile.setContentsFromString(
|
||||
JSON.stringify(tsconfigObject, null, 2),
|
||||
);
|
||||
await tsconfigSmartfile.editContentAsString(async () => {
|
||||
return JSON.stringify(tsconfigObject, null, 2);
|
||||
});
|
||||
await tsconfigSmartfile.write();
|
||||
};
|
||||
|
||||
@@ -20,7 +20,7 @@ export class CleanupFormatter extends BaseFormatter {
|
||||
];
|
||||
|
||||
for (const file of filesToRemove) {
|
||||
const exists = await plugins.smartfile.fs.fileExists(file);
|
||||
const exists = await plugins.smartfs.file(file).exists();
|
||||
if (exists) {
|
||||
changes.push({
|
||||
type: 'delete',
|
||||
|
||||
@@ -41,16 +41,23 @@ export class PrettierFormatter extends BaseFormatter {
|
||||
// Add files from TypeScript directories
|
||||
for (const dir of includeDirs) {
|
||||
const globPattern = `${dir}/**/*.${extensions}`;
|
||||
const dirFiles = await plugins.smartfile.fs.listFileTree(
|
||||
'.',
|
||||
globPattern,
|
||||
);
|
||||
const dirEntries = await plugins.smartfs
|
||||
.directory('.')
|
||||
.recursive()
|
||||
.filter(globPattern)
|
||||
.list();
|
||||
const dirFiles = dirEntries.map((entry) => entry.path);
|
||||
allFiles.push(...dirFiles);
|
||||
}
|
||||
|
||||
// Add root config files
|
||||
for (const pattern of rootConfigFiles) {
|
||||
const rootFiles = await plugins.smartfile.fs.listFileTree('.', pattern);
|
||||
const rootEntries = await plugins.smartfs
|
||||
.directory('.')
|
||||
.recursive()
|
||||
.filter(pattern)
|
||||
.list();
|
||||
const rootFiles = rootEntries.map((entry) => entry.path);
|
||||
// Only include files at root level (no slashes in path)
|
||||
const rootLevelFiles = rootFiles.filter((f) => !f.includes('/'));
|
||||
allFiles.push(...rootLevelFiles);
|
||||
@@ -66,8 +73,8 @@ export class PrettierFormatter extends BaseFormatter {
|
||||
const validFiles: string[] = [];
|
||||
for (const file of files) {
|
||||
try {
|
||||
const stats = await plugins.smartfile.fs.stat(file);
|
||||
if (!stats.isDirectory()) {
|
||||
const stats = await plugins.smartfs.file(file).stat();
|
||||
if (!stats.isDirectory) {
|
||||
validFiles.push(file);
|
||||
}
|
||||
} catch (error) {
|
||||
@@ -148,7 +155,10 @@ export class PrettierFormatter extends BaseFormatter {
|
||||
}
|
||||
|
||||
// Read current content
|
||||
const content = plugins.smartfile.fs.toStringSync(change.path);
|
||||
const content = (await plugins.smartfs
|
||||
.file(change.path)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
|
||||
// Format with prettier
|
||||
const prettier = await import('prettier');
|
||||
|
||||
@@ -101,7 +101,12 @@ export let run = async (
|
||||
// Plan phase
|
||||
logger.log('info', 'Analyzing project for format operations...');
|
||||
let plan = options.fromPlan
|
||||
? JSON.parse(await plugins.smartfile.fs.toStringSync(options.fromPlan))
|
||||
? JSON.parse(
|
||||
(await plugins.smartfs
|
||||
.file(options.fromPlan)
|
||||
.encoding('utf8')
|
||||
.read()) as string,
|
||||
)
|
||||
: await planner.planFormat(activeFormatters);
|
||||
|
||||
// Display plan
|
||||
@@ -109,10 +114,10 @@ export let run = async (
|
||||
|
||||
// Save plan if requested
|
||||
if (options.savePlan) {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(plan, null, 2),
|
||||
options.savePlan,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(options.savePlan)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(plan, null, 2));
|
||||
logger.log('info', `Plan saved to ${options.savePlan}`);
|
||||
}
|
||||
|
||||
|
||||
@@ -48,15 +48,17 @@ export class Meta {
|
||||
public async readDirectory() {
|
||||
await this.syncToRemote(true);
|
||||
logger.log('info', `reading directory`);
|
||||
const metaFileExists = plugins.smartfile.fs.fileExistsSync(
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
const metaFileExists = await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.exists();
|
||||
if (!metaFileExists) {
|
||||
throw new Error(`meta file does not exist at ${this.filePaths.metaJson}`);
|
||||
}
|
||||
this.metaRepoData = plugins.smartfile.fs.toObjectSync(
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
const content = (await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
this.metaRepoData = JSON.parse(content);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,15 +80,15 @@ export class Meta {
|
||||
*/
|
||||
public async writeToDisk() {
|
||||
// write .meta.json to disk
|
||||
plugins.smartfile.memory.toFsSync(
|
||||
JSON.stringify(this.metaRepoData, null, 2),
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(this.metaRepoData, null, 2));
|
||||
// write .gitignore to disk
|
||||
plugins.smartfile.memory.toFsSync(
|
||||
await this.generateGitignore(),
|
||||
this.filePaths.gitIgnore,
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.gitIgnore)
|
||||
.encoding('utf8')
|
||||
.write(await this.generateGitignore());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -112,10 +114,25 @@ export class Meta {
|
||||
*/
|
||||
public async updateLocalRepos() {
|
||||
await this.syncToRemote();
|
||||
const projects = plugins.smartfile.fs.toObjectSync(
|
||||
this.filePaths.metaJson,
|
||||
).projects;
|
||||
const preExistingFolders = plugins.smartfile.fs.listFoldersSync(this.cwd);
|
||||
const metaContent = (await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const projects = JSON.parse(metaContent).projects;
|
||||
const entries = await plugins.smartfs.directory(this.cwd).list();
|
||||
const preExistingFolders: string[] = [];
|
||||
for (const entry of entries) {
|
||||
try {
|
||||
const stats = await plugins.smartfs
|
||||
.file(plugins.path.join(this.cwd, entry.path))
|
||||
.stat();
|
||||
if (stats.isDirectory) {
|
||||
preExistingFolders.push(entry.name);
|
||||
}
|
||||
} catch {
|
||||
// Skip entries that can't be accessed
|
||||
}
|
||||
}
|
||||
for (const preExistingFolderArg of preExistingFolders) {
|
||||
if (
|
||||
preExistingFolderArg !== '.git' &&
|
||||
@@ -143,9 +160,17 @@ export class Meta {
|
||||
await this.sortMetaRepoData();
|
||||
const missingRepos: string[] = [];
|
||||
for (const key of Object.keys(this.metaRepoData.projects)) {
|
||||
plugins.smartfile.fs.isDirectory(key)
|
||||
? logger.log('ok', `${key} -> is already cloned`)
|
||||
: missingRepos.push(key);
|
||||
const fullPath = plugins.path.join(this.cwd, key);
|
||||
try {
|
||||
const stats = await plugins.smartfs.file(fullPath).stat();
|
||||
if (stats.isDirectory) {
|
||||
logger.log('ok', `${key} -> is already cloned`);
|
||||
} else {
|
||||
missingRepos.push(key);
|
||||
}
|
||||
} catch {
|
||||
missingRepos.push(key);
|
||||
}
|
||||
}
|
||||
|
||||
logger.log('info', `found ${missingRepos.length} missing repos`);
|
||||
@@ -165,7 +190,20 @@ export class Meta {
|
||||
await this.syncToRemote();
|
||||
|
||||
// go recursive
|
||||
const folders = await plugins.smartfile.fs.listFolders(this.cwd);
|
||||
const listEntries = await plugins.smartfs.directory(this.cwd).list();
|
||||
const folders: string[] = [];
|
||||
for (const entry of listEntries) {
|
||||
try {
|
||||
const stats = await plugins.smartfs
|
||||
.file(plugins.path.join(this.cwd, entry.path))
|
||||
.stat();
|
||||
if (stats.isDirectory) {
|
||||
folders.push(entry.name);
|
||||
}
|
||||
} catch {
|
||||
// Skip entries that can't be accessed
|
||||
}
|
||||
}
|
||||
const childMetaRepositories: string[] = [];
|
||||
for (const folder of folders) {
|
||||
logger.log('info', folder);
|
||||
@@ -180,26 +218,30 @@ export class Meta {
|
||||
*/
|
||||
public async initProject() {
|
||||
await this.syncToRemote(true);
|
||||
const fileExists = await plugins.smartfile.fs.fileExists(
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
const fileExists = await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.exists();
|
||||
if (!fileExists) {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.metaJson)
|
||||
.encoding('utf8')
|
||||
.write(
|
||||
JSON.stringify({
|
||||
projects: {},
|
||||
}),
|
||||
this.filePaths.metaJson,
|
||||
);
|
||||
logger.log(
|
||||
`success`,
|
||||
`created a new .meta.json in directory ${this.cwd}`,
|
||||
);
|
||||
await plugins.smartfile.memory.toFs(
|
||||
await plugins.smartfs
|
||||
.file(this.filePaths.packageJson)
|
||||
.encoding('utf8')
|
||||
.write(
|
||||
JSON.stringify({
|
||||
name: this.dirName,
|
||||
version: '1.0.0',
|
||||
}),
|
||||
this.filePaths.packageJson,
|
||||
);
|
||||
logger.log(
|
||||
`success`,
|
||||
@@ -264,9 +306,10 @@ export class Meta {
|
||||
await this.writeToDisk();
|
||||
|
||||
logger.log('info', 'removing directory from cwd');
|
||||
await plugins.smartfile.fs.remove(
|
||||
plugins.path.join(paths.cwd, projectNameArg),
|
||||
);
|
||||
await plugins.smartfs
|
||||
.directory(plugins.path.join(paths.cwd, projectNameArg))
|
||||
.recursive()
|
||||
.delete();
|
||||
await this.updateLocalRepos();
|
||||
}
|
||||
}
|
||||
|
||||
190
ts/mod_services/classes.globalregistry.ts
Normal file
190
ts/mod_services/classes.globalregistry.ts
Normal file
@@ -0,0 +1,190 @@
|
||||
import * as plugins from '../plugins.js';
|
||||
import { DockerContainer } from './classes.dockercontainer.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
/**
 * A single project tracked by the global registry: where it lives on disk,
 * the Docker container names it owns, and the host ports they were given.
 */
export interface IRegisteredProject {
  // Absolute path of the project directory; also used as the registry key.
  projectPath: string;
  projectName: string;
  // Docker container names per service (optional per service).
  containers: {
    mongo?: string;
    minio?: string;
    elasticsearch?: string;
  };
  // Host ports per service (optional per service).
  ports: {
    mongo?: number;
    s3?: number;
    s3Console?: number;
    elasticsearch?: number;
  };
  // Service identifiers enabled for this project (e.g. 'mongodb', 'minio').
  enabledServices: string[];
  // Unix epoch milliseconds of the last registration/touch (set via Date.now()).
  lastActive: number;
}

/**
 * Shape of the data persisted in the user-level key-value store.
 */
export interface IGlobalRegistryData {
  projects: { [projectPath: string]: IRegisteredProject };
}
|
||||
|
||||
export class GlobalRegistry {
|
||||
private static instance: GlobalRegistry | null = null;
|
||||
private kvStore: plugins.npmextra.KeyValueStore<IGlobalRegistryData>;
|
||||
private docker: DockerContainer;
|
||||
|
||||
private constructor() {
|
||||
this.kvStore = new plugins.npmextra.KeyValueStore({
|
||||
typeArg: 'userHomeDir',
|
||||
identityArg: 'gitzone-services',
|
||||
});
|
||||
this.docker = new DockerContainer();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the singleton instance
|
||||
*/
|
||||
public static getInstance(): GlobalRegistry {
|
||||
if (!GlobalRegistry.instance) {
|
||||
GlobalRegistry.instance = new GlobalRegistry();
|
||||
}
|
||||
return GlobalRegistry.instance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register or update a project in the global registry
|
||||
*/
|
||||
public async registerProject(data: Omit<IRegisteredProject, 'lastActive'>): Promise<void> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
const projects = allData.projects || {};
|
||||
|
||||
projects[data.projectPath] = {
|
||||
...data,
|
||||
lastActive: Date.now(),
|
||||
};
|
||||
|
||||
await this.kvStore.writeKey('projects', projects);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a project from the registry
|
||||
*/
|
||||
public async unregisterProject(projectPath: string): Promise<void> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
const projects = allData.projects || {};
|
||||
|
||||
if (projects[projectPath]) {
|
||||
delete projects[projectPath];
|
||||
await this.kvStore.writeKey('projects', projects);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update the lastActive timestamp for a project
|
||||
*/
|
||||
public async touchProject(projectPath: string): Promise<void> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
const projects = allData.projects || {};
|
||||
|
||||
if (projects[projectPath]) {
|
||||
projects[projectPath].lastActive = Date.now();
|
||||
await this.kvStore.writeKey('projects', projects);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all registered projects
|
||||
*/
|
||||
public async getAllProjects(): Promise<{ [path: string]: IRegisteredProject }> {
|
||||
const allData = await this.kvStore.readAll();
|
||||
return allData.projects || {};
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a project is registered
|
||||
*/
|
||||
public async isRegistered(projectPath: string): Promise<boolean> {
|
||||
const projects = await this.getAllProjects();
|
||||
return !!projects[projectPath];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get status of all containers across all registered projects
|
||||
*/
|
||||
public async getGlobalStatus(): Promise<
|
||||
Array<{
|
||||
projectPath: string;
|
||||
projectName: string;
|
||||
containers: Array<{ name: string; status: string }>;
|
||||
lastActive: number;
|
||||
}>
|
||||
> {
|
||||
const projects = await this.getAllProjects();
|
||||
const result: Array<{
|
||||
projectPath: string;
|
||||
projectName: string;
|
||||
containers: Array<{ name: string; status: string }>;
|
||||
lastActive: number;
|
||||
}> = [];
|
||||
|
||||
for (const [path, project] of Object.entries(projects)) {
|
||||
const containerStatuses: Array<{ name: string; status: string }> = [];
|
||||
|
||||
for (const containerName of Object.values(project.containers)) {
|
||||
if (containerName) {
|
||||
const status = await this.docker.getStatus(containerName);
|
||||
containerStatuses.push({ name: containerName, status });
|
||||
}
|
||||
}
|
||||
|
||||
result.push({
|
||||
projectPath: path,
|
||||
projectName: project.projectName,
|
||||
containers: containerStatuses,
|
||||
lastActive: project.lastActive,
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all containers across all registered projects
|
||||
*/
|
||||
public async stopAll(): Promise<{ stopped: string[]; failed: string[] }> {
|
||||
const projects = await this.getAllProjects();
|
||||
const stopped: string[] = [];
|
||||
const failed: string[] = [];
|
||||
|
||||
for (const project of Object.values(projects)) {
|
||||
for (const containerName of Object.values(project.containers)) {
|
||||
if (containerName) {
|
||||
const status = await this.docker.getStatus(containerName);
|
||||
if (status === 'running') {
|
||||
if (await this.docker.stop(containerName)) {
|
||||
stopped.push(containerName);
|
||||
} else {
|
||||
failed.push(containerName);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return { stopped, failed };
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove stale registry entries (projects that no longer exist on disk)
|
||||
*/
|
||||
public async cleanup(): Promise<string[]> {
|
||||
const projects = await this.getAllProjects();
|
||||
const removed: string[] = [];
|
||||
|
||||
for (const projectPath of Object.keys(projects)) {
|
||||
const exists = await plugins.smartfs.directory(projectPath).exists();
|
||||
if (!exists) {
|
||||
await this.unregisterProject(projectPath);
|
||||
removed.push(projectPath);
|
||||
}
|
||||
}
|
||||
|
||||
return removed;
|
||||
}
|
||||
}
|
||||
@@ -19,6 +19,11 @@ export interface IServiceConfig {
|
||||
S3_BUCKET: string;
|
||||
S3_ENDPOINT: string;
|
||||
S3_USESSL: boolean;
|
||||
ELASTICSEARCH_HOST: string;
|
||||
ELASTICSEARCH_PORT: string;
|
||||
ELASTICSEARCH_USER: string;
|
||||
ELASTICSEARCH_PASS: string;
|
||||
ELASTICSEARCH_URL: string;
|
||||
}
|
||||
|
||||
export class ServiceConfiguration {
|
||||
@@ -61,10 +66,10 @@ export class ServiceConfiguration {
|
||||
* Save the configuration to file
|
||||
*/
|
||||
public async saveConfig(): Promise<void> {
|
||||
await plugins.smartfile.memory.toFs(
|
||||
JSON.stringify(this.config, null, 2),
|
||||
this.configPath
|
||||
);
|
||||
await plugins.smartfs
|
||||
.file(this.configPath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(this.config, null, 2));
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -72,21 +77,24 @@ export class ServiceConfiguration {
|
||||
*/
|
||||
private async ensureNogitDirectory(): Promise<void> {
|
||||
const nogitPath = plugins.path.join(process.cwd(), '.nogit');
|
||||
await plugins.smartfile.fs.ensureDir(nogitPath);
|
||||
await plugins.smartfs.directory(nogitPath).recursive().create();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if configuration file exists
|
||||
*/
|
||||
private async configExists(): Promise<boolean> {
|
||||
return plugins.smartfile.fs.fileExists(this.configPath);
|
||||
return plugins.smartfs.file(this.configPath).exists();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load configuration from file
|
||||
*/
|
||||
private async loadConfig(): Promise<void> {
|
||||
const configContent = plugins.smartfile.fs.toStringSync(this.configPath);
|
||||
const configContent = (await plugins.smartfs
|
||||
.file(this.configPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
this.config = JSON.parse(configContent);
|
||||
}
|
||||
|
||||
@@ -94,7 +102,7 @@ export class ServiceConfiguration {
|
||||
* Create default configuration
|
||||
*/
|
||||
private async createDefaultConfig(): Promise<void> {
|
||||
const projectName = helpers.getProjectName();
|
||||
const projectName = await helpers.getProjectName();
|
||||
const mongoPort = await helpers.getRandomAvailablePort();
|
||||
const s3Port = await helpers.getRandomAvailablePort();
|
||||
let s3ConsolePort = s3Port + 1;
|
||||
@@ -111,6 +119,10 @@ export class ServiceConfiguration {
|
||||
const mongoPortStr = mongoPort.toString();
|
||||
const s3Host = 'localhost';
|
||||
const s3PortStr = s3Port.toString();
|
||||
const esHost = 'localhost';
|
||||
const esPort = '9200';
|
||||
const esUser = 'elastic';
|
||||
const esPass = 'elastic';
|
||||
|
||||
this.config = {
|
||||
PROJECT_NAME: projectName,
|
||||
@@ -127,7 +139,12 @@ export class ServiceConfiguration {
|
||||
S3_SECRETKEY: 'defaultpass',
|
||||
S3_BUCKET: `${projectName}-documents`,
|
||||
S3_ENDPOINT: s3Host,
|
||||
S3_USESSL: false
|
||||
S3_USESSL: false,
|
||||
ELASTICSEARCH_HOST: esHost,
|
||||
ELASTICSEARCH_PORT: esPort,
|
||||
ELASTICSEARCH_USER: esUser,
|
||||
ELASTICSEARCH_PASS: esPass,
|
||||
ELASTICSEARCH_URL: `http://${esUser}:${esPass}@${esHost}:${esPort}`
|
||||
};
|
||||
|
||||
await this.saveConfig();
|
||||
@@ -136,13 +153,14 @@ export class ServiceConfiguration {
|
||||
logger.log('info', `📍 MongoDB port: ${mongoPort}`);
|
||||
logger.log('info', `📍 S3 API port: ${s3Port}`);
|
||||
logger.log('info', `📍 S3 Console port: ${s3ConsolePort}`);
|
||||
logger.log('info', `📍 Elasticsearch port: ${esPort}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Update missing fields in existing configuration
|
||||
*/
|
||||
private async updateMissingFields(): Promise<void> {
|
||||
const projectName = helpers.getProjectName();
|
||||
const projectName = await helpers.getProjectName();
|
||||
let updated = false;
|
||||
const fieldsAdded: string[] = [];
|
||||
|
||||
@@ -250,6 +268,38 @@ export class ServiceConfiguration {
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_HOST) {
|
||||
this.config.ELASTICSEARCH_HOST = 'localhost';
|
||||
fieldsAdded.push('ELASTICSEARCH_HOST');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_PORT) {
|
||||
this.config.ELASTICSEARCH_PORT = '9200';
|
||||
fieldsAdded.push('ELASTICSEARCH_PORT');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_USER) {
|
||||
this.config.ELASTICSEARCH_USER = 'elastic';
|
||||
fieldsAdded.push('ELASTICSEARCH_USER');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (!this.config.ELASTICSEARCH_PASS) {
|
||||
this.config.ELASTICSEARCH_PASS = 'elastic';
|
||||
fieldsAdded.push('ELASTICSEARCH_PASS');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
// Always update ELASTICSEARCH_URL based on current settings
|
||||
const oldEsUrl = this.config.ELASTICSEARCH_URL;
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
if (oldEsUrl !== this.config.ELASTICSEARCH_URL) {
|
||||
fieldsAdded.push('ELASTICSEARCH_URL');
|
||||
updated = true;
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
await this.saveConfig();
|
||||
logger.log('ok', `✅ Added missing fields: ${fieldsAdded.join(', ')}`);
|
||||
@@ -272,7 +322,8 @@ export class ServiceConfiguration {
|
||||
public getContainerNames() {
|
||||
return {
|
||||
mongo: `${this.config.PROJECT_NAME}-mongodb`,
|
||||
minio: `${this.config.PROJECT_NAME}-minio`
|
||||
minio: `${this.config.PROJECT_NAME}-minio`,
|
||||
elasticsearch: `${this.config.PROJECT_NAME}-elasticsearch`
|
||||
};
|
||||
}
|
||||
|
||||
@@ -282,7 +333,8 @@ export class ServiceConfiguration {
|
||||
public getDataDirectories() {
|
||||
return {
|
||||
mongo: plugins.path.join(process.cwd(), '.nogit', 'mongodata'),
|
||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata')
|
||||
minio: plugins.path.join(process.cwd(), '.nogit', 'miniodata'),
|
||||
elasticsearch: plugins.path.join(process.cwd(), '.nogit', 'esdata')
|
||||
};
|
||||
}
|
||||
|
||||
@@ -331,10 +383,25 @@ export class ServiceConfiguration {
|
||||
}
|
||||
}
|
||||
|
||||
// Check Elasticsearch container
|
||||
const esStatus = await this.docker.getStatus(containers.elasticsearch);
|
||||
if (esStatus !== 'not_exists') {
|
||||
const portMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||
if (portMappings && portMappings['9200']) {
|
||||
const dockerPort = portMappings['9200'];
|
||||
if (this.config.ELASTICSEARCH_PORT !== dockerPort) {
|
||||
logger.log('note', `📍 Syncing Elasticsearch port from Docker: ${dockerPort}`);
|
||||
this.config.ELASTICSEARCH_PORT = dockerPort;
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
|
||||
await this.saveConfig();
|
||||
logger.log('ok', '✅ Configuration synced with Docker containers');
|
||||
@@ -351,6 +418,7 @@ export class ServiceConfiguration {
|
||||
// Check if containers exist - if they do, ports are fine
|
||||
const mongoExists = await this.docker.exists(containers.mongo);
|
||||
const minioExists = await this.docker.exists(containers.minio);
|
||||
const esExists = await this.docker.exists(containers.elasticsearch);
|
||||
|
||||
// Only check port availability if containers don't exist
|
||||
if (!mongoExists) {
|
||||
@@ -388,10 +456,22 @@ export class ServiceConfiguration {
|
||||
}
|
||||
}
|
||||
|
||||
if (!esExists) {
|
||||
const esPort = parseInt(this.config.ELASTICSEARCH_PORT);
|
||||
if (!(await helpers.isPortAvailable(esPort))) {
|
||||
logger.log('note', `⚠️ Elasticsearch port ${esPort} is in use, finding new port...`);
|
||||
const newPort = await helpers.getRandomAvailablePort();
|
||||
this.config.ELASTICSEARCH_PORT = newPort.toString();
|
||||
logger.log('ok', `✅ New Elasticsearch port: ${newPort}`);
|
||||
updated = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (updated) {
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
|
||||
await this.saveConfig();
|
||||
}
|
||||
@@ -414,13 +494,18 @@ export class ServiceConfiguration {
|
||||
s3ConsolePort++;
|
||||
}
|
||||
|
||||
// Elasticsearch uses standard port 9200
|
||||
const esPort = '9200';
|
||||
|
||||
this.config.MONGODB_PORT = mongoPort.toString();
|
||||
this.config.S3_PORT = s3Port.toString();
|
||||
this.config.S3_CONSOLE_PORT = s3ConsolePort.toString();
|
||||
this.config.ELASTICSEARCH_PORT = esPort;
|
||||
|
||||
// Update derived fields
|
||||
this.config.MONGODB_URL = `mongodb://${this.config.MONGODB_USER}:${this.config.MONGODB_PASS}@${this.config.MONGODB_HOST}:${this.config.MONGODB_PORT}/${this.config.MONGODB_NAME}?authSource=admin`;
|
||||
this.config.S3_ENDPOINT = this.config.S3_HOST;
|
||||
this.config.ELASTICSEARCH_URL = `http://${this.config.ELASTICSEARCH_USER}:${this.config.ELASTICSEARCH_PASS}@${this.config.ELASTICSEARCH_HOST}:${this.config.ELASTICSEARCH_PORT}`;
|
||||
|
||||
await this.saveConfig();
|
||||
|
||||
@@ -428,5 +513,6 @@ export class ServiceConfiguration {
|
||||
logger.log('info', ` 📍 MongoDB: ${mongoPort}`);
|
||||
logger.log('info', ` 📍 S3 API: ${s3Port}`);
|
||||
logger.log('info', ` 📍 S3 Console: ${s3ConsolePort}`);
|
||||
logger.log('info', ` 📍 Elasticsearch: ${esPort}`);
|
||||
}
|
||||
}
|
||||
@@ -2,15 +2,19 @@ import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { ServiceConfiguration } from './classes.serviceconfiguration.js';
|
||||
import { DockerContainer } from './classes.dockercontainer.js';
|
||||
import { GlobalRegistry } from './classes.globalregistry.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export class ServiceManager {
|
||||
private config: ServiceConfiguration;
|
||||
private docker: DockerContainer;
|
||||
private enabledServices: string[] | null = null;
|
||||
private globalRegistry: GlobalRegistry;
|
||||
|
||||
constructor() {
|
||||
this.config = new ServiceConfiguration();
|
||||
this.docker = new DockerContainer();
|
||||
this.globalRegistry = GlobalRegistry.getInstance();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -27,10 +31,157 @@ export class ServiceManager {
|
||||
await this.config.loadOrCreate();
|
||||
logger.log('info', `📋 Project: ${this.config.getConfig().PROJECT_NAME}`);
|
||||
|
||||
// Load service selection from npmextra.json
|
||||
await this.loadServiceConfiguration();
|
||||
|
||||
// Validate and update ports if needed
|
||||
await this.config.validateAndUpdatePorts();
|
||||
}
|
||||
|
||||
/**
|
||||
* Load service configuration from npmextra.json
|
||||
*/
|
||||
private async loadServiceConfiguration(): Promise<void> {
|
||||
const npmextraConfig = new plugins.npmextra.Npmextra(process.cwd());
|
||||
const gitzoneConfig = npmextraConfig.dataFor<any>('gitzone', {});
|
||||
|
||||
// Check if services array exists
|
||||
if (!gitzoneConfig.services || !Array.isArray(gitzoneConfig.services) || gitzoneConfig.services.length === 0) {
|
||||
// Prompt user to select services
|
||||
const smartinteract = new plugins.smartinteract.SmartInteract();
|
||||
const response = await smartinteract.askQuestion({
|
||||
name: 'services',
|
||||
type: 'checkbox',
|
||||
message: 'Which services do you want to enable for this project?',
|
||||
choices: [
|
||||
{ name: 'MongoDB', value: 'mongodb' },
|
||||
{ name: 'MinIO (S3)', value: 'minio' },
|
||||
{ name: 'Elasticsearch', value: 'elasticsearch' }
|
||||
],
|
||||
default: ['mongodb', 'minio', 'elasticsearch']
|
||||
});
|
||||
|
||||
this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
|
||||
|
||||
// Save to npmextra.json
|
||||
await this.saveServiceConfiguration(this.enabledServices);
|
||||
} else {
|
||||
this.enabledServices = gitzoneConfig.services;
|
||||
logger.log('info', `🔧 Enabled services: ${this.enabledServices.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save service configuration to npmextra.json
|
||||
*/
|
||||
private async saveServiceConfiguration(services: string[]): Promise<void> {
|
||||
const npmextraPath = plugins.path.join(process.cwd(), 'npmextra.json');
|
||||
let npmextraData: any = {};
|
||||
|
||||
// Read existing npmextra.json if it exists
|
||||
if (await plugins.smartfs.file(npmextraPath).exists()) {
|
||||
const content = await plugins.smartfs.file(npmextraPath).encoding('utf8').read();
|
||||
npmextraData = JSON.parse(content as string);
|
||||
}
|
||||
|
||||
// Update gitzone.services
|
||||
if (!npmextraData.gitzone) {
|
||||
npmextraData.gitzone = {};
|
||||
}
|
||||
npmextraData.gitzone.services = services;
|
||||
|
||||
// Write back to npmextra.json
|
||||
await plugins.smartfs
|
||||
.file(npmextraPath)
|
||||
.encoding('utf8')
|
||||
.write(JSON.stringify(npmextraData, null, 2));
|
||||
|
||||
logger.log('ok', `✅ Saved service configuration to npmextra.json`);
|
||||
logger.log('info', `🔧 Enabled services: ${services.join(', ')}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a service is enabled
|
||||
*/
|
||||
private isServiceEnabled(service: string): boolean {
|
||||
if (!this.enabledServices) {
|
||||
return true; // If no configuration, enable all
|
||||
}
|
||||
return this.enabledServices.includes(service);
|
||||
}
|
||||
|
||||
/**
|
||||
* Register this project with the global registry
|
||||
*/
|
||||
private async registerWithGlobalRegistry(): Promise<void> {
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
|
||||
await this.globalRegistry.registerProject({
|
||||
projectPath: process.cwd(),
|
||||
projectName: config.PROJECT_NAME,
|
||||
containers: {
|
||||
mongo: containers.mongo,
|
||||
minio: containers.minio,
|
||||
elasticsearch: containers.elasticsearch,
|
||||
},
|
||||
ports: {
|
||||
mongo: parseInt(config.MONGODB_PORT),
|
||||
s3: parseInt(config.S3_PORT),
|
||||
s3Console: parseInt(config.S3_CONSOLE_PORT),
|
||||
elasticsearch: parseInt(config.ELASTICSEARCH_PORT),
|
||||
},
|
||||
enabledServices: this.enabledServices || ['mongodb', 'minio', 'elasticsearch'],
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Start all enabled services
|
||||
*/
|
||||
public async startAll(): Promise<void> {
|
||||
let first = true;
|
||||
if (this.isServiceEnabled('mongodb')) {
|
||||
if (!first) console.log();
|
||||
await this.startMongoDB();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('minio')) {
|
||||
if (!first) console.log();
|
||||
await this.startMinIO();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('elasticsearch')) {
|
||||
if (!first) console.log();
|
||||
await this.startElasticsearch();
|
||||
first = false;
|
||||
}
|
||||
|
||||
// Register with global registry
|
||||
await this.registerWithGlobalRegistry();
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all enabled services
|
||||
*/
|
||||
public async stopAll(): Promise<void> {
|
||||
let first = true;
|
||||
if (this.isServiceEnabled('mongodb')) {
|
||||
if (!first) console.log();
|
||||
await this.stopMongoDB();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('minio')) {
|
||||
if (!first) console.log();
|
||||
await this.stopMinIO();
|
||||
first = false;
|
||||
}
|
||||
if (this.isServiceEnabled('elasticsearch')) {
|
||||
if (!first) console.log();
|
||||
await this.stopElasticsearch();
|
||||
first = false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Start MongoDB service
|
||||
*/
|
||||
@@ -42,7 +193,7 @@ export class ServiceManager {
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfile.fs.ensureDir(directories.mongo);
|
||||
await plugins.smartfs.directory(directories.mongo).recursive().create();
|
||||
|
||||
const status = await this.docker.getStatus(containers.mongo);
|
||||
|
||||
@@ -141,7 +292,7 @@ export class ServiceManager {
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfile.fs.ensureDir(directories.minio);
|
||||
await plugins.smartfs.directory(directories.minio).recursive().create();
|
||||
|
||||
const status = await this.docker.getStatus(containers.minio);
|
||||
|
||||
@@ -260,6 +411,102 @@ export class ServiceManager {
|
||||
logger.log('info', ` Console: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT} (login: ${config.S3_ACCESSKEY}/***)`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start Elasticsearch service
|
||||
*/
|
||||
public async startElasticsearch(): Promise<void> {
|
||||
logger.log('note', '📦 Elasticsearch:');
|
||||
|
||||
const config = this.config.getConfig();
|
||||
const containers = this.config.getContainerNames();
|
||||
const directories = this.config.getDataDirectories();
|
||||
|
||||
// Ensure data directory exists
|
||||
await plugins.smartfs.directory(directories.elasticsearch).recursive().create();
|
||||
|
||||
const status = await this.docker.getStatus(containers.elasticsearch);
|
||||
|
||||
switch (status) {
|
||||
case 'running':
|
||||
logger.log('ok', ' Already running ✓');
|
||||
break;
|
||||
|
||||
case 'stopped':
|
||||
// Check if port mapping matches config
|
||||
const esPortMappings = await this.docker.getPortMappings(containers.elasticsearch);
|
||||
if (esPortMappings && esPortMappings['9200'] !== config.ELASTICSEARCH_PORT) {
|
||||
logger.log('note', ' Port configuration changed, recreating container...');
|
||||
await this.docker.remove(containers.elasticsearch, true);
|
||||
// Fall through to create new container
|
||||
const success = await this.docker.run({
|
||||
name: containers.elasticsearch,
|
||||
image: 'elasticsearch:8.11.0',
|
||||
ports: {
|
||||
[`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
|
||||
},
|
||||
volumes: {
|
||||
[directories.elasticsearch]: '/usr/share/elasticsearch/data'
|
||||
},
|
||||
environment: {
|
||||
'discovery.type': 'single-node',
|
||||
'xpack.security.enabled': 'true',
|
||||
'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
|
||||
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
|
||||
},
|
||||
restart: 'unless-stopped'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Recreated with new port ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to recreate container');
|
||||
}
|
||||
} else {
|
||||
// Ports match, just start the container
|
||||
if (await this.docker.start(containers.elasticsearch)) {
|
||||
logger.log('ok', ' Started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to start');
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case 'not_exists':
|
||||
logger.log('note', ' Creating container...');
|
||||
|
||||
const success = await this.docker.run({
|
||||
name: containers.elasticsearch,
|
||||
image: 'elasticsearch:8.11.0',
|
||||
ports: {
|
||||
[`0.0.0.0:${config.ELASTICSEARCH_PORT}`]: '9200'
|
||||
},
|
||||
volumes: {
|
||||
[directories.elasticsearch]: '/usr/share/elasticsearch/data'
|
||||
},
|
||||
environment: {
|
||||
'discovery.type': 'single-node',
|
||||
'xpack.security.enabled': 'true',
|
||||
'ELASTIC_PASSWORD': config.ELASTICSEARCH_PASS,
|
||||
'ES_JAVA_OPTS': '-Xms512m -Xmx512m'
|
||||
},
|
||||
restart: 'unless-stopped'
|
||||
});
|
||||
|
||||
if (success) {
|
||||
logger.log('ok', ' Created and started ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to create container');
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
logger.log('info', ` Container: ${containers.elasticsearch}`);
|
||||
logger.log('info', ` Port: ${config.ELASTICSEARCH_PORT}`);
|
||||
logger.log('info', ` Connection: ${config.ELASTICSEARCH_URL}`);
|
||||
logger.log('info', ` Username: ${config.ELASTICSEARCH_USER}`);
|
||||
logger.log('info', ` Password: ${config.ELASTICSEARCH_PASS}`);
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop MongoDB service
|
||||
*/
|
||||
@@ -300,6 +547,26 @@ export class ServiceManager {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop Elasticsearch service
|
||||
*/
|
||||
public async stopElasticsearch(): Promise<void> {
|
||||
logger.log('note', '📦 Elasticsearch:');
|
||||
|
||||
const containers = this.config.getContainerNames();
|
||||
const status = await this.docker.getStatus(containers.elasticsearch);
|
||||
|
||||
if (status === 'running') {
|
||||
if (await this.docker.stop(containers.elasticsearch)) {
|
||||
logger.log('ok', ' Stopped ✓');
|
||||
} else {
|
||||
logger.log('error', ' Failed to stop');
|
||||
}
|
||||
} else {
|
||||
logger.log('note', ' Not running');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Show service status
|
||||
*/
|
||||
@@ -385,6 +652,34 @@ export class ServiceManager {
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
// Elasticsearch status
|
||||
const esStatus = await this.docker.getStatus(containers.elasticsearch);
|
||||
switch (esStatus) {
|
||||
case 'running':
|
||||
logger.log('ok', '📦 Elasticsearch: 🟢 Running');
|
||||
logger.log('info', ` ├─ Container: ${containers.elasticsearch}`);
|
||||
logger.log('info', ` ├─ Port: ${config.ELASTICSEARCH_PORT}`);
|
||||
logger.log('info', ` ├─ Connection: ${config.ELASTICSEARCH_URL}`);
|
||||
logger.log('info', ` └─ Credentials: ${config.ELASTICSEARCH_USER}/${config.ELASTICSEARCH_PASS}`);
|
||||
break;
|
||||
case 'stopped':
|
||||
logger.log('note', '📦 Elasticsearch: 🟡 Stopped');
|
||||
logger.log('info', ` ├─ Container: ${containers.elasticsearch}`);
|
||||
logger.log('info', ` └─ Port: ${config.ELASTICSEARCH_PORT}`);
|
||||
break;
|
||||
case 'not_exists':
|
||||
logger.log('info', '📦 Elasticsearch: ⚪ Not installed');
|
||||
// Check port availability
|
||||
const esPort = parseInt(config.ELASTICSEARCH_PORT);
|
||||
const esAvailable = await helpers.isPortAvailable(esPort);
|
||||
if (!esAvailable) {
|
||||
logger.log('error', ` └─ ⚠️ Port ${esPort} is in use by another process`);
|
||||
} else {
|
||||
logger.log('info', ` └─ Port ${esPort} is available`);
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -420,6 +715,15 @@ export class ServiceManager {
|
||||
logger.log('info', ` Data: ${this.config.getDataDirectories().minio}`);
|
||||
logger.log('info', ` Endpoint: ${config.S3_ENDPOINT}`);
|
||||
logger.log('info', ` Console URL: http://${config.S3_HOST}:${config.S3_CONSOLE_PORT}`);
|
||||
|
||||
console.log();
|
||||
logger.log('note', 'Elasticsearch:');
|
||||
logger.log('info', ` Host: ${config.ELASTICSEARCH_HOST}:${config.ELASTICSEARCH_PORT}`);
|
||||
logger.log('info', ` User: ${config.ELASTICSEARCH_USER}`);
|
||||
logger.log('info', ' Password: ***');
|
||||
logger.log('info', ` Container: ${this.config.getContainerNames().elasticsearch}`);
|
||||
logger.log('info', ` Data: ${this.config.getDataDirectories().elasticsearch}`);
|
||||
logger.log('info', ` Connection: ${config.ELASTICSEARCH_URL}`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -478,15 +782,28 @@ export class ServiceManager {
|
||||
}
|
||||
break;
|
||||
|
||||
case 'elasticsearch':
|
||||
case 'es':
|
||||
if (await this.docker.isRunning(containers.elasticsearch)) {
|
||||
helpers.printHeader(`Elasticsearch Logs (last ${lines} lines)`);
|
||||
const logs = await this.docker.logs(containers.elasticsearch, lines);
|
||||
console.log(logs);
|
||||
} else {
|
||||
logger.log('note', 'Elasticsearch container is not running');
|
||||
}
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await this.showLogs('mongo', lines);
|
||||
console.log();
|
||||
await this.showLogs('minio', lines);
|
||||
console.log();
|
||||
await this.showLogs('elasticsearch', lines);
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('note', 'Usage: gitzone services logs [mongo|s3|all] [lines]');
|
||||
logger.log('note', 'Usage: gitzone services logs [mongo|s3|elasticsearch|all] [lines]');
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -512,9 +829,25 @@ export class ServiceManager {
|
||||
}
|
||||
}
|
||||
|
||||
if (await this.docker.exists(containers.elasticsearch)) {
|
||||
if (await this.docker.remove(containers.elasticsearch, true)) {
|
||||
logger.log('ok', ' Elasticsearch container removed ✓');
|
||||
removed = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (!removed) {
|
||||
logger.log('note', ' No containers to remove');
|
||||
}
|
||||
|
||||
// Check if all containers are gone, then unregister from global registry
|
||||
const mongoExists = await this.docker.exists(containers.mongo);
|
||||
const minioExists = await this.docker.exists(containers.minio);
|
||||
const esExists = await this.docker.exists(containers.elasticsearch);
|
||||
|
||||
if (!mongoExists && !minioExists && !esExists) {
|
||||
await this.globalRegistry.unregisterProject(process.cwd());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -524,23 +857,59 @@ export class ServiceManager {
|
||||
const directories = this.config.getDataDirectories();
|
||||
let cleaned = false;
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(directories.mongo)) {
|
||||
await plugins.smartfile.fs.remove(directories.mongo);
|
||||
if (await plugins.smartfs.directory(directories.mongo).exists()) {
|
||||
await plugins.smartfs.directory(directories.mongo).recursive().delete();
|
||||
logger.log('ok', ' MongoDB data removed ✓');
|
||||
cleaned = true;
|
||||
}
|
||||
|
||||
if (await plugins.smartfile.fs.fileExists(directories.minio)) {
|
||||
await plugins.smartfile.fs.remove(directories.minio);
|
||||
if (await plugins.smartfs.directory(directories.minio).exists()) {
|
||||
await plugins.smartfs.directory(directories.minio).recursive().delete();
|
||||
logger.log('ok', ' S3/MinIO data removed ✓');
|
||||
cleaned = true;
|
||||
}
|
||||
|
||||
if (await plugins.smartfs.directory(directories.elasticsearch).exists()) {
|
||||
await plugins.smartfs.directory(directories.elasticsearch).recursive().delete();
|
||||
logger.log('ok', ' Elasticsearch data removed ✓');
|
||||
cleaned = true;
|
||||
}
|
||||
|
||||
if (!cleaned) {
|
||||
logger.log('note', ' No data to clean');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure which services are enabled
|
||||
*/
|
||||
public async configureServices(): Promise<void> {
|
||||
logger.log('note', 'Select which services to enable for this project:');
|
||||
console.log();
|
||||
|
||||
const currentServices = this.enabledServices || ['mongodb', 'minio', 'elasticsearch'];
|
||||
|
||||
const smartinteract = new plugins.smartinteract.SmartInteract();
|
||||
const response = await smartinteract.askQuestion({
|
||||
name: 'services',
|
||||
type: 'checkbox',
|
||||
message: 'Which services do you want to enable?',
|
||||
choices: [
|
||||
{ name: 'MongoDB', value: 'mongodb' },
|
||||
{ name: 'MinIO (S3)', value: 'minio' },
|
||||
{ name: 'Elasticsearch', value: 'elasticsearch' }
|
||||
],
|
||||
default: currentServices
|
||||
});
|
||||
|
||||
this.enabledServices = response.value || ['mongodb', 'minio', 'elasticsearch'];
|
||||
|
||||
// Save to npmextra.json
|
||||
await this.saveServiceConfiguration(this.enabledServices);
|
||||
|
||||
logger.log('ok', '✅ Service configuration updated');
|
||||
}
|
||||
|
||||
/**
|
||||
* Reconfigure services with new ports
|
||||
*/
|
||||
@@ -562,6 +931,11 @@ export class ServiceManager {
|
||||
logger.log('ok', ' S3/MinIO stopped ✓');
|
||||
}
|
||||
|
||||
if (await this.docker.exists(containers.elasticsearch)) {
|
||||
await this.docker.stop(containers.elasticsearch);
|
||||
logger.log('ok', ' Elasticsearch stopped ✓');
|
||||
}
|
||||
|
||||
// Reconfigure ports
|
||||
await this.config.reconfigurePorts();
|
||||
|
||||
@@ -576,8 +950,7 @@ export class ServiceManager {
|
||||
|
||||
if (response.value) {
|
||||
console.log();
|
||||
await this.startMongoDB();
|
||||
await this.startMinIO();
|
||||
await this.startAll();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -42,11 +42,15 @@ export const getRandomAvailablePort = async (): Promise<number> => {
|
||||
/**
|
||||
* Get the project name from package.json or directory
|
||||
*/
|
||||
export const getProjectName = (): string => {
|
||||
export const getProjectName = async (): Promise<string> => {
|
||||
try {
|
||||
const packageJsonPath = plugins.path.join(process.cwd(), 'package.json');
|
||||
if (plugins.smartfile.fs.fileExistsSync(packageJsonPath)) {
|
||||
const packageJson = plugins.smartfile.fs.toObjectSync(packageJsonPath);
|
||||
if (await plugins.smartfs.file(packageJsonPath).exists()) {
|
||||
const content = (await plugins.smartfs
|
||||
.file(packageJsonPath)
|
||||
.encoding('utf8')
|
||||
.read()) as string;
|
||||
const packageJson = JSON.parse(content);
|
||||
if (packageJson.name) {
|
||||
// Sanitize: @fin.cx/skr → fin-cx-skr
|
||||
return packageJson.name.replace(/@/g, '').replace(/[\/\.]/g, '-');
|
||||
|
||||
@@ -1,13 +1,23 @@
|
||||
import * as plugins from './mod.plugins.js';
|
||||
import * as helpers from './helpers.js';
|
||||
import { ServiceManager } from './classes.servicemanager.js';
|
||||
import { GlobalRegistry } from './classes.globalregistry.js';
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export const run = async (argvArg: any) => {
|
||||
const isGlobal = argvArg.g || argvArg.global;
|
||||
const command = argvArg._[1] || 'help';
|
||||
|
||||
// Handle global commands first
|
||||
if (isGlobal) {
|
||||
await handleGlobalCommand(command);
|
||||
return;
|
||||
}
|
||||
|
||||
// Local project commands
|
||||
const serviceManager = new ServiceManager();
|
||||
await serviceManager.init();
|
||||
|
||||
const command = argvArg._[1] || 'help';
|
||||
const service = argvArg._[2] || 'all';
|
||||
|
||||
switch (command) {
|
||||
@@ -28,7 +38,11 @@ export const run = async (argvArg: any) => {
|
||||
break;
|
||||
|
||||
case 'config':
|
||||
if (service === 'services' || argvArg._[2] === 'services') {
|
||||
await handleConfigureServices(serviceManager);
|
||||
} else {
|
||||
await serviceManager.showConfig();
|
||||
}
|
||||
break;
|
||||
|
||||
case 'compass':
|
||||
@@ -73,16 +87,19 @@ async function handleStart(serviceManager: ServiceManager, service: string) {
|
||||
await serviceManager.startMinIO();
|
||||
break;
|
||||
|
||||
case 'elasticsearch':
|
||||
case 'es':
|
||||
await serviceManager.startElasticsearch();
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await serviceManager.startMongoDB();
|
||||
console.log();
|
||||
await serviceManager.startMinIO();
|
||||
await serviceManager.startAll();
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('error', `Unknown service: ${service}`);
|
||||
logger.log('note', 'Use: mongo, s3, or all');
|
||||
logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -101,16 +118,19 @@ async function handleStop(serviceManager: ServiceManager, service: string) {
|
||||
await serviceManager.stopMinIO();
|
||||
break;
|
||||
|
||||
case 'elasticsearch':
|
||||
case 'es':
|
||||
await serviceManager.stopElasticsearch();
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await serviceManager.stopMongoDB();
|
||||
console.log();
|
||||
await serviceManager.stopMinIO();
|
||||
await serviceManager.stopAll();
|
||||
break;
|
||||
|
||||
default:
|
||||
logger.log('error', `Unknown service: ${service}`);
|
||||
logger.log('note', 'Use: mongo, s3, or all');
|
||||
logger.log('note', 'Use: mongo, s3, elasticsearch, or all');
|
||||
break;
|
||||
}
|
||||
}
|
||||
@@ -133,14 +153,18 @@ async function handleRestart(serviceManager: ServiceManager, service: string) {
|
||||
await serviceManager.startMinIO();
|
||||
break;
|
||||
|
||||
case 'elasticsearch':
|
||||
case 'es':
|
||||
await serviceManager.stopElasticsearch();
|
||||
await plugins.smartdelay.delayFor(2000);
|
||||
await serviceManager.startElasticsearch();
|
||||
break;
|
||||
|
||||
case 'all':
|
||||
case '':
|
||||
await serviceManager.stopMongoDB();
|
||||
await serviceManager.stopMinIO();
|
||||
await serviceManager.stopAll();
|
||||
await plugins.smartdelay.delayFor(2000);
|
||||
await serviceManager.startMongoDB();
|
||||
console.log();
|
||||
await serviceManager.startMinIO();
|
||||
await serviceManager.startAll();
|
||||
break;
|
||||
|
||||
default:
|
||||
@@ -185,6 +209,11 @@ async function handleClean(serviceManager: ServiceManager) {
|
||||
}
|
||||
}
|
||||
|
||||
async function handleConfigureServices(serviceManager: ServiceManager) {
|
||||
helpers.printHeader('Configure Services');
|
||||
await serviceManager.configureServices();
|
||||
}
|
||||
|
||||
function showHelp() {
|
||||
helpers.printHeader('GitZone Services Manager');
|
||||
|
||||
@@ -192,22 +221,30 @@ function showHelp() {
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Commands:');
|
||||
logger.log('info', ' start [service] Start services (mongo|s3|all)');
|
||||
logger.log('info', ' stop [service] Stop services (mongo|s3|all)');
|
||||
logger.log('info', ' restart [service] Restart services (mongo|s3|all)');
|
||||
logger.log('info', ' start [service] Start services (mongo|s3|elasticsearch|all)');
|
||||
logger.log('info', ' stop [service] Stop services (mongo|s3|elasticsearch|all)');
|
||||
logger.log('info', ' restart [service] Restart services (mongo|s3|elasticsearch|all)');
|
||||
logger.log('info', ' status Show service status');
|
||||
logger.log('info', ' config Show current configuration');
|
||||
logger.log('info', ' config services Configure which services are enabled');
|
||||
logger.log('info', ' compass Show MongoDB Compass connection string');
|
||||
logger.log('info', ' logs [service] Show logs (mongo|s3|all) [lines]');
|
||||
logger.log('info', ' logs [service] Show logs (mongo|s3|elasticsearch|all) [lines]');
|
||||
logger.log('info', ' reconfigure Reassign ports and restart services');
|
||||
logger.log('info', ' remove Remove all containers');
|
||||
logger.log('info', ' clean Remove all containers and data ⚠️');
|
||||
logger.log('info', ' help Show this help message');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Available Services:');
|
||||
logger.log('info', ' • MongoDB (mongo) - Document database');
|
||||
logger.log('info', ' • MinIO (s3) - S3-compatible object storage');
|
||||
logger.log('info', ' • Elasticsearch (elasticsearch) - Search and analytics engine');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Features:');
|
||||
logger.log('info', ' • Auto-creates .nogit/env.json with smart defaults');
|
||||
logger.log('info', ' • Random ports (20000-30000) to avoid conflicts');
|
||||
logger.log('info', ' • Random ports (20000-30000) for MongoDB/MinIO to avoid conflicts');
|
||||
logger.log('info', ' • Elasticsearch uses standard port 9200');
|
||||
logger.log('info', ' • Project-specific containers for multi-project support');
|
||||
logger.log('info', ' • Preserves custom configuration values');
|
||||
logger.log('info', ' • MongoDB Compass connection support');
|
||||
@@ -216,9 +253,181 @@ function showHelp() {
|
||||
logger.log('note', 'Examples:');
|
||||
logger.log('info', ' gitzone services start # Start all services');
|
||||
logger.log('info', ' gitzone services start mongo # Start only MongoDB');
|
||||
logger.log('info', ' gitzone services start elasticsearch # Start only Elasticsearch');
|
||||
logger.log('info', ' gitzone services stop # Stop all services');
|
||||
logger.log('info', ' gitzone services status # Check service status');
|
||||
logger.log('info', ' gitzone services config # Show configuration');
|
||||
logger.log('info', ' gitzone services compass # Get MongoDB Compass connection');
|
||||
logger.log('info', ' gitzone services logs mongo 50 # Show last 50 lines of MongoDB logs');
|
||||
logger.log('info', ' gitzone services logs elasticsearch # Show Elasticsearch logs');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Global Commands (-g/--global):');
|
||||
logger.log('info', ' list -g List all registered projects');
|
||||
logger.log('info', ' status -g Show status across all projects');
|
||||
logger.log('info', ' stop -g Stop all containers across all projects');
|
||||
logger.log('info', ' cleanup -g Remove stale registry entries');
|
||||
console.log();
|
||||
|
||||
logger.log('note', 'Global Examples:');
|
||||
logger.log('info', ' gitzone services list -g # List all registered projects');
|
||||
logger.log('info', ' gitzone services status -g # Show global container status');
|
||||
logger.log('info', ' gitzone services stop -g # Stop all (prompts for confirmation)');
|
||||
}
|
||||
|
||||
// ==================== Global Command Handlers ====================
|
||||
|
||||
async function handleGlobalCommand(command: string) {
|
||||
const globalRegistry = GlobalRegistry.getInstance();
|
||||
|
||||
switch (command) {
|
||||
case 'list':
|
||||
await handleGlobalList(globalRegistry);
|
||||
break;
|
||||
|
||||
case 'status':
|
||||
await handleGlobalStatus(globalRegistry);
|
||||
break;
|
||||
|
||||
case 'stop':
|
||||
await handleGlobalStop(globalRegistry);
|
||||
break;
|
||||
|
||||
case 'cleanup':
|
||||
await handleGlobalCleanup(globalRegistry);
|
||||
break;
|
||||
|
||||
case 'help':
|
||||
default:
|
||||
showHelp();
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
async function handleGlobalList(globalRegistry: GlobalRegistry) {
|
||||
helpers.printHeader('Registered Projects (Global)');
|
||||
|
||||
const projects = await globalRegistry.getAllProjects();
|
||||
const projectPaths = Object.keys(projects);
|
||||
|
||||
if (projectPaths.length === 0) {
|
||||
logger.log('note', 'No projects registered');
|
||||
return;
|
||||
}
|
||||
|
||||
for (const path of projectPaths) {
|
||||
const project = projects[path];
|
||||
const lastActive = new Date(project.lastActive).toLocaleString();
|
||||
|
||||
console.log();
|
||||
logger.log('ok', `📁 ${project.projectName}`);
|
||||
logger.log('info', ` Path: ${project.projectPath}`);
|
||||
logger.log('info', ` Services: ${project.enabledServices.join(', ')}`);
|
||||
logger.log('info', ` Last Active: ${lastActive}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleGlobalStatus(globalRegistry: GlobalRegistry) {
|
||||
helpers.printHeader('Global Service Status');
|
||||
|
||||
const statuses = await globalRegistry.getGlobalStatus();
|
||||
|
||||
if (statuses.length === 0) {
|
||||
logger.log('note', 'No projects registered');
|
||||
return;
|
||||
}
|
||||
|
||||
let runningCount = 0;
|
||||
let totalContainers = 0;
|
||||
|
||||
for (const project of statuses) {
|
||||
console.log();
|
||||
logger.log('ok', `📁 ${project.projectName}`);
|
||||
logger.log('info', ` Path: ${project.projectPath}`);
|
||||
|
||||
if (project.containers.length === 0) {
|
||||
logger.log('note', ' No containers configured');
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const container of project.containers) {
|
||||
totalContainers++;
|
||||
const statusIcon = container.status === 'running' ? '🟢' : container.status === 'exited' ? '🟡' : '⚪';
|
||||
if (container.status === 'running') runningCount++;
|
||||
logger.log('info', ` ${statusIcon} ${container.name}: ${container.status}`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log();
|
||||
logger.log('note', `Summary: ${runningCount}/${totalContainers} containers running across ${statuses.length} project(s)`);
|
||||
}
|
||||
|
||||
async function handleGlobalStop(globalRegistry: GlobalRegistry) {
|
||||
helpers.printHeader('Stop All Containers (Global)');
|
||||
|
||||
const statuses = await globalRegistry.getGlobalStatus();
|
||||
|
||||
// Count running containers
|
||||
let runningCount = 0;
|
||||
for (const project of statuses) {
|
||||
for (const container of project.containers) {
|
||||
if (container.status === 'running') runningCount++;
|
||||
}
|
||||
}
|
||||
|
||||
if (runningCount === 0) {
|
||||
logger.log('note', 'No running containers found');
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log('note', `Found ${runningCount} running container(s) across ${statuses.length} project(s)`);
|
||||
console.log();
|
||||
|
||||
// Show what will be stopped
|
||||
for (const project of statuses) {
|
||||
const runningContainers = project.containers.filter(c => c.status === 'running');
|
||||
if (runningContainers.length > 0) {
|
||||
logger.log('info', `${project.projectName}:`);
|
||||
for (const container of runningContainers) {
|
||||
logger.log('info', ` • ${container.name}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
console.log();
|
||||
const shouldContinue = await plugins.smartinteract.SmartInteract.getCliConfirmation(
|
||||
'Stop all containers?',
|
||||
false
|
||||
);
|
||||
|
||||
if (!shouldContinue) {
|
||||
logger.log('note', 'Cancelled');
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log('note', 'Stopping all containers...');
|
||||
const result = await globalRegistry.stopAll();
|
||||
|
||||
if (result.stopped.length > 0) {
|
||||
logger.log('ok', `Stopped: ${result.stopped.join(', ')}`);
|
||||
}
|
||||
if (result.failed.length > 0) {
|
||||
logger.log('error', `Failed to stop: ${result.failed.join(', ')}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function handleGlobalCleanup(globalRegistry: GlobalRegistry) {
|
||||
helpers.printHeader('Cleanup Registry (Global)');
|
||||
|
||||
logger.log('note', 'Checking for stale registry entries...');
|
||||
const removed = await globalRegistry.cleanup();
|
||||
|
||||
if (removed.length === 0) {
|
||||
logger.log('ok', 'No stale entries found');
|
||||
return;
|
||||
}
|
||||
|
||||
logger.log('ok', `Removed ${removed.length} stale entr${removed.length === 1 ? 'y' : 'ies'}:`);
|
||||
for (const path of removed) {
|
||||
logger.log('info', ` • ${path}`);
|
||||
}
|
||||
}
|
||||
@@ -6,23 +6,36 @@ import * as paths from '../paths.js';
|
||||
|
||||
import { logger } from '../gitzone.logging.js';
|
||||
|
||||
export let run = () => {
|
||||
export let run = async () => {
|
||||
const done = plugins.smartpromise.defer();
|
||||
logger.log('warn', 'no action specified');
|
||||
|
||||
const dirEntries = await plugins.smartfs.directory(paths.templatesDir).list();
|
||||
const templates: string[] = [];
|
||||
for (const entry of dirEntries) {
|
||||
try {
|
||||
const stats = await plugins.smartfs
|
||||
.file(plugins.path.join(paths.templatesDir, entry.path))
|
||||
.stat();
|
||||
if (stats.isDirectory) {
|
||||
templates.push(entry.name);
|
||||
}
|
||||
} catch {
|
||||
// Skip entries that can't be accessed
|
||||
}
|
||||
}
|
||||
|
||||
let projects = `\n`;
|
||||
for (const template of templates) {
|
||||
projects += ` - ${template}\n`;
|
||||
}
|
||||
|
||||
logger.log(
|
||||
'info',
|
||||
`
|
||||
You can do one of the following things:
|
||||
* create a new project with 'gitzone template [template]'
|
||||
the following templates exist: ${(() => {
|
||||
let projects = `\n`;
|
||||
for (const template of plugins.smartfile.fs.listFoldersSync(
|
||||
paths.templatesDir,
|
||||
)) {
|
||||
projects += ` - ${template}\n`;
|
||||
}
|
||||
return projects;
|
||||
})()}
|
||||
the following templates exist: ${projects}
|
||||
* format a project with 'gitzone format'
|
||||
`,
|
||||
);
|
||||
|
||||
@@ -11,7 +11,7 @@ export const getTemplatePath = (templateNameArg: string) => {
|
||||
* receives a template name and returns wether there is a corresponding template
|
||||
*/
|
||||
export const isTemplate = async (templateNameArg: string) => {
|
||||
return plugins.smartfile.fs.isDirectory(getTemplatePath(templateNameArg));
|
||||
return plugins.smartfs.directory(getTemplatePath(templateNameArg)).exists();
|
||||
};
|
||||
|
||||
export const getTemplate = async (templateNameArg: string) => {
|
||||
|
||||
@@ -10,9 +10,13 @@ import * as smartupdate from '@push.rocks/smartupdate';
|
||||
import * as smartshell from '@push.rocks/smartshell';
|
||||
import * as smartnetwork from '@push.rocks/smartnetwork';
|
||||
import * as smartfile from '@push.rocks/smartfile';
|
||||
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||
import * as smartinteract from '@push.rocks/smartinteract';
|
||||
import * as smartdelay from '@push.rocks/smartdelay';
|
||||
|
||||
// Create smartfs instance for filesystem operations
|
||||
export const smartfs = new SmartFs(new SmartFsProviderNode());
|
||||
|
||||
export {
|
||||
smartlog,
|
||||
smartlogDestinationLocal,
|
||||
|
||||
Reference in New Issue
Block a user