feat(storage): add StorageManager and cache subsystem; integrate storage into ConnectionManager and GitopsApp, migrate legacy connections, and add tests
This commit is contained in:
10
changelog.md
10
changelog.md
@@ -1,5 +1,15 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-02-24 - 2.3.0 - feat(storage)
|
||||||
|
add StorageManager and cache subsystem; integrate storage into ConnectionManager and GitopsApp, migrate legacy connections, and add tests
|
||||||
|
|
||||||
|
- Add StorageManager with filesystem and memory backends, key normalization, atomic writes and JSON helpers (getJSON/setJSON).
|
||||||
|
- ConnectionManager now depends on StorageManager, persists each connection as /connections/<id>.json, and includes a one-time migration from legacy .nogit/connections.json.
|
||||||
|
- Introduce cache subsystem: CacheDb (LocalTsmDb + Smartdata), CacheCleaner, CachedDocument and CachedProject for TTL'd cached provider data, plus lifecycle management in GitopsApp.
|
||||||
|
- GitopsApp now initializes StorageManager, wires ConnectionManager to storage, starts/stops CacheDb and CacheCleaner, and uses resolved default paths via resolvePaths.
|
||||||
|
- Export smartmongo and smartdata in plugins and add corresponding deps to deno.json.
|
||||||
|
- Add comprehensive tests: storage unit tests, connection manager integration using StorageManager, and a tsmdb + smartdata spike test.
|
||||||
|
|
||||||
## 2026-02-24 - 2.2.1 - fix(ts_bundled)
|
## 2026-02-24 - 2.2.1 - fix(ts_bundled)
|
||||||
add generated bundled JavaScript and source map for ts build (bundle.js and bundle.js.map)
|
add generated bundled JavaScript and source map for ts build (bundle.js and bundle.js.map)
|
||||||
|
|
||||||
|
|||||||
@@ -17,7 +17,9 @@
|
|||||||
"@push.rocks/smartguard": "npm:@push.rocks/smartguard@^3.1.0",
|
"@push.rocks/smartguard": "npm:@push.rocks/smartguard@^3.1.0",
|
||||||
"@push.rocks/smartjwt": "npm:@push.rocks/smartjwt@^2.2.1",
|
"@push.rocks/smartjwt": "npm:@push.rocks/smartjwt@^2.2.1",
|
||||||
"@apiclient.xyz/gitea": "npm:@apiclient.xyz/gitea@^1.0.3",
|
"@apiclient.xyz/gitea": "npm:@apiclient.xyz/gitea@^1.0.3",
|
||||||
"@apiclient.xyz/gitlab": "npm:@apiclient.xyz/gitlab@^2.0.3"
|
"@apiclient.xyz/gitlab": "npm:@apiclient.xyz/gitlab@^2.0.3",
|
||||||
|
"@push.rocks/smartmongo": "npm:@push.rocks/smartmongo@^5.1.0",
|
||||||
|
"@push.rocks/smartdata": "npm:@push.rocks/smartdata@^7.0.15"
|
||||||
},
|
},
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"lib": [
|
"lib": [
|
||||||
|
|||||||
@@ -2,6 +2,7 @@ import { assertEquals, assertExists } from 'https://deno.land/std@0.208.0/assert
|
|||||||
import { BaseProvider, GiteaProvider, GitLabProvider } from '../ts/providers/index.ts';
|
import { BaseProvider, GiteaProvider, GitLabProvider } from '../ts/providers/index.ts';
|
||||||
import { ConnectionManager } from '../ts/classes/connectionmanager.ts';
|
import { ConnectionManager } from '../ts/classes/connectionmanager.ts';
|
||||||
import { GitopsApp } from '../ts/classes/gitopsapp.ts';
|
import { GitopsApp } from '../ts/classes/gitopsapp.ts';
|
||||||
|
import { StorageManager } from '../ts/storage/index.ts';
|
||||||
|
|
||||||
Deno.test('GiteaProvider instantiates correctly', () => {
|
Deno.test('GiteaProvider instantiates correctly', () => {
|
||||||
const provider = new GiteaProvider('test-id', 'https://gitea.example.com', 'test-token');
|
const provider = new GiteaProvider('test-id', 'https://gitea.example.com', 'test-token');
|
||||||
@@ -18,13 +19,15 @@ Deno.test('GitLabProvider instantiates correctly', () => {
|
|||||||
});
|
});
|
||||||
|
|
||||||
Deno.test('ConnectionManager instantiates correctly', () => {
|
Deno.test('ConnectionManager instantiates correctly', () => {
|
||||||
const manager = new ConnectionManager();
|
const storage = new StorageManager({ backend: 'memory' });
|
||||||
|
const manager = new ConnectionManager(storage);
|
||||||
assertExists(manager);
|
assertExists(manager);
|
||||||
});
|
});
|
||||||
|
|
||||||
Deno.test('GitopsApp instantiates correctly', () => {
|
Deno.test('GitopsApp instantiates correctly', () => {
|
||||||
const app = new GitopsApp();
|
const app = new GitopsApp();
|
||||||
assertExists(app);
|
assertExists(app);
|
||||||
|
assertExists(app.storageManager);
|
||||||
assertExists(app.connectionManager);
|
assertExists(app.connectionManager);
|
||||||
assertExists(app.opsServer);
|
assertExists(app.opsServer);
|
||||||
});
|
});
|
||||||
|
|||||||
137
test/test.storage_test.ts
Normal file
137
test/test.storage_test.ts
Normal file
@@ -0,0 +1,137 @@
|
|||||||
|
import { assertEquals, assertExists } from 'https://deno.land/std@0.208.0/assert/mod.ts';
|
||||||
|
import { StorageManager } from '../ts/storage/index.ts';
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: set and get', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
await sm.set('/test/key1', 'hello');
|
||||||
|
const result = await sm.get('/test/key1');
|
||||||
|
assertEquals(result, 'hello');
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: get nonexistent returns null', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
const result = await sm.get('/missing');
|
||||||
|
assertEquals(result, null);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: delete', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
await sm.set('/test/key1', 'hello');
|
||||||
|
const deleted = await sm.delete('/test/key1');
|
||||||
|
assertEquals(deleted, true);
|
||||||
|
const result = await sm.get('/test/key1');
|
||||||
|
assertEquals(result, null);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: delete nonexistent returns false', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
const deleted = await sm.delete('/missing');
|
||||||
|
assertEquals(deleted, false);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: exists', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
assertEquals(await sm.exists('/test/key1'), false);
|
||||||
|
await sm.set('/test/key1', 'hello');
|
||||||
|
assertEquals(await sm.exists('/test/key1'), true);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: list keys under prefix', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
await sm.set('/connections/a.json', '{}');
|
||||||
|
await sm.set('/connections/b.json', '{}');
|
||||||
|
await sm.set('/other/c.json', '{}');
|
||||||
|
const keys = await sm.list('/connections/');
|
||||||
|
assertEquals(keys, ['/connections/a.json', '/connections/b.json']);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: getJSON and setJSON roundtrip', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
const data = { id: '123', name: 'test', nested: { value: 42 } };
|
||||||
|
await sm.setJSON('/data/item.json', data);
|
||||||
|
const result = await sm.getJSON<typeof data>('/data/item.json');
|
||||||
|
assertEquals(result, data);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager memory: getJSON nonexistent returns null', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
const result = await sm.getJSON('/missing.json');
|
||||||
|
assertEquals(result, null);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager: key validation requires leading slash', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
let threw = false;
|
||||||
|
try {
|
||||||
|
await sm.get('no-slash');
|
||||||
|
} catch {
|
||||||
|
threw = true;
|
||||||
|
}
|
||||||
|
assertEquals(threw, true);
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager: key normalization strips ..', async () => {
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
await sm.set('/test/../actual/key', 'value');
|
||||||
|
// '..' segments are stripped, so key becomes /test/actual/key — wait,
|
||||||
|
// the normalizer filters out '..' segments entirely
|
||||||
|
// /test/../actual/key -> segments: ['test', 'actual', 'key'] (.. filtered)
|
||||||
|
const result = await sm.get('/test/actual/key');
|
||||||
|
assertEquals(result, 'value');
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager filesystem: set, get, delete roundtrip', async () => {
|
||||||
|
const tmpDir = await Deno.makeTempDir();
|
||||||
|
const sm = new StorageManager({ backend: 'filesystem', fsPath: tmpDir });
|
||||||
|
try {
|
||||||
|
await sm.set('/test/file.txt', 'filesystem content');
|
||||||
|
const result = await sm.get('/test/file.txt');
|
||||||
|
assertEquals(result, 'filesystem content');
|
||||||
|
|
||||||
|
assertEquals(await sm.exists('/test/file.txt'), true);
|
||||||
|
|
||||||
|
const deleted = await sm.delete('/test/file.txt');
|
||||||
|
assertEquals(deleted, true);
|
||||||
|
assertEquals(await sm.get('/test/file.txt'), null);
|
||||||
|
} finally {
|
||||||
|
await Deno.remove(tmpDir, { recursive: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('StorageManager filesystem: list keys', async () => {
|
||||||
|
const tmpDir = await Deno.makeTempDir();
|
||||||
|
const sm = new StorageManager({ backend: 'filesystem', fsPath: tmpDir });
|
||||||
|
try {
|
||||||
|
await sm.setJSON('/items/a.json', { id: 'a' });
|
||||||
|
await sm.setJSON('/items/b.json', { id: 'b' });
|
||||||
|
const keys = await sm.list('/items/');
|
||||||
|
assertEquals(keys, ['/items/a.json', '/items/b.json']);
|
||||||
|
} finally {
|
||||||
|
await Deno.remove(tmpDir, { recursive: true });
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
Deno.test('ConnectionManager with StorageManager: create and load', async () => {
|
||||||
|
const { ConnectionManager } = await import('../ts/classes/connectionmanager.ts');
|
||||||
|
const sm = new StorageManager({ backend: 'memory' });
|
||||||
|
const cm = new ConnectionManager(sm);
|
||||||
|
await cm.init();
|
||||||
|
|
||||||
|
// Create a connection
|
||||||
|
const conn = await cm.createConnection('test', 'gitea', 'https://gitea.example.com', 'token');
|
||||||
|
assertExists(conn.id);
|
||||||
|
assertEquals(conn.name, 'test');
|
||||||
|
assertEquals(conn.token, '***');
|
||||||
|
|
||||||
|
// Verify it's stored in StorageManager
|
||||||
|
const stored = await sm.getJSON<{ id: string }>(`/connections/${conn.id}.json`);
|
||||||
|
assertExists(stored);
|
||||||
|
assertEquals(stored.id, conn.id);
|
||||||
|
|
||||||
|
// Create a new ConnectionManager and verify it loads the connection
|
||||||
|
const cm2 = new ConnectionManager(sm);
|
||||||
|
await cm2.init();
|
||||||
|
const conns = cm2.getConnections();
|
||||||
|
assertEquals(conns.length, 1);
|
||||||
|
assertEquals(conns[0].id, conn.id);
|
||||||
|
});
|
||||||
59
test/test.tsmdb_spike_test.ts
Normal file
59
test/test.tsmdb_spike_test.ts
Normal file
@@ -0,0 +1,59 @@
|
|||||||
|
import { assertEquals, assertExists } from 'https://deno.land/std@0.208.0/assert/mod.ts';
|
||||||
|
import { LocalTsmDb } from '@push.rocks/smartmongo';
|
||||||
|
import { SmartdataDb, SmartDataDbDoc, Collection, svDb, unI } from '@push.rocks/smartdata';
|
||||||
|
|
||||||
|
Deno.test({
|
||||||
|
name: 'TsmDb spike: LocalTsmDb + SmartdataDb roundtrip',
|
||||||
|
sanitizeOps: false,
|
||||||
|
sanitizeResources: false,
|
||||||
|
fn: async () => {
|
||||||
|
const tmpDir = await Deno.makeTempDir();
|
||||||
|
|
||||||
|
// 1. Start local MongoDB-compatible server
|
||||||
|
const localDb = new LocalTsmDb({ folderPath: tmpDir });
|
||||||
|
const { connectionUri } = await localDb.start();
|
||||||
|
assertExists(connectionUri);
|
||||||
|
|
||||||
|
// 2. Connect smartdata
|
||||||
|
const smartDb = new SmartdataDb({
|
||||||
|
mongoDbUrl: connectionUri,
|
||||||
|
mongoDbName: 'gitops_spike_test',
|
||||||
|
});
|
||||||
|
await smartDb.init();
|
||||||
|
assertEquals(smartDb.status, 'connected');
|
||||||
|
|
||||||
|
// 3. Define a simple document class
|
||||||
|
@Collection(() => smartDb)
|
||||||
|
class TestDoc extends SmartDataDbDoc<TestDoc, TestDoc> {
|
||||||
|
@unI()
|
||||||
|
public id: string = '';
|
||||||
|
|
||||||
|
@svDb()
|
||||||
|
public label: string = '';
|
||||||
|
|
||||||
|
@svDb()
|
||||||
|
public value: number = 0;
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
super();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 4. Insert a document
|
||||||
|
const doc = new TestDoc();
|
||||||
|
doc.id = 'test-1';
|
||||||
|
doc.label = 'spike';
|
||||||
|
doc.value = 42;
|
||||||
|
await doc.save();
|
||||||
|
|
||||||
|
// 5. Query it back
|
||||||
|
const found = await TestDoc.getInstance({ id: 'test-1' });
|
||||||
|
assertExists(found);
|
||||||
|
assertEquals(found.label, 'spike');
|
||||||
|
assertEquals(found.value, 42);
|
||||||
|
|
||||||
|
// 6. Cleanup — smartDb closes; localDb.stop() hangs under Deno, so fire-and-forget
|
||||||
|
await smartDb.close();
|
||||||
|
localDb.stop().catch(() => {});
|
||||||
|
},
|
||||||
|
});
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@serve.zone/gitops',
|
name: '@serve.zone/gitops',
|
||||||
version: '2.2.1',
|
version: '2.3.0',
|
||||||
description: 'GitOps management app for Gitea and GitLab - manage secrets, browse projects, view CI pipelines, and stream build logs'
|
description: 'GitOps management app for Gitea and GitLab - manage secrets, browse projects, view CI pipelines, and stream build logs'
|
||||||
}
|
}
|
||||||
|
|||||||
68
ts/cache/classes.cache.cleaner.ts
vendored
Normal file
68
ts/cache/classes.cache.cleaner.ts
vendored
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
import { logger } from '../logging.ts';
|
||||||
|
import type { CacheDb } from './classes.cachedb.ts';
|
||||||
|
|
||||||
|
// deno-lint-ignore no-explicit-any
|
||||||
|
type DocumentClass = { getInstances: (filter: any) => Promise<{ delete: () => Promise<void> }[]> };
|
||||||
|
|
||||||
|
const DEFAULT_INTERVAL_MS = 60 * 60 * 1000; // 1 hour
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Periodically cleans up expired cached documents.
|
||||||
|
*/
|
||||||
|
export class CacheCleaner {
|
||||||
|
private intervalId: number | null = null;
|
||||||
|
private intervalMs: number;
|
||||||
|
private documentClasses: DocumentClass[] = [];
|
||||||
|
private cacheDb: CacheDb;
|
||||||
|
|
||||||
|
constructor(cacheDb: CacheDb, intervalMs = DEFAULT_INTERVAL_MS) {
|
||||||
|
this.cacheDb = cacheDb;
|
||||||
|
this.intervalMs = intervalMs;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Register a document class for cleanup */
|
||||||
|
registerClass(cls: DocumentClass): void {
|
||||||
|
this.documentClasses.push(cls);
|
||||||
|
}
|
||||||
|
|
||||||
|
start(): void {
|
||||||
|
if (this.intervalId !== null) return;
|
||||||
|
this.intervalId = setInterval(() => {
|
||||||
|
this.clean().catch((err) => {
|
||||||
|
logger.error(`CacheCleaner error: ${err}`);
|
||||||
|
});
|
||||||
|
}, this.intervalMs);
|
||||||
|
// Unref so the interval doesn't prevent process exit
|
||||||
|
Deno.unrefTimer(this.intervalId);
|
||||||
|
logger.debug(`CacheCleaner started (interval: ${this.intervalMs}ms)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
stop(): void {
|
||||||
|
if (this.intervalId !== null) {
|
||||||
|
clearInterval(this.intervalId);
|
||||||
|
this.intervalId = null;
|
||||||
|
logger.debug('CacheCleaner stopped');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Run a single cleanup pass */
|
||||||
|
async clean(): Promise<number> {
|
||||||
|
const now = Date.now();
|
||||||
|
let totalDeleted = 0;
|
||||||
|
for (const cls of this.documentClasses) {
|
||||||
|
try {
|
||||||
|
const expired = await cls.getInstances({ expiresAt: { $lt: now } });
|
||||||
|
for (const doc of expired) {
|
||||||
|
await doc.delete();
|
||||||
|
totalDeleted++;
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
logger.error(`CacheCleaner: failed to clean class: ${err}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (totalDeleted > 0) {
|
||||||
|
logger.debug(`CacheCleaner: deleted ${totalDeleted} expired document(s)`);
|
||||||
|
}
|
||||||
|
return totalDeleted;
|
||||||
|
}
|
||||||
|
}
|
||||||
57
ts/cache/classes.cached.document.ts
vendored
Normal file
57
ts/cache/classes.cached.document.ts
vendored
Normal file
@@ -0,0 +1,57 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
|
||||||
|
/** TTL duration constants in milliseconds */
|
||||||
|
export const TTL = {
|
||||||
|
MINUTES_5: 5 * 60 * 1000,
|
||||||
|
HOURS_1: 60 * 60 * 1000,
|
||||||
|
HOURS_24: 24 * 60 * 60 * 1000,
|
||||||
|
DAYS_7: 7 * 24 * 60 * 60 * 1000,
|
||||||
|
DAYS_30: 30 * 24 * 60 * 60 * 1000,
|
||||||
|
DAYS_90: 90 * 24 * 60 * 60 * 1000,
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Abstract base class for cached documents with TTL support.
|
||||||
|
* Extend this class and add @Collection decorator pointing to your CacheDb.
|
||||||
|
*/
|
||||||
|
export abstract class CachedDocument<
|
||||||
|
T extends CachedDocument<T>,
|
||||||
|
> extends plugins.smartdata.SmartDataDbDoc<T, T> {
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public createdAt: number = Date.now();
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public expiresAt: number = Date.now() + TTL.HOURS_1;
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public lastAccessedAt: number = Date.now();
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
super();
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Set TTL in milliseconds from now */
|
||||||
|
setTTL(ms: number): void {
|
||||||
|
this.expiresAt = Date.now() + ms;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Set TTL in days from now */
|
||||||
|
setTTLDays(days: number): void {
|
||||||
|
this.setTTL(days * 24 * 60 * 60 * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Set TTL in hours from now */
|
||||||
|
setTTLHours(hours: number): void {
|
||||||
|
this.setTTL(hours * 60 * 60 * 1000);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Check if this document has expired */
|
||||||
|
isExpired(): boolean {
|
||||||
|
return Date.now() > this.expiresAt;
|
||||||
|
}
|
||||||
|
|
||||||
|
/** Update last accessed timestamp */
|
||||||
|
touch(): void {
|
||||||
|
this.lastAccessedAt = Date.now();
|
||||||
|
}
|
||||||
|
}
|
||||||
82
ts/cache/classes.cachedb.ts
vendored
Normal file
82
ts/cache/classes.cachedb.ts
vendored
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
import * as plugins from '../plugins.ts';
|
||||||
|
import { logger } from '../logging.ts';
|
||||||
|
|
||||||
|
export interface ICacheDbOptions {
|
||||||
|
storagePath?: string;
|
||||||
|
dbName?: string;
|
||||||
|
debug?: boolean;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Singleton wrapper around LocalTsmDb + SmartdataDb.
|
||||||
|
* Provides a managed MongoDB-compatible cache database.
|
||||||
|
*/
|
||||||
|
export class CacheDb {
|
||||||
|
private static instance: CacheDb | null = null;
|
||||||
|
|
||||||
|
private localTsmDb: InstanceType<typeof plugins.smartmongo.LocalTsmDb> | null = null;
|
||||||
|
private smartdataDb: InstanceType<typeof plugins.smartdata.SmartdataDb> | null = null;
|
||||||
|
private options: Required<ICacheDbOptions>;
|
||||||
|
|
||||||
|
private constructor(options: ICacheDbOptions = {}) {
|
||||||
|
this.options = {
|
||||||
|
storagePath: options.storagePath ?? './.nogit/cachedb',
|
||||||
|
dbName: options.dbName ?? 'gitops_cache',
|
||||||
|
debug: options.debug ?? false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
static getInstance(options?: ICacheDbOptions): CacheDb {
|
||||||
|
if (!CacheDb.instance) {
|
||||||
|
CacheDb.instance = new CacheDb(options);
|
||||||
|
}
|
||||||
|
return CacheDb.instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
static resetInstance(): void {
|
||||||
|
CacheDb.instance = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async start(): Promise<void> {
|
||||||
|
logger.info('Starting CacheDb...');
|
||||||
|
this.localTsmDb = new plugins.smartmongo.LocalTsmDb({
|
||||||
|
folderPath: this.options.storagePath,
|
||||||
|
});
|
||||||
|
const { connectionUri } = await this.localTsmDb.start();
|
||||||
|
|
||||||
|
this.smartdataDb = new plugins.smartdata.SmartdataDb({
|
||||||
|
mongoDbUrl: connectionUri,
|
||||||
|
mongoDbName: this.options.dbName,
|
||||||
|
});
|
||||||
|
await this.smartdataDb.init();
|
||||||
|
logger.success(`CacheDb started (db: ${this.options.dbName})`);
|
||||||
|
}
|
||||||
|
|
||||||
|
async stop(): Promise<void> {
|
||||||
|
logger.info('Stopping CacheDb...');
|
||||||
|
if (this.smartdataDb) {
|
||||||
|
await this.smartdataDb.close();
|
||||||
|
this.smartdataDb = null;
|
||||||
|
}
|
||||||
|
if (this.localTsmDb) {
|
||||||
|
// localDb.stop() may hang under Deno — fire-and-forget with timeout
|
||||||
|
const stopPromise = this.localTsmDb.stop().catch(() => {});
|
||||||
|
await Promise.race([
|
||||||
|
stopPromise,
|
||||||
|
new Promise<void>((resolve) => {
|
||||||
|
const id = setTimeout(resolve, 3000);
|
||||||
|
Deno.unrefTimer(id);
|
||||||
|
}),
|
||||||
|
]);
|
||||||
|
this.localTsmDb = null;
|
||||||
|
}
|
||||||
|
logger.success('CacheDb stopped');
|
||||||
|
}
|
||||||
|
|
||||||
|
getDb(): InstanceType<typeof plugins.smartdata.SmartdataDb> {
|
||||||
|
if (!this.smartdataDb) {
|
||||||
|
throw new Error('CacheDb not started. Call start() first.');
|
||||||
|
}
|
||||||
|
return this.smartdataDb;
|
||||||
|
}
|
||||||
|
}
|
||||||
32
ts/cache/documents/classes.cached.project.ts
vendored
Normal file
32
ts/cache/documents/classes.cached.project.ts
vendored
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
import * as plugins from '../../plugins.ts';
|
||||||
|
import { CacheDb } from '../classes.cachedb.ts';
|
||||||
|
import { CachedDocument, TTL } from '../classes.cached.document.ts';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cached project data from git providers. TTL: 5 minutes.
|
||||||
|
*/
|
||||||
|
@plugins.smartdata.Collection(() => CacheDb.getInstance().getDb())
|
||||||
|
export class CachedProject extends CachedDocument<CachedProject> {
|
||||||
|
@plugins.smartdata.unI()
|
||||||
|
public id: string = '';
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public connectionId: string = '';
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public projectName: string = '';
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public projectUrl: string = '';
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public description: string = '';
|
||||||
|
|
||||||
|
@plugins.smartdata.svDb()
|
||||||
|
public defaultBranch: string = '';
|
||||||
|
|
||||||
|
constructor() {
|
||||||
|
super();
|
||||||
|
this.setTTL(TTL.MINUTES_5);
|
||||||
|
}
|
||||||
|
}
|
||||||
1
ts/cache/documents/index.ts
vendored
Normal file
1
ts/cache/documents/index.ts
vendored
Normal file
@@ -0,0 +1 @@
|
|||||||
|
export { CachedProject } from './classes.cached.project.ts';
|
||||||
5
ts/cache/index.ts
vendored
Normal file
5
ts/cache/index.ts
vendored
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
export { CacheDb } from './classes.cachedb.ts';
|
||||||
|
export type { ICacheDbOptions } from './classes.cachedb.ts';
|
||||||
|
export { CachedDocument, TTL } from './classes.cached.document.ts';
|
||||||
|
export { CacheCleaner } from './classes.cache.cleaner.ts';
|
||||||
|
export * from './documents/index.ts';
|
||||||
@@ -2,41 +2,74 @@ import * as plugins from '../plugins.ts';
|
|||||||
import { logger } from '../logging.ts';
|
import { logger } from '../logging.ts';
|
||||||
import type * as interfaces from '../../ts_interfaces/index.ts';
|
import type * as interfaces from '../../ts_interfaces/index.ts';
|
||||||
import { BaseProvider, GiteaProvider, GitLabProvider } from '../providers/index.ts';
|
import { BaseProvider, GiteaProvider, GitLabProvider } from '../providers/index.ts';
|
||||||
|
import type { StorageManager } from '../storage/index.ts';
|
||||||
|
|
||||||
const CONNECTIONS_FILE = './.nogit/connections.json';
|
const LEGACY_CONNECTIONS_FILE = './.nogit/connections.json';
|
||||||
|
const CONNECTIONS_PREFIX = '/connections/';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Manages provider connections - persists to .nogit/connections.json
|
* Manages provider connections — persists each connection as an
|
||||||
* and creates provider instances on demand.
|
* individual JSON file via StorageManager.
|
||||||
*/
|
*/
|
||||||
export class ConnectionManager {
|
export class ConnectionManager {
|
||||||
private connections: interfaces.data.IProviderConnection[] = [];
|
private connections: interfaces.data.IProviderConnection[] = [];
|
||||||
|
private storageManager: StorageManager;
|
||||||
|
|
||||||
|
constructor(storageManager: StorageManager) {
|
||||||
|
this.storageManager = storageManager;
|
||||||
|
}
|
||||||
|
|
||||||
async init(): Promise<void> {
|
async init(): Promise<void> {
|
||||||
|
await this.migrateLegacyFile();
|
||||||
await this.loadConnections();
|
await this.loadConnections();
|
||||||
}
|
}
|
||||||
|
|
||||||
private async loadConnections(): Promise<void> {
|
/**
|
||||||
|
* One-time migration from the legacy .nogit/connections.json file.
|
||||||
|
*/
|
||||||
|
private async migrateLegacyFile(): Promise<void> {
|
||||||
try {
|
try {
|
||||||
const text = await Deno.readTextFile(CONNECTIONS_FILE);
|
const text = await Deno.readTextFile(LEGACY_CONNECTIONS_FILE);
|
||||||
this.connections = JSON.parse(text);
|
const legacy: interfaces.data.IProviderConnection[] = JSON.parse(text);
|
||||||
logger.info(`Loaded ${this.connections.length} connection(s)`);
|
if (legacy.length > 0) {
|
||||||
|
logger.info(`Migrating ${legacy.length} connection(s) from legacy file...`);
|
||||||
|
for (const conn of legacy) {
|
||||||
|
await this.storageManager.setJSON(`${CONNECTIONS_PREFIX}${conn.id}.json`, conn);
|
||||||
|
}
|
||||||
|
// Rename legacy file so migration doesn't repeat
|
||||||
|
await Deno.rename(LEGACY_CONNECTIONS_FILE, LEGACY_CONNECTIONS_FILE + '.migrated');
|
||||||
|
logger.success('Legacy connections migrated successfully');
|
||||||
|
}
|
||||||
} catch {
|
} catch {
|
||||||
this.connections = [];
|
// No legacy file or already migrated — nothing to do
|
||||||
logger.debug('No existing connections file found, starting fresh');
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
private async saveConnections(): Promise<void> {
|
private async loadConnections(): Promise<void> {
|
||||||
// Ensure .nogit directory exists
|
const keys = await this.storageManager.list(CONNECTIONS_PREFIX);
|
||||||
try {
|
this.connections = [];
|
||||||
await Deno.mkdir('./.nogit', { recursive: true });
|
for (const key of keys) {
|
||||||
} catch { /* already exists */ }
|
const conn = await this.storageManager.getJSON<interfaces.data.IProviderConnection>(key);
|
||||||
await Deno.writeTextFile(CONNECTIONS_FILE, JSON.stringify(this.connections, null, 2));
|
if (conn) {
|
||||||
|
this.connections.push(conn);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (this.connections.length > 0) {
|
||||||
|
logger.info(`Loaded ${this.connections.length} connection(s)`);
|
||||||
|
} else {
|
||||||
|
logger.debug('No existing connections found, starting fresh');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async persistConnection(conn: interfaces.data.IProviderConnection): Promise<void> {
|
||||||
|
await this.storageManager.setJSON(`${CONNECTIONS_PREFIX}${conn.id}.json`, conn);
|
||||||
|
}
|
||||||
|
|
||||||
|
private async removeConnection(id: string): Promise<void> {
|
||||||
|
await this.storageManager.delete(`${CONNECTIONS_PREFIX}${id}.json`);
|
||||||
}
|
}
|
||||||
|
|
||||||
getConnections(): interfaces.data.IProviderConnection[] {
|
getConnections(): interfaces.data.IProviderConnection[] {
|
||||||
// Return connections without exposing tokens
|
|
||||||
return this.connections.map((c) => ({ ...c, token: '***' }));
|
return this.connections.map((c) => ({ ...c, token: '***' }));
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -60,7 +93,7 @@ export class ConnectionManager {
|
|||||||
status: 'disconnected',
|
status: 'disconnected',
|
||||||
};
|
};
|
||||||
this.connections.push(connection);
|
this.connections.push(connection);
|
||||||
await this.saveConnections();
|
await this.persistConnection(connection);
|
||||||
logger.success(`Connection created: ${name} (${providerType})`);
|
logger.success(`Connection created: ${name} (${providerType})`);
|
||||||
return { ...connection, token: '***' };
|
return { ...connection, token: '***' };
|
||||||
}
|
}
|
||||||
@@ -74,7 +107,7 @@ export class ConnectionManager {
|
|||||||
if (updates.name) conn.name = updates.name;
|
if (updates.name) conn.name = updates.name;
|
||||||
if (updates.baseUrl) conn.baseUrl = updates.baseUrl.replace(/\/+$/, '');
|
if (updates.baseUrl) conn.baseUrl = updates.baseUrl.replace(/\/+$/, '');
|
||||||
if (updates.token) conn.token = updates.token;
|
if (updates.token) conn.token = updates.token;
|
||||||
await this.saveConnections();
|
await this.persistConnection(conn);
|
||||||
return { ...conn, token: '***' };
|
return { ...conn, token: '***' };
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -82,7 +115,7 @@ export class ConnectionManager {
|
|||||||
const idx = this.connections.findIndex((c) => c.id === id);
|
const idx = this.connections.findIndex((c) => c.id === id);
|
||||||
if (idx === -1) throw new Error(`Connection not found: ${id}`);
|
if (idx === -1) throw new Error(`Connection not found: ${id}`);
|
||||||
this.connections.splice(idx, 1);
|
this.connections.splice(idx, 1);
|
||||||
await this.saveConnections();
|
await this.removeConnection(id);
|
||||||
logger.info(`Connection deleted: ${id}`);
|
logger.info(`Connection deleted: ${id}`);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -91,7 +124,7 @@ export class ConnectionManager {
|
|||||||
const result = await provider.testConnection();
|
const result = await provider.testConnection();
|
||||||
const conn = this.connections.find((c) => c.id === id)!;
|
const conn = this.connections.find((c) => c.id === id)!;
|
||||||
conn.status = result.ok ? 'connected' : 'error';
|
conn.status = result.ok ? 'connected' : 'error';
|
||||||
await this.saveConnections();
|
await this.persistConnection(conn);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,25 +1,50 @@
|
|||||||
import { logger } from '../logging.ts';
|
import { logger } from '../logging.ts';
|
||||||
import { ConnectionManager } from './connectionmanager.ts';
|
import { ConnectionManager } from './connectionmanager.ts';
|
||||||
import { OpsServer } from '../opsserver/index.ts';
|
import { OpsServer } from '../opsserver/index.ts';
|
||||||
|
import { StorageManager } from '../storage/index.ts';
|
||||||
|
import { CacheDb, CacheCleaner, CachedProject } from '../cache/index.ts';
|
||||||
|
import { resolvePaths } from '../paths.ts';
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Main GitOps application orchestrator
|
* Main GitOps application orchestrator
|
||||||
*/
|
*/
|
||||||
export class GitopsApp {
|
export class GitopsApp {
|
||||||
|
public storageManager: StorageManager;
|
||||||
public connectionManager: ConnectionManager;
|
public connectionManager: ConnectionManager;
|
||||||
public opsServer: OpsServer;
|
public opsServer: OpsServer;
|
||||||
|
public cacheDb: CacheDb;
|
||||||
|
public cacheCleaner: CacheCleaner;
|
||||||
|
|
||||||
/**
 * Wire up all subsystems: persistent storage, connection management,
 * the TTL cache, and the ops HTTP server.
 */
constructor() {
  const resolved = resolvePaths();

  // Persistent key-value storage backs the connection manager.
  this.storageManager = new StorageManager({
    backend: 'filesystem',
    fsPath: resolved.defaultStoragePath,
  });
  this.connectionManager = new ConnectionManager(this.storageManager);

  // TTL'd provider-data cache plus the cleaner that expires its documents.
  this.cacheDb = CacheDb.getInstance({
    storagePath: resolved.defaultTsmDbPath,
    dbName: 'gitops_cache',
  });
  this.cacheCleaner = new CacheCleaner(this.cacheDb);
  this.cacheCleaner.registerClass(CachedProject);

  this.opsServer = new OpsServer(this);
}
|
||||||
|
|
||||||
async start(port = 3000): Promise<void> {
|
async start(port = 3000): Promise<void> {
|
||||||
logger.info('Initializing GitOps...');
|
logger.info('Initializing GitOps...');
|
||||||
|
|
||||||
|
// Start CacheDb
|
||||||
|
await this.cacheDb.start();
|
||||||
|
|
||||||
// Initialize connection manager (loads saved connections)
|
// Initialize connection manager (loads saved connections)
|
||||||
await this.connectionManager.init();
|
await this.connectionManager.init();
|
||||||
|
|
||||||
|
// Start CacheCleaner
|
||||||
|
this.cacheCleaner.start();
|
||||||
|
|
||||||
// Start OpsServer
|
// Start OpsServer
|
||||||
await this.opsServer.start(port);
|
await this.opsServer.start(port);
|
||||||
|
|
||||||
@@ -29,6 +54,8 @@ export class GitopsApp {
|
|||||||
/**
 * Gracefully shut down all subsystems.
 * Order matters: stop the HTTP server first so no new work arrives,
 * then the cache cleaner interval, then the cache database.
 */
async stop(): Promise<void> {
  logger.info('Shutting down GitOps...');
  await this.opsServer.stop();
  this.cacheCleaner.stop();
  await this.cacheDb.stop();
  logger.success('GitOps shutdown complete');
}
|
||||||
}
|
}
|
||||||
|
|||||||
19
ts/paths.ts
Normal file
19
ts/paths.ts
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
import * as path from '@std/path';
|
||||||
|
|
||||||
|
/** Resolved filesystem locations used by the gitops app. */
export interface IGitopsPaths {
  /** Root directory for all gitops state (defaults under $HOME/.serve.zone/gitops). */
  gitopsHomeDir: string;
  /** Default root directory for the StorageManager filesystem backend. */
  defaultStoragePath: string;
  /** Default directory for the tsmdb cache database. */
  defaultTsmDbPath: string;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve gitops paths. Accepts optional baseDir for test isolation.
|
||||||
|
*/
|
||||||
|
export function resolvePaths(baseDir?: string): IGitopsPaths {
|
||||||
|
const home = baseDir ?? path.join(Deno.env.get('HOME') ?? '/tmp', '.serve.zone', 'gitops');
|
||||||
|
return {
|
||||||
|
gitopsHomeDir: home,
|
||||||
|
defaultStoragePath: path.join(home, 'storage'),
|
||||||
|
defaultTsmDbPath: path.join(home, 'tsmdb'),
|
||||||
|
};
|
||||||
|
}
|
||||||
@@ -23,3 +23,8 @@ export { smartguard, smartjwt };
|
|||||||
import * as giteaClient from '@apiclient.xyz/gitea';
|
import * as giteaClient from '@apiclient.xyz/gitea';
|
||||||
import * as gitlabClient from '@apiclient.xyz/gitlab';
|
import * as gitlabClient from '@apiclient.xyz/gitlab';
|
||||||
export { giteaClient, gitlabClient };
|
export { giteaClient, gitlabClient };
|
||||||
|
|
||||||
|
// Database
|
||||||
|
import * as smartmongo from '@push.rocks/smartmongo';
|
||||||
|
import * as smartdata from '@push.rocks/smartdata';
|
||||||
|
export { smartmongo, smartdata };
|
||||||
|
|||||||
139
ts/storage/classes.storagemanager.ts
Normal file
139
ts/storage/classes.storagemanager.ts
Normal file
@@ -0,0 +1,139 @@
|
|||||||
|
import * as path from '@std/path';
|
||||||
|
|
||||||
|
export type TStorageBackend = 'filesystem' | 'memory';
|
||||||
|
|
||||||
|
export interface IStorageConfig {
|
||||||
|
backend?: TStorageBackend;
|
||||||
|
fsPath?: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Key-value storage abstraction with filesystem and memory backends.
|
||||||
|
* Keys must start with '/' and are normalized (no '..', no double slashes).
|
||||||
|
*/
|
||||||
|
export class StorageManager {
|
||||||
|
private backend: TStorageBackend;
|
||||||
|
private fsPath: string;
|
||||||
|
private memoryStore: Map<string, string>;
|
||||||
|
|
||||||
|
constructor(config: IStorageConfig = {}) {
|
||||||
|
this.backend = config.backend ?? 'filesystem';
|
||||||
|
this.fsPath = config.fsPath ?? './storage';
|
||||||
|
this.memoryStore = new Map();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Normalize and validate a storage key.
|
||||||
|
*/
|
||||||
|
private normalizeKey(key: string): string {
|
||||||
|
if (!key.startsWith('/')) {
|
||||||
|
throw new Error(`Storage key must start with '/': ${key}`);
|
||||||
|
}
|
||||||
|
// Strip '..' segments and normalize double slashes
|
||||||
|
const segments = key.split('/').filter((s) => s !== '' && s !== '..');
|
||||||
|
return '/' + segments.join('/');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Resolve a key to a filesystem path.
|
||||||
|
*/
|
||||||
|
private keyToPath(key: string): string {
|
||||||
|
const normalized = this.normalizeKey(key);
|
||||||
|
return path.join(this.fsPath, ...normalized.split('/').filter(Boolean));
|
||||||
|
}
|
||||||
|
|
||||||
|
async get(key: string): Promise<string | null> {
|
||||||
|
const normalized = this.normalizeKey(key);
|
||||||
|
if (this.backend === 'memory') {
|
||||||
|
return this.memoryStore.get(normalized) ?? null;
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
return await Deno.readTextFile(this.keyToPath(normalized));
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Deno.errors.NotFound) return null;
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async set(key: string, value: string): Promise<void> {
|
||||||
|
const normalized = this.normalizeKey(key);
|
||||||
|
if (this.backend === 'memory') {
|
||||||
|
this.memoryStore.set(normalized, value);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const filePath = this.keyToPath(normalized);
|
||||||
|
const dir = path.dirname(filePath);
|
||||||
|
await Deno.mkdir(dir, { recursive: true });
|
||||||
|
// Atomic write: write to temp then rename
|
||||||
|
const tmpPath = filePath + '.tmp';
|
||||||
|
await Deno.writeTextFile(tmpPath, value);
|
||||||
|
await Deno.rename(tmpPath, filePath);
|
||||||
|
}
|
||||||
|
|
||||||
|
async delete(key: string): Promise<boolean> {
|
||||||
|
const normalized = this.normalizeKey(key);
|
||||||
|
if (this.backend === 'memory') {
|
||||||
|
return this.memoryStore.delete(normalized);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await Deno.remove(this.keyToPath(normalized));
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Deno.errors.NotFound) return false;
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async exists(key: string): Promise<boolean> {
|
||||||
|
const normalized = this.normalizeKey(key);
|
||||||
|
if (this.backend === 'memory') {
|
||||||
|
return this.memoryStore.has(normalized);
|
||||||
|
}
|
||||||
|
try {
|
||||||
|
await Deno.stat(this.keyToPath(normalized));
|
||||||
|
return true;
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Deno.errors.NotFound) return false;
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List keys under a given prefix.
|
||||||
|
*/
|
||||||
|
async list(prefix: string): Promise<string[]> {
|
||||||
|
const normalized = this.normalizeKey(prefix);
|
||||||
|
if (this.backend === 'memory') {
|
||||||
|
const keys: string[] = [];
|
||||||
|
for (const key of this.memoryStore.keys()) {
|
||||||
|
if (key.startsWith(normalized)) {
|
||||||
|
keys.push(key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return keys.sort();
|
||||||
|
}
|
||||||
|
const dirPath = this.keyToPath(normalized);
|
||||||
|
const keys: string[] = [];
|
||||||
|
try {
|
||||||
|
for await (const entry of Deno.readDir(dirPath)) {
|
||||||
|
if (entry.isFile) {
|
||||||
|
keys.push(normalized.replace(/\/$/, '') + '/' + entry.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (err) {
|
||||||
|
if (err instanceof Deno.errors.NotFound) return [];
|
||||||
|
throw err;
|
||||||
|
}
|
||||||
|
return keys.sort();
|
||||||
|
}
|
||||||
|
|
||||||
|
async getJSON<T>(key: string): Promise<T | null> {
|
||||||
|
const raw = await this.get(key);
|
||||||
|
if (raw === null) return null;
|
||||||
|
return JSON.parse(raw) as T;
|
||||||
|
}
|
||||||
|
|
||||||
|
async setJSON(key: string, value: unknown): Promise<void> {
|
||||||
|
await this.set(key, JSON.stringify(value, null, 2));
|
||||||
|
}
|
||||||
|
}
|
||||||
2
ts/storage/index.ts
Normal file
2
ts/storage/index.ts
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
export { StorageManager } from './classes.storagemanager.ts';
|
||||||
|
export type { IStorageConfig, TStorageBackend } from './classes.storagemanager.ts';
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
// Build-time commit metadata for this package; version should track the
// release recorded in the changelog (2.3.0 as of this commit).
export const commitinfo = {
  name: '@serve.zone/gitops',
  version: '2.3.0',
  description: 'GitOps management app for Gitea and GitLab - manage secrets, browse projects, view CI pipelines, and stream build logs'
}
|
||||||
|
|||||||
Reference in New Issue
Block a user