Compare commits

...

4 Commits

Author SHA1 Message Date
d46fd1590e 1.6.0
Some checks failed
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-11-02 23:07:59 +00:00
1d7317f063 feat(context): Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements 2025-11-02 23:07:59 +00:00
fe5121ec9c 1.5.2
Some checks failed
Default (tags) / security (push) Failing after 1s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-09-07 07:54:04 +00:00
c084b20390 fix(package): Bump dependencies, refine test script and imports, and overhaul README and docs 2025-09-07 07:54:04 +00:00
18 changed files with 4857 additions and 3463 deletions

View File

@@ -1,5 +1,26 @@
# Changelog
## 2025-11-02 - 1.6.0 - feat(context)
Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements
- Add ContextAnalyzer for dependency-based file scoring and prioritization (PageRank-like centrality, relevance, efficiency, recency)
- Add LazyFileLoader to scan metadata and load files in parallel with lightweight token estimates
- Add ContextCache for persistent file content/token caching with TTL and max-size eviction
- Enhance ContextTrimmer with tier-based trimming and configurable light/aggressive levels
- Integrate new components into EnhancedContext and TaskContextFactory to build task-aware, token-optimized contexts
- Extend ConfigManager and types to support cache, analyzer, prioritization weights and tier configs (npmextra.json driven)
- Add comprehensive unit tests for ContextAnalyzer, ContextCache and LazyFileLoader
- Update README with Smart Context Building docs, examples, configuration options and CI workflow snippet
## 2025-09-07 - 1.5.2 - fix(package)
Bump dependencies, refine test script and imports, and overhaul README and docs
- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
- Updated test script to run tstest with verbose, logfile and increased timeout; adjusted testCli script invocation.
- Fixed test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
- Large README rewrite: reorganized and expanded content, added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
- Minor clarification added to commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).
## 2025-08-16 - 1.5.1 - fix(aidoc)
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()

View File

@@ -1,6 +1,6 @@
{
"name": "@git.zone/tsdoc",
"version": "1.5.1",
"version": "1.6.0",
"private": false,
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
"type": "module",
@@ -13,36 +13,35 @@
"tsdoc": "cli.js"
},
"scripts": {
"test": "(tstest test/) && npm run testCli",
"test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
"testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
"build": "(tsbuild --web --allowimplicitany)",
"buildDocs": "tsdoc"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.6.4",
"@git.zone/tsbuild": "^2.6.8",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^2.3.2",
"@push.rocks/tapbundle": "^6.0.3",
"@git.zone/tstest": "^2.3.6",
"@types/node": "^22.15.17"
},
"dependencies": {
"@git.zone/tspublish": "^1.10.1",
"@git.zone/tspublish": "^1.10.3",
"@push.rocks/early": "^4.0.3",
"@push.rocks/npmextra": "^5.3.1",
"@push.rocks/qenv": "^6.1.2",
"@push.rocks/npmextra": "^5.3.3",
"@push.rocks/qenv": "^6.1.3",
"@push.rocks/smartai": "^0.5.11",
"@push.rocks/smartcli": "^4.0.11",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.2.5",
"@push.rocks/smartfile": "^11.2.7",
"@push.rocks/smartgit": "^3.2.1",
"@push.rocks/smartinteract": "^2.0.15",
"@push.rocks/smartlog": "^3.1.8",
"@push.rocks/smartlog": "^3.1.9",
"@push.rocks/smartlog-destination-local": "^9.0.2",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartshell": "^3.2.4",
"@push.rocks/smartshell": "^3.3.0",
"@push.rocks/smarttime": "^4.0.6",
"gpt-tokenizer": "^2.9.0",
"typedoc": "^0.28.10",
"gpt-tokenizer": "^3.0.1",
"typedoc": "^0.28.12",
"typescript": "^5.9.2"
},
"files": [

4344
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

1192
readme.md

File diff suppressed because it is too large Load Diff

View File

@@ -1,4 +1,4 @@
import { tap, expect } from '@push.rocks/tapbundle';
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
let testQenv = new qenv.Qenv('./', '.nogit/');

View File

@@ -0,0 +1,464 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { ContextAnalyzer } from '../ts/context/context-analyzer.js';
import type { IFileMetadata } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
// Smoke test: the analyzer constructs with built-in default prioritization weights.
tap.test('ContextAnalyzer should create instance with default weights', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});
// Smoke test: the constructor accepts custom weights (here summing to 1.0).
tap.test('ContextAnalyzer should create instance with custom weights', async () => {
const analyzer = new ContextAnalyzer(
testProjectRoot,
{
dependencyWeight: 0.5,
relevanceWeight: 0.3,
efficiencyWeight: 0.1,
recencyWeight: 0.1
}
);
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});
// analyze() should produce a fully populated result for a small metadata set.
tap.test('ContextAnalyzer.analyze should return analysis result with files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    }
  ];
  const result = await analyzer.analyze(metadata, 'readme');
  expect(result.taskType).toEqual('readme');
  expect(result.files.length).toEqual(2);
  expect(result.totalFiles).toEqual(2);
  // Date.now() has millisecond resolution, so a fast analysis can legitimately
  // report a 0ms duration; requiring strictly > 0 made this assertion flaky.
  expect(result.analysisDuration).toBeGreaterThanOrEqual(0);
  expect(result.dependencyGraph).toBeDefined();
});
// importanceScore is normalized: every file's score must land in [0, 1].
tap.test('ContextAnalyzer.analyze should assign importance scores to files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.files[0].importanceScore).toBeGreaterThanOrEqual(0);
expect(result.files[0].importanceScore).toBeLessThanOrEqual(1);
});
// Result ordering contract: files come back sorted by descending importance.
tap.test('ContextAnalyzer.analyze should sort files by importance score', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
relativePath: 'test/test.basic.node.ts',
size: 2000,
mtime: Date.now(),
estimatedTokens: 500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Files should be sorted by importance (highest first)
// Pairwise check over adjacent entries verifies a total descending order.
for (let i = 0; i < result.files.length - 1; i++) {
expect(result.files[i].importanceScore).toBeGreaterThanOrEqual(
result.files[i + 1].importanceScore
);
}
});
// Each analyzed file must be assigned one of the four known tier labels.
tap.test('ContextAnalyzer.analyze should assign tiers based on scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
const file = result.files[0];
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});
// For README generation, entry points (index.ts) must outrank helper modules.
tap.test('ContextAnalyzer should prioritize index.ts files for README task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'ts/some-helper.ts'),
      relativePath: 'ts/some-helper.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];
  const result = await analyzer.analyze(metadata, 'readme');
  const indexFile = result.files.find(f => f.path.includes('index.ts'));
  const helperFile = result.files.find(f => f.path.includes('some-helper.ts'));
  // The original `if (indexFile && helperFile)` guard let this test pass
  // vacuously when either lookup failed; require both files explicitly.
  expect(indexFile).toBeDefined();
  expect(helperFile).toBeDefined();
  expect(indexFile!.relevanceScore).toBeGreaterThan(helperFile!.relevanceScore);
});
// For README generation, production sources should score above test files.
tap.test('ContextAnalyzer should deprioritize test files for README task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
      relativePath: 'test/test.basic.node.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];
  const result = await analyzer.analyze(metadata, 'readme');
  const sourceFile = result.files.find(f => f.path.includes('ts/context/types.ts'));
  const testFile = result.files.find(f => f.path.includes('test/test.basic.node.ts'));
  // Fail loudly if a lookup misses — the old `if (a && b)` guard silently
  // skipped the assertion and turned this into a vacuous test.
  expect(sourceFile).toBeDefined();
  expect(testFile).toBeDefined();
  expect(sourceFile!.relevanceScore).toBeGreaterThan(testFile!.relevanceScore);
});
// For commit-message generation, files in the changed set get full recency
// credit (1.0) while untouched files get none (0).
tap.test('ContextAnalyzer should prioritize changed files for commit task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  const changedFile = path.join(testProjectRoot, 'ts/context/types.ts');
  const unchangedFile = path.join(testProjectRoot, 'ts/index.ts');
  const metadata: IFileMetadata[] = [
    {
      path: changedFile,
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: unchangedFile,
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];
  const result = await analyzer.analyze(metadata, 'commit', [changedFile]);
  const changed = result.files.find(f => f.path === changedFile);
  const unchanged = result.files.find(f => f.path === unchangedFile);
  // Require both lookups to succeed instead of silently skipping the checks.
  expect(changed).toBeDefined();
  expect(unchanged).toBeDefined();
  // Changed file should have recency score of 1.0
  expect(changed!.recencyScore).toEqual(1.0);
  // Unchanged file should have recency score of 0
  expect(unchanged!.recencyScore).toEqual(0);
});
// A file near the presumed optimal size should be scored as more
// token-efficient than a much larger one.
tap.test('ContextAnalyzer should calculate efficiency scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000, // Optimal size
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/very-large-file.ts'),
      relativePath: 'ts/very-large-file.ts',
      size: 50000, // Too large
      mtime: Date.now(),
      estimatedTokens: 12500
    }
  ];
  const result = await analyzer.analyze(metadata, 'readme');
  const optimalFile = result.files.find(f => f.path.includes('types.ts'));
  const largeFile = result.files.find(f => f.path.includes('very-large-file.ts'));
  // Fail loudly if either file is missing rather than skipping the comparison
  // (the previous `if` guard made this test vacuous on lookup failure).
  expect(optimalFile).toBeDefined();
  expect(largeFile).toBeDefined();
  expect(optimalFile!.efficiencyScore).toBeGreaterThan(largeFile!.efficiencyScore);
});
// The analyzer must return a dependency-graph entry for every input file,
// carrying imports, importers and a centrality value.
tap.test('ContextAnalyzer should build dependency graph', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
},
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.dependencyGraph.size).toBeGreaterThan(0);
// Check that each file has dependency info
for (const meta of metadata) {
const deps = result.dependencyGraph.get(meta.path);
expect(deps).toBeDefined();
expect(deps!.path).toEqual(meta.path);
expect(deps!.imports).toBeDefined();
expect(deps!.importedBy).toBeDefined();
expect(deps!.centrality).toBeGreaterThanOrEqual(0);
}
});
// Centrality is normalized: every graph node's score must lie in [0, 1].
tap.test('ContextAnalyzer should calculate centrality scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// All centrality scores should be between 0 and 1
for (const [, deps] of result.dependencyGraph) {
expect(deps.centrality).toBeGreaterThanOrEqual(0);
expect(deps.centrality).toBeLessThanOrEqual(1);
}
});
// Centrality should reflect how often a file is imported. With only two files
// in the metadata set the strict ordering typesDeps > testDeps is not
// guaranteed, so this test pins the valid range but now fails loudly if
// either graph entry is missing (the old `if` guard skipped all assertions).
tap.test('ContextAnalyzer should assign higher centrality to highly imported files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  // types.ts is likely imported by many files
  const typesPath = path.join(testProjectRoot, 'ts/context/types.ts');
  // A test file is likely imported by fewer files
  const testPath = path.join(testProjectRoot, 'test/test.basic.node.ts');
  const metadata: IFileMetadata[] = [
    {
      path: typesPath,
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: testPath,
      relativePath: 'test/test.basic.node.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];
  const result = await analyzer.analyze(metadata, 'readme');
  const typesDeps = result.dependencyGraph.get(typesPath);
  const testDeps = result.dependencyGraph.get(testPath);
  expect(typesDeps).toBeDefined();
  expect(testDeps).toBeDefined();
  expect(typesDeps!.centrality).toBeGreaterThanOrEqual(0);
  expect(typesDeps!.centrality).toBeLessThanOrEqual(1);
  expect(testDeps!.centrality).toBeGreaterThanOrEqual(0);
  expect(testDeps!.centrality).toBeLessThanOrEqual(1);
});
// Each scored file should carry a non-empty human-readable scoring rationale.
tap.test('ContextAnalyzer should provide reason for scoring', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.files[0].reason).toBeDefined();
expect(result.files[0].reason!.length).toBeGreaterThan(0);
});
// Edge case: an empty input must yield an empty result, not an error.
tap.test('ContextAnalyzer should handle empty metadata array', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const result = await analyzer.analyze([], 'readme');
expect(result.files.length).toEqual(0);
expect(result.totalFiles).toEqual(0);
expect(result.dependencyGraph.size).toEqual(0);
});
// Custom per-tier minScore/trimLevel thresholds are accepted by the
// constructor; the assigned tier must still be one of the known labels.
tap.test('ContextAnalyzer should respect custom tier configuration', async () => {
const analyzer = new ContextAnalyzer(
testProjectRoot,
{},
{
essential: { minScore: 0.9, trimLevel: 'none' },
important: { minScore: 0.7, trimLevel: 'light' },
optional: { minScore: 0.5, trimLevel: 'aggressive' }
}
);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Should use custom tier thresholds
const file = result.files[0];
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});
// With equal weights of 0.25, importanceScore must equal the plain average of
// the four component scores (relevance, centrality, efficiency, recency).
tap.test('ContextAnalyzer should calculate combined importance score from all factors', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot, {
dependencyWeight: 0.25,
relevanceWeight: 0.25,
efficiencyWeight: 0.25,
recencyWeight: 0.25
});
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
}
];
const result = await analyzer.analyze(metadata, 'readme');
const file = result.files[0];
// Importance score should be weighted sum of all factors
// With equal weights (0.25 each), importance should be average of all scores
const expectedImportance =
(file.relevanceScore * 0.25) +
(file.centralityScore * 0.25) +
(file.efficiencyScore * 0.25) +
(file.recencyScore * 0.25);
expect(file.importanceScore).toBeCloseTo(expectedImportance, 2);
});
// Performance smoke test: analyzing 10 (mostly nonexistent) files must finish
// well within the 10-second budget.
tap.test('ContextAnalyzer should complete analysis within reasonable time', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  const metadata: IFileMetadata[] = Array.from({ length: 10 }, (_, i) => ({
    path: path.join(testProjectRoot, `ts/file${i}.ts`),
    relativePath: `ts/file${i}.ts`,
    size: 3000,
    mtime: Date.now(),
    estimatedTokens: 750
  }));
  const startTime = Date.now();
  const result = await analyzer.analyze(metadata, 'readme');
  const endTime = Date.now();
  const duration = endTime - startTime;
  // Date.now() has millisecond resolution, so a fast run can report 0ms;
  // requiring strictly > 0 made this assertion flaky.
  expect(result.analysisDuration).toBeGreaterThanOrEqual(0);
  expect(duration).toBeLessThan(10000); // Should complete within 10 seconds
});
export default tap.start();

View File

@@ -0,0 +1,456 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import * as fs from 'fs';
import { ContextCache } from '../ts/context/context-cache.js';
import type { ICacheEntry } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
const testCacheDir = path.join(testProjectRoot, '.nogit', 'test-cache');
// Test fixture teardown: remove the on-disk cache directory between tests.
// `force: true` tolerates a missing directory, and any other failure is
// deliberately swallowed so teardown never masks the assertion under test.
async function cleanupTestCache() {
  await fs.promises
    .rm(testCacheDir, { recursive: true, force: true })
    .catch(() => undefined);
}
// Smoke test: cache constructs with an explicit directory and enabled flag.
tap.test('ContextCache should create instance with default config', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
expect(cache).toBeInstanceOf(ContextCache);
await cleanupTestCache();
});
// init() must create the backing directory on disk.
tap.test('ContextCache.init should create cache directory', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
// Check that cache directory was created
// fs.access resolves when the path exists; map resolve/reject to a boolean.
const exists = await fs.promises.access(testCacheDir).then(() => true).catch(() => false);
expect(exists).toBe(true);
await cleanupTestCache();
});
// Round-trip: set() followed by get() returns the stored contents and tokens.
// NOTE(review): mtime is Date.now(), i.e. newer than package.json's real
// mtime — this appears to keep the entry valid; confirm against get()'s
// invalidation rule.
tap.test('ContextCache.set should store cache entry', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get(testPath);
expect(retrieved).toBeDefined();
expect(retrieved!.contents).toEqual('test content');
expect(retrieved!.tokenCount).toEqual(100);
await cleanupTestCache();
});
// Miss behavior: get() returns null (not undefined, not a throw) on a miss.
tap.test('ContextCache.get should return null for non-existent entry', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const retrieved = await cache.get('/non/existent/path.ts');
expect(retrieved).toBeNull();
await cleanupTestCache();
});
// TTL: an entry cached longer ago than the configured ttl must be treated
// as expired and return null.
tap.test('ContextCache.get should invalidate expired entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true,
ttl: 1 // 1 second TTL
});
await cache.init();
const testPath = path.join(testProjectRoot, 'test-file.ts');
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 2000 // Cached 2 seconds ago (expired)
};
await cache.set(entry);
// Wait a bit to ensure expiration logic runs
await new Promise(resolve => setTimeout(resolve, 100));
const retrieved = await cache.get(testPath);
expect(retrieved).toBeNull(); // Should be expired
await cleanupTestCache();
});
// mtime invalidation: a cached mtime older than the file's real mtime means
// the file changed since caching, so get() must return null.
tap.test('ContextCache.get should invalidate entries when file mtime changes', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const oldMtime = Math.floor(stats.mtimeMs);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: oldMtime - 1000, // Old mtime (file has changed)
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get(testPath);
expect(retrieved).toBeNull(); // Should be invalidated due to mtime mismatch
await cleanupTestCache();
});
// has() reports presence of a *valid* entry; it must also be false for
// never-cached paths.
tap.test('ContextCache.has should check if file is cached and valid', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Math.floor(stats.mtimeMs),
cachedAt: Date.now()
};
await cache.set(entry);
const hasIt = await cache.has(testPath);
expect(hasIt).toBe(true);
const doesNotHaveIt = await cache.has('/non/existent/path.ts');
expect(doesNotHaveIt).toBe(false);
await cleanupTestCache();
});
// Bulk insert: setMany() should make all entries visible in the stats.
tap.test('ContextCache.setMany should store multiple entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: 'content 1',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
},
{
path: '/test/file2.ts',
contents: 'content 2',
tokenCount: 200,
mtime: Date.now(),
cachedAt: Date.now()
}
];
await cache.setMany(entries);
const stats = cache.getStats();
expect(stats.entries).toBeGreaterThanOrEqual(2);
await cleanupTestCache();
});
// getStats() exposes entry count, byte size and oldest/newest timestamps.
tap.test('ContextCache.getStats should return cache statistics', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content with some length',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const stats = cache.getStats();
expect(stats.entries).toEqual(1);
expect(stats.totalSize).toBeGreaterThan(0);
expect(stats.oldestEntry).toBeDefined();
expect(stats.newestEntry).toBeDefined();
await cleanupTestCache();
});
// clear() empties the cache entirely.
tap.test('ContextCache.clear should clear all entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
expect(cache.getStats().entries).toEqual(1);
await cache.clear();
expect(cache.getStats().entries).toEqual(0);
await cleanupTestCache();
});
// clearPaths() removes only the named entries, leaving the rest intact.
tap.test('ContextCache.clearPaths should clear specific entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: 'content 1',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
},
{
path: '/test/file2.ts',
contents: 'content 2',
tokenCount: 200,
mtime: Date.now(),
cachedAt: Date.now()
}
];
await cache.setMany(entries);
expect(cache.getStats().entries).toEqual(2);
await cache.clearPaths(['/test/file1.ts']);
expect(cache.getStats().entries).toEqual(1);
await cleanupTestCache();
});
// Size-based eviction: with a 1KB cap and three ~500-byte entries, the cache
// must evict the oldest (smallest cachedAt) entries to stay under the limit.
tap.test('ContextCache should enforce max size by evicting oldest entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true,
maxSize: 0.001 // Very small: 0.001 MB = 1KB
});
await cache.init();
// Add entries that exceed the max size
const largeContent = 'x'.repeat(500); // 500 bytes
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 3000 // Oldest
},
{
path: '/test/file2.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 2000
},
{
path: '/test/file3.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 1000 // Newest
}
];
await cache.setMany(entries);
const stats = cache.getStats();
// Should have evicted oldest entries to stay under size limit
expect(stats.totalSize).toBeLessThanOrEqual(1024); // 1KB
await cleanupTestCache();
});
// With enabled: false the cache is a no-op: set() stores nothing and
// get() always misses.
tap.test('ContextCache should not cache when disabled', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: false
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get('/test/file.ts');
expect(retrieved).toBeNull();
await cleanupTestCache();
});
// Persistence: a second instance pointed at the same directory reloads the
// entries written by the first.
tap.test('ContextCache should persist to disk and reload', async () => {
await cleanupTestCache();
// Create first cache instance and add entry
const cache1 = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache1.init();
const entry: ICacheEntry = {
path: '/test/persistent-file.ts',
contents: 'persistent content',
tokenCount: 150,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache1.set(entry);
// Wait for persist
// NOTE(review): a fixed 500ms sleep assumes the write flushes in time;
// an explicit flush/close API would make this deterministic.
await new Promise(resolve => setTimeout(resolve, 500));
// Create second cache instance (should reload from disk)
const cache2 = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache2.init();
const stats = cache2.getStats();
expect(stats.entries).toBeGreaterThan(0);
await cleanupTestCache();
});
// Corruption handling: a malformed index.json must not throw — init() falls
// back to an empty cache.
tap.test('ContextCache should handle invalid cache index gracefully', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
// Create cache dir manually
await fs.promises.mkdir(testCacheDir, { recursive: true });
// Write invalid JSON to cache index
const cacheIndexPath = path.join(testCacheDir, 'index.json');
await fs.promises.writeFile(cacheIndexPath, 'invalid json {', 'utf-8');
// Should not throw, should just start with empty cache
await cache.init();
const stats = cache.getStats();
expect(stats.entries).toEqual(0);
await cleanupTestCache();
});
// Empty-cache stats: zero counts and null oldest/newest timestamps.
tap.test('ContextCache should return proper stats for empty cache', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const stats = cache.getStats();
expect(stats.entries).toEqual(0);
expect(stats.totalSize).toEqual(0);
expect(stats.oldestEntry).toBeNull();
expect(stats.newestEntry).toBeNull();
await cleanupTestCache();
});
export default tap.start();

View File

@@ -0,0 +1,242 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { LazyFileLoader } from '../ts/context/lazy-file-loader.js';
import type { IFileMetadata } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
// Smoke test: the loader constructs from a project root path.
tap.test('LazyFileLoader should create instance with project root', async () => {
const loader = new LazyFileLoader(testProjectRoot);
expect(loader).toBeInstanceOf(LazyFileLoader);
});
// getMetadata() must return stat-derived fields and a token estimate without
// reading the file contents.
tap.test('LazyFileLoader.getMetadata should return file metadata without loading contents', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');
  const metadata = await loader.getMetadata(packageJsonPath);
  expect(metadata.path).toEqual(packageJsonPath);
  expect(metadata.relativePath).toEqual('package.json');
  expect(metadata.size).toBeGreaterThan(0);
  expect(metadata.mtime).toBeGreaterThan(0);
  expect(metadata.estimatedTokens).toBeGreaterThan(0);
  // Rough estimate: ~1 token per 4 bytes. The previous
  // `toBeCloseTo(metadata.size / 4, 10)` demanded agreement to 10 decimal
  // places, which fails whenever the estimate is rounded to a whole token;
  // assert the intended tolerance (within one token) explicitly instead.
  expect(Math.abs(metadata.estimatedTokens - metadata.size / 4)).toBeLessThanOrEqual(1);
});
// Repeated getMetadata() calls for the same path must be served from the
// in-memory cache and therefore return identical values.
tap.test('LazyFileLoader.getMetadata should cache metadata for same file', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata1 = await loader.getMetadata(packageJsonPath);
const metadata2 = await loader.getMetadata(packageJsonPath);
// Should return identical metadata from cache
expect(metadata1.mtime).toEqual(metadata2.mtime);
expect(metadata1.size).toEqual(metadata2.size);
expect(metadata1.estimatedTokens).toEqual(metadata2.estimatedTokens);
});
// scanFiles() resolves a glob to metadata entries with sizes and estimates.
tap.test('LazyFileLoader.scanFiles should scan TypeScript files', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles(['ts/context/types.ts']);
expect(metadata.length).toBeGreaterThan(0);
const typesFile = metadata.find(m => m.relativePath.includes('types.ts'));
expect(typesFile).toBeDefined();
expect(typesFile!.size).toBeGreaterThan(0);
expect(typesFile!.estimatedTokens).toBeGreaterThan(0);
});
// Multiple globs in one call: both named files must appear in the result.
tap.test('LazyFileLoader.scanFiles should handle multiple globs', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles([
'package.json',
'readme.md'
]);
expect(metadata.length).toBeGreaterThanOrEqual(2);
const hasPackageJson = metadata.some(m => m.relativePath === 'package.json');
// Case-insensitive match tolerates README.md vs readme.md on disk.
const hasReadme = metadata.some(m => m.relativePath.toLowerCase() === 'readme.md');
expect(hasPackageJson).toBe(true);
expect(hasReadme).toBe(true);
});
// loadFile() reads the file and counts tokens via the injected tokenizer
// (here a simple length/4 stand-in).
tap.test('LazyFileLoader.loadFile should load file with actual token count', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
expect(fileInfo.path).toEqual(packageJsonPath);
expect(fileInfo.contents).toBeDefined();
expect(fileInfo.contents.length).toBeGreaterThan(0);
expect(fileInfo.tokenCount).toBeGreaterThan(0);
expect(fileInfo.relativePath).toEqual('package.json');
});
// loadFiles() loads a batch; the generous 5s bound only guards against
// pathological serialization, not exact parallelism.
tap.test('LazyFileLoader.loadFiles should load multiple files in parallel', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'package.json'),
relativePath: 'package.json',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
},
{
path: path.join(testProjectRoot, 'readme.md'),
relativePath: 'readme.md',
size: 200,
mtime: Date.now(),
estimatedTokens: 50
}
];
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const startTime = Date.now();
const files = await loader.loadFiles(metadata, tokenizer);
const endTime = Date.now();
expect(files.length).toEqual(2);
expect(files[0].contents).toBeDefined();
expect(files[1].contents).toBeDefined();
// Should be fast (parallel loading)
expect(endTime - startTime).toBeLessThan(5000); // 5 seconds max
});
// updateImportanceScores() mutates cached metadata entries in place.
tap.test('LazyFileLoader.updateImportanceScores should update cached metadata', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
// Get initial metadata
await loader.getMetadata(packageJsonPath);
// Update importance scores
const scores = new Map<string, number>();
scores.set(packageJsonPath, 0.95);
loader.updateImportanceScores(scores);
// Check cached metadata has updated score
const cached = loader.getCachedMetadata();
const packageJsonMeta = cached.find(m => m.path === packageJsonPath);
expect(packageJsonMeta).toBeDefined();
expect(packageJsonMeta!.importanceScore).toEqual(0.95);
});
// getTotalEstimatedTokens() sums estimates across the metadata cache.
tap.test('LazyFileLoader.getTotalEstimatedTokens should sum all cached metadata tokens', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan some files
await loader.scanFiles(['package.json', 'readme.md']);
const totalTokens = loader.getTotalEstimatedTokens();
expect(totalTokens).toBeGreaterThan(0);
});
// clearCache() empties the metadata cache.
tap.test('LazyFileLoader.clearCache should clear metadata cache', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan files to populate cache
await loader.scanFiles(['package.json']);
expect(loader.getCachedMetadata().length).toBeGreaterThan(0);
// Clear cache
loader.clearCache();
expect(loader.getCachedMetadata().length).toEqual(0);
});
// Verifies getCachedMetadata() returns every entry recorded by a scan, and
// that each entry is fully populated.
tap.test('LazyFileLoader.getCachedMetadata should return all cached entries', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Populate the metadata cache by scanning two known files.
await loader.scanFiles(['package.json', 'readme.md']);
const cached = loader.getCachedMetadata();
expect(cached.length).toBeGreaterThanOrEqual(2);
// Check each field explicitly. The previous truthiness check
// (m.path && m.size && m.estimatedTokens) would wrongly fail for a
// legitimate zero-byte file, where size === 0 and estimatedTokens === 0.
const allWellFormed = cached.every(
(m) =>
typeof m.path === 'string' &&
m.path.length > 0 &&
typeof m.size === 'number' &&
m.size >= 0 &&
typeof m.estimatedTokens === 'number' &&
m.estimatedTokens >= 0
);
expect(allWellFormed).toBe(true);
});
// Verifies getMetadata() rejects (rather than returning a placeholder) when
// the target file does not exist.
tap.test('LazyFileLoader should handle non-existent files gracefully', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const nonExistentPath = path.join(testProjectRoot, 'this-file-does-not-exist.ts');
try {
await loader.getMetadata(nonExistentPath);
expect(false).toBe(true); // Should not reach here
} catch (error) {
expect(error).toBeDefined();
}
});
// Verifies loadFiles() drops entries whose underlying file cannot be read
// instead of rejecting the whole batch.
tap.test('LazyFileLoader.loadFiles should filter out failed file loads', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'package.json'),
relativePath: 'package.json',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
},
{
path: path.join(testProjectRoot, 'non-existent-file.txt'),
relativePath: 'non-existent-file.txt',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
}
];
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const files = await loader.loadFiles(metadata, tokenizer);
// Should only include the successfully loaded file
expect(files.length).toEqual(1);
expect(files[0].relativePath).toEqual('package.json');
});
// Verifies directory glob patterns match the project's own context sources.
// Assumes the repo layout contains ts/context/enhanced-context.ts and
// ts/context/types.ts (true for this project).
tap.test('LazyFileLoader should handle glob patterns for TypeScript source files', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles(['ts/context/*.ts']);
expect(metadata.length).toBeGreaterThan(0);
// Should find multiple context files
const hasEnhancedContext = metadata.some(m => m.relativePath.includes('enhanced-context.ts'));
const hasTypes = metadata.some(m => m.relativePath.includes('types.ts'));
expect(hasEnhancedContext).toBe(true);
expect(hasTypes).toBe(true);
});
// Verifies the size-based token estimate stays within 20% of the tokenizer's
// actual count for a representative file (both use the ~4 chars/token rule).
tap.test('LazyFileLoader should estimate tokens reasonably accurately', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata = await loader.getMetadata(packageJsonPath);
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
// Estimated tokens should be close to actual (within reasonable range)
const difference = Math.abs(metadata.estimatedTokens - fileInfo.tokenCount);
const percentDiff = (difference / fileInfo.tokenCount) * 100;
// Should be within 20% accuracy (since it's just an estimate)
expect(percentDiff).toBeLessThan(20);
});
export default tap.start();

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@git.zone/tsdoc',
version: '1.5.1',
version: '1.6.0',
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
}

View File

@@ -77,8 +77,8 @@ interface {
For the recommendedNextVersionDetails, please only add a detail entries to the array if it has an obvious value to the reader.
You are being given the files of the project. You should use them to create the commit message.
Also you are given a diff
Also you are given a diff.
Never mention CLAUDE code, or codex.
`,
messageHistory: [],
userMessage: contextString,

View File

@@ -1,5 +1,16 @@
import * as plugins from '../plugins.js';
import type { IContextConfig, ITrimConfig, ITaskConfig, TaskType, ContextMode } from './types.js';
import * as fs from 'fs';
import type {
IContextConfig,
ITrimConfig,
ITaskConfig,
TaskType,
ContextMode,
ICacheConfig,
IAnalyzerConfig,
IPrioritizationWeights,
ITierConfig
} from './types.js';
/**
* Manages configuration for context building
@@ -8,6 +19,7 @@ export class ConfigManager {
private static instance: ConfigManager;
private config: IContextConfig;
private projectDir: string = '';
private configCache: { mtime: number; config: IContextConfig } | null = null;
/**
* Get the singleton instance of ConfigManager
@@ -65,6 +77,28 @@ export class ConfigManager {
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true
},
cache: {
enabled: true,
ttl: 3600, // 1 hour
maxSize: 100, // 100MB
directory: undefined // Will be set to .nogit/context-cache by ContextCache
},
analyzer: {
enabled: true,
useAIRefinement: false, // Disabled by default for now
aiModel: 'haiku'
},
prioritization: {
dependencyWeight: 0.3,
relevanceWeight: 0.4,
efficiencyWeight: 0.2,
recencyWeight: 0.1
},
tiers: {
essential: { minScore: 0.8, trimLevel: 'none' },
important: { minScore: 0.5, trimLevel: 'light' },
optional: { minScore: 0.2, trimLevel: 'aggressive' }
}
};
}
@@ -77,21 +111,40 @@ export class ConfigManager {
if (!this.projectDir) {
return;
}
// Create KeyValueStore for this project
// We'll just use smartfile directly instead of KeyValueStore
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
// Check if file exists
const fileExists = await plugins.smartfile.fs.fileExists(npmextraJsonPath);
if (!fileExists) {
return;
}
// Check cache
const stats = await fs.promises.stat(npmextraJsonPath);
const currentMtime = Math.floor(stats.mtimeMs);
if (this.configCache && this.configCache.mtime === currentMtime) {
// Use cached config
this.config = this.configCache.config;
return;
}
// Read the npmextra.json file
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json')
);
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());
// Check for tsdoc context configuration
if (npmextraContent?.tsdoc?.context) {
// Merge with default config
this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
}
// Cache the config
this.configCache = {
mtime: currentMtime,
config: { ...this.config }
};
} catch (error) {
console.error('Error loading context configuration:', error);
}
@@ -131,7 +184,39 @@ export class ConfigManager {
...userConfig.trimming
};
}
// Merge cache configuration
if (userConfig.cache) {
result.cache = {
...result.cache,
...userConfig.cache
};
}
// Merge analyzer configuration
if (userConfig.analyzer) {
result.analyzer = {
...result.analyzer,
...userConfig.analyzer
};
}
// Merge prioritization weights
if (userConfig.prioritization) {
result.prioritization = {
...result.prioritization,
...userConfig.prioritization
};
}
// Merge tier configuration
if (userConfig.tiers) {
result.tiers = {
...result.tiers,
...userConfig.tiers
};
}
return result;
}
@@ -179,26 +264,29 @@ export class ConfigManager {
public async updateConfig(config: Partial<IContextConfig>): Promise<void> {
// Merge with existing config
this.config = this.mergeConfigs(this.config, config);
// Invalidate cache
this.configCache = null;
try {
if (!this.projectDir) {
return;
}
// Read the existing npmextra.json file
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
let npmextraContent = {};
if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
}
// Update the tsdoc context configuration
const typedContent = npmextraContent as any;
if (!typedContent.tsdoc) typedContent.tsdoc = {};
typedContent.tsdoc.context = this.config;
// Write back to npmextra.json
const updatedContent = JSON.stringify(npmextraContent, null, 2);
await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
@@ -206,4 +294,48 @@ export class ConfigManager {
console.error('Error updating context configuration:', error);
}
}
/**
 * Returns the cache section of the loaded configuration, or the built-in
 * defaults (enabled, 1h TTL, 100MB) when no `cache` section was configured.
 */
public getCacheConfig(): ICacheConfig {
  const defaults: ICacheConfig = { enabled: true, ttl: 3600, maxSize: 100 };
  return this.config.cache ?? defaults;
}
/**
 * Returns the analyzer section of the loaded configuration, or the built-in
 * defaults (analysis on, AI refinement off, haiku model) when absent.
 */
public getAnalyzerConfig(): IAnalyzerConfig {
  const defaults: IAnalyzerConfig = {
    enabled: true,
    useAIRefinement: false,
    aiModel: 'haiku',
  };
  return this.config.analyzer ?? defaults;
}
/**
 * Returns the prioritization weights from the loaded configuration, or the
 * built-in defaults (relevance 0.4, dependency 0.3, efficiency 0.2,
 * recency 0.1) when no `prioritization` section was configured.
 */
public getPrioritizationWeights(): IPrioritizationWeights {
  const defaults: IPrioritizationWeights = {
    dependencyWeight: 0.3,
    relevanceWeight: 0.4,
    efficiencyWeight: 0.2,
    recencyWeight: 0.1,
  };
  return this.config.prioritization ?? defaults;
}
/**
 * Returns the tier thresholds from the loaded configuration, or the built-in
 * defaults (essential >= 0.8 untrimmed, important >= 0.5 lightly trimmed,
 * optional >= 0.2 aggressively trimmed) when no `tiers` section exists.
 */
public getTierConfig(): ITierConfig {
  const defaults: ITierConfig = {
    essential: { minScore: 0.8, trimLevel: 'none' },
    important: { minScore: 0.5, trimLevel: 'light' },
    optional: { minScore: 0.2, trimLevel: 'aggressive' },
  };
  return this.config.tiers ?? defaults;
}
/**
* Clear the config cache (force reload on next access)
*/
public clearCache(): void {
// Dropping the mtime-keyed cache entry makes the next load re-read
// npmextra.json from disk instead of reusing the parsed config.
this.configCache = null;
}
}

View File

@@ -0,0 +1,391 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type {
  IFileMetadata,
  IFileDependencies,
  IFileAnalysis,
  IAnalysisResult,
  TaskType,
  IPrioritizationWeights,
  ITierConfig,
} from './types.js';
/**
* ContextAnalyzer provides intelligent file selection and prioritization
* based on dependency analysis, task relevance, and configurable weights
*/
export class ContextAnalyzer {
  private projectRoot: string;
  // Fully-resolved weights/tiers: constructor fills in defaults for any
  // field the caller omitted, so downstream code never checks for undefined.
  private weights: Required<IPrioritizationWeights>;
  private tiers: Required<ITierConfig>;

  /**
   * Creates a new ContextAnalyzer
   * @param projectRoot - Root directory of the project
   * @param weights - Prioritization weights (missing fields get defaults)
   * @param tiers - Tier configuration (missing tiers get defaults)
   */
  constructor(
    projectRoot: string,
    weights: Partial<IPrioritizationWeights> = {},
    tiers: Partial<ITierConfig> = {}
  ) {
    this.projectRoot = projectRoot;
    // Default weights
    this.weights = {
      dependencyWeight: weights.dependencyWeight ?? 0.3,
      relevanceWeight: weights.relevanceWeight ?? 0.4,
      efficiencyWeight: weights.efficiencyWeight ?? 0.2,
      recencyWeight: weights.recencyWeight ?? 0.1,
    };
    // Default tiers
    this.tiers = {
      essential: tiers.essential ?? { minScore: 0.8, trimLevel: 'none' },
      important: tiers.important ?? { minScore: 0.5, trimLevel: 'light' },
      optional: tiers.optional ?? { minScore: 0.2, trimLevel: 'aggressive' },
    };
  }

  /**
   * Analyzes files for a specific task type: builds the import graph,
   * computes centrality, scores every file, and returns the files sorted
   * by descending importance.
   * @param metadata - Array of file metadata to analyze
   * @param taskType - Type of task being performed
   * @param changedFiles - Optional list of recently changed files (for commits)
   * @returns Analysis result with scored files
   */
  public async analyze(
    metadata: IFileMetadata[],
    taskType: TaskType,
    changedFiles: string[] = []
  ): Promise<IAnalysisResult> {
    const startTime = Date.now();
    // Build dependency graph
    const dependencyGraph = await this.buildDependencyGraph(metadata);
    // Calculate centrality scores (mutates graph entries in place)
    this.calculateCentrality(dependencyGraph);
    // Analyze each file
    const files: IFileAnalysis[] = [];
    for (const meta of metadata) {
      const analysis = await this.analyzeFile(
        meta,
        taskType,
        dependencyGraph,
        changedFiles
      );
      files.push(analysis);
    }
    // Sort by importance score (highest first)
    files.sort((a, b) => b.importanceScore - a.importanceScore);
    const analysisDuration = Date.now() - startTime;
    return {
      taskType,
      files,
      dependencyGraph,
      totalFiles: metadata.length,
      analysisDuration,
    };
  }

  /**
   * Builds a dependency graph from file metadata by parsing each file's
   * relative imports. Files that cannot be read are logged and left with
   * empty import lists rather than aborting the analysis.
   * @param metadata - Array of file metadata
   * @returns Dependency graph as a map keyed by absolute file path
   */
  private async buildDependencyGraph(
    metadata: IFileMetadata[]
  ): Promise<Map<string, IFileDependencies>> {
    const graph = new Map<string, IFileDependencies>();
    // Initialize graph entries
    for (const meta of metadata) {
      graph.set(meta.path, {
        path: meta.path,
        imports: [],
        importedBy: [],
        centrality: 0,
      });
    }
    // Parse imports from each file
    for (const meta of metadata) {
      try {
        // NOTE(review): toStringSync is awaited to match its usage elsewhere
        // in this project; the await is harmless if the call is synchronous.
        const contents = await plugins.smartfile.fs.toStringSync(meta.path);
        const imports = this.extractImports(contents, meta.path);
        const deps = graph.get(meta.path)!;
        deps.imports = imports;
        // Update importedBy (reverse edges) for imported files
        for (const importPath of imports) {
          const importedDeps = graph.get(importPath);
          if (importedDeps) {
            importedDeps.importedBy.push(meta.path);
          }
        }
      } catch (error) {
        // Narrow the unknown catch variable before reading .message
        const message = error instanceof Error ? error.message : String(error);
        console.warn(`Failed to parse imports from ${meta.path}:`, message);
      }
    }
    return graph;
  }

  /**
   * Extracts relative import specifiers from file contents and resolves them
   * to absolute paths. Bare (external-package) specifiers are ignored.
   * @param contents - File contents
   * @param filePath - Path of the file being analyzed
   * @returns Array of absolute paths to imported files
   */
  private extractImports(contents: string, filePath: string): string[] {
    const imports: string[] = [];
    const fileDir = plugins.path.dirname(filePath);
    // Matches `import ... from '...'` and `export ... from '...'` specifiers
    const importRegex = /(?:import|export).*?from\s+['"](.+?)['"]/g;
    let match;
    while ((match = importRegex.exec(contents)) !== null) {
      const importPath = match[1];
      // Skip external modules — only relative imports belong to the graph
      if (!importPath.startsWith('.')) {
        continue;
      }
      // Resolve relative import to an absolute path
      const resolvedPath = plugins.path.resolve(fileDir, importPath);
      // Probe common resolution targets for extensionless specifiers
      const extensions = ['.ts', '.js', '.tsx', '.jsx', '/index.ts', '/index.js'];
      let found = false;
      for (const ext of extensions) {
        const testPath = resolvedPath.endsWith(ext) ? resolvedPath : resolvedPath + ext;
        // Use the module-level `fs` import: the previous code called
        // require('fs') on every iteration, which is both wasteful and
        // unavailable in an ES module context.
        if (fs.existsSync(testPath)) {
          imports.push(testPath);
          found = true;
          break;
        }
      }
      if (!found && !resolvedPath.includes('.')) {
        // Fall back to assuming a .ts source file
        imports.push(resolvedPath + '.ts');
      }
    }
    return imports;
  }

  /**
   * Calculates centrality scores for all nodes in the dependency graph.
   * Uses a simplified PageRank-like algorithm: a file imported by many
   * well-connected files scores higher. Scores are normalized to [0, 1].
   * @param graph - Dependency graph (centrality is written in place)
   */
  private calculateCentrality(graph: Map<string, IFileDependencies>): void {
    const nodeCount = graph.size;
    // Guard: an empty graph would otherwise divide by zero below and feed
    // an empty list into Math.max() (yielding -Infinity) during normalization.
    if (nodeCount === 0) {
      return;
    }
    const damping = 0.85;
    const iterations = 10;
    // Initialize scores uniformly
    const scores = new Map<string, number>();
    for (const path of graph.keys()) {
      scores.set(path, 1.0 / nodeCount);
    }
    // Iterative calculation
    for (let i = 0; i < iterations; i++) {
      const newScores = new Map<string, number>();
      for (const [path, deps] of graph.entries()) {
        let score = (1 - damping) / nodeCount;
        // Add contributions from nodes that import this file
        for (const importerPath of deps.importedBy) {
          const importerDeps = graph.get(importerPath);
          if (importerDeps) {
            const importerScore = scores.get(importerPath) ?? 0;
            // `|| 1` avoids division by zero for importers with no
            // recognized outgoing edges
            const outgoingCount = importerDeps.imports.length || 1;
            score += damping * (importerScore / outgoingCount);
          }
        }
        newScores.set(path, score);
      }
      // Commit this iteration's scores
      for (const [path, score] of newScores) {
        scores.set(path, score);
      }
    }
    // Normalize scores to 0-1 range
    const maxScore = Math.max(...scores.values());
    if (maxScore > 0) {
      for (const deps of graph.values()) {
        const score = scores.get(deps.path) ?? 0;
        deps.centrality = score / maxScore;
      }
    }
  }

  /**
   * Analyzes a single file: combines relevance, centrality, efficiency and
   * recency into a weighted importance score and assigns a tier.
   * @param meta - File metadata
   * @param taskType - Task being performed
   * @param graph - Dependency graph
   * @param changedFiles - Recently changed files
   * @returns File analysis
   */
  private async analyzeFile(
    meta: IFileMetadata,
    taskType: TaskType,
    graph: Map<string, IFileDependencies>,
    changedFiles: string[]
  ): Promise<IFileAnalysis> {
    const deps = graph.get(meta.path);
    const centralityScore = deps?.centrality ?? 0;
    // Calculate task-specific relevance
    const relevanceScore = this.calculateRelevance(meta, taskType);
    // Calculate efficiency (information per token)
    const efficiencyScore = this.calculateEfficiency(meta);
    // Calculate recency (for commit tasks)
    const recencyScore = this.calculateRecency(meta, changedFiles);
    // Weighted combination; weights default to summing to 1.0
    const importanceScore =
      relevanceScore * this.weights.relevanceWeight +
      centralityScore * this.weights.dependencyWeight +
      efficiencyScore * this.weights.efficiencyWeight +
      recencyScore * this.weights.recencyWeight;
    // Assign tier
    const tier = this.assignTier(importanceScore);
    return {
      path: meta.path,
      relevanceScore,
      centralityScore,
      efficiencyScore,
      recencyScore,
      importanceScore,
      tier,
      reason: this.generateReason(meta, taskType, importanceScore, tier),
    };
  }

  /**
   * Calculates task-specific relevance from path heuristics.
   * Result is clamped to [0, 1] around a base score of 0.5.
   */
  private calculateRelevance(meta: IFileMetadata, taskType: TaskType): number {
    const relativePath = meta.relativePath.toLowerCase();
    let score = 0.5; // Base score
    // README generation - prioritize public APIs and main exports
    if (taskType === 'readme') {
      if (relativePath.includes('index.ts')) score += 0.3;
      if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.2; // Root level exports
      if (relativePath.includes('test/')) score -= 0.3;
      if (relativePath.includes('classes/')) score += 0.1;
      if (relativePath.includes('interfaces/')) score += 0.1;
    }
    // Commit messages - prioritize changed files and their dependencies
    if (taskType === 'commit') {
      if (relativePath.includes('test/')) score -= 0.2;
      // Recency will handle changed files
    }
    // Description generation - prioritize main exports and core interfaces
    if (taskType === 'description') {
      if (relativePath.includes('index.ts')) score += 0.4;
      if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.3;
      if (relativePath.includes('test/')) score -= 0.4;
      if (relativePath.includes('interfaces/')) score += 0.2;
    }
    return Math.max(0, Math.min(1, score));
  }

  /**
   * Calculates efficiency score (information density): files near an optimal
   * token budget score highest, very small or very large files score lower.
   */
  private calculateEfficiency(meta: IFileMetadata): number {
    // Optimal size expressed in TOKENS (~1250 tokens ≈ 5000 chars at
    // ~4 chars/token). The previous constant (5000) was in characters but
    // was compared against estimatedTokens, skewing scores toward files
    // roughly four times larger than intended.
    const optimalTokens = 1250;
    const distance = Math.abs(meta.estimatedTokens - optimalTokens);
    return Math.max(0, 1 - distance / optimalTokens);
  }

  /**
   * Calculates recency score: 1.0 for files in the changed set, 0 otherwise.
   * Returns 0 for every file when no changed files are supplied.
   */
  private calculateRecency(meta: IFileMetadata, changedFiles: string[]): number {
    if (changedFiles.length === 0) {
      return 0;
    }
    // Exact-path membership test against the changed set
    const isChanged = changedFiles.some((changed) => changed === meta.path);
    return isChanged ? 1.0 : 0.0;
  }

  /**
   * Assigns a tier based on importance score; scores below the optional
   * threshold are excluded from the context entirely.
   */
  private assignTier(score: number): 'essential' | 'important' | 'optional' | 'excluded' {
    if (score >= this.tiers.essential.minScore) return 'essential';
    if (score >= this.tiers.important.minScore) return 'important';
    if (score >= this.tiers.optional.minScore) return 'optional';
    return 'excluded';
  }

  /**
   * Generates a human-readable reason for the score (used for diagnostics).
   */
  private generateReason(
    meta: IFileMetadata,
    taskType: TaskType,
    score: number,
    tier: string
  ): string {
    const reasons: string[] = [];
    if (meta.relativePath.includes('index.ts')) {
      reasons.push('main export file');
    }
    if (meta.relativePath.includes('test/')) {
      reasons.push('test file (lower priority)');
    }
    if (taskType === 'readme' && meta.relativePath.match(/^ts\/[^\/]+\.ts$/)) {
      reasons.push('root-level module');
    }
    reasons.push(`score: ${score.toFixed(2)}`);
    reasons.push(`tier: ${tier}`);
    return reasons.join(', ');
  }
}

285
ts/context/context-cache.ts Normal file
View File

@@ -0,0 +1,285 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { ICacheEntry, ICacheConfig } from './types.js';
/**
* ContextCache provides persistent caching of file contents and token counts
* with automatic invalidation on file changes
*/
export class ContextCache {
  private cacheDir: string;
  // In-memory view of the persisted index, keyed by absolute file path
  private cache: Map<string, ICacheEntry> = new Map();
  private config: Required<ICacheConfig>;
  private cacheIndexPath: string;

  /**
   * Creates a new ContextCache
   * @param projectRoot - Root directory of the project
   * @param config - Cache configuration (missing fields get defaults)
   */
  constructor(projectRoot: string, config: Partial<ICacheConfig> = {}) {
    this.config = {
      enabled: config.enabled ?? true,
      ttl: config.ttl ?? 3600, // seconds; 1 hour default
      maxSize: config.maxSize ?? 100, // megabytes; 100MB default
      directory: config.directory ?? plugins.path.join(projectRoot, '.nogit', 'context-cache'),
    };
    this.cacheDir = this.config.directory;
    this.cacheIndexPath = plugins.path.join(this.cacheDir, 'index.json');
  }

  /**
   * Formats an unknown caught value for logging. Reading `.message` directly
   * on an `unknown` catch variable fails under `useUnknownInCatchVariables`.
   */
  private static describeError(error: unknown): string {
    return error instanceof Error ? error.message : String(error);
  }

  /**
   * Byte-accurate size of an entry's contents. `contents.length` counts
   * UTF-16 code units, which undercounts multi-byte UTF-8 text and would
   * let the cache silently exceed its byte-denominated maxSize.
   */
  private static entrySize(entry: ICacheEntry): number {
    return Buffer.byteLength(entry.contents, 'utf8');
  }

  /**
   * Initializes the cache by loading the persisted index from disk, then
   * pruning expired/stale entries. No-op when caching is disabled.
   */
  public async init(): Promise<void> {
    if (!this.config.enabled) {
      return;
    }
    // Ensure cache directory exists
    await plugins.smartfile.fs.ensureDir(this.cacheDir);
    // Load cache index if it exists
    try {
      const indexExists = await plugins.smartfile.fs.fileExists(this.cacheIndexPath);
      if (indexExists) {
        // NOTE(review): toStringSync is awaited to match its usage elsewhere
        // in this project; the await is harmless if the call is synchronous.
        const indexContent = await plugins.smartfile.fs.toStringSync(this.cacheIndexPath);
        const indexData = JSON.parse(indexContent) as ICacheEntry[];
        if (Array.isArray(indexData)) {
          for (const entry of indexData) {
            this.cache.set(entry.path, entry);
          }
        }
      }
    } catch (error) {
      console.warn('Failed to load cache index:', ContextCache.describeError(error));
      // Start with empty cache if loading fails
    }
    // Clean up expired and invalid entries
    await this.cleanup();
  }

  /**
   * Gets a cached entry if it's still valid (not expired by TTL, and the
   * underlying file's mtime has not changed since it was cached).
   * @param filePath - Absolute path to the file
   * @returns Cache entry if valid, null otherwise
   */
  public async get(filePath: string): Promise<ICacheEntry | null> {
    if (!this.config.enabled) {
      return null;
    }
    const entry = this.cache.get(filePath);
    if (!entry) {
      return null;
    }
    // Check if entry is expired (ttl is in seconds, cachedAt in ms)
    const now = Date.now();
    if (now - entry.cachedAt > this.config.ttl * 1000) {
      this.cache.delete(filePath);
      return null;
    }
    // Check if file has been modified since caching
    try {
      const stats = await fs.promises.stat(filePath);
      const currentMtime = Math.floor(stats.mtimeMs);
      if (currentMtime !== entry.mtime) {
        // File has changed, invalidate cache
        this.cache.delete(filePath);
        return null;
      }
      return entry;
    } catch (error) {
      // File doesn't exist anymore
      this.cache.delete(filePath);
      return null;
    }
  }

  /**
   * Stores a cache entry, enforcing the size limit and persisting the
   * index in the background (persistence failures are logged, not thrown).
   * @param entry - Cache entry to store
   */
  public async set(entry: ICacheEntry): Promise<void> {
    if (!this.config.enabled) {
      return;
    }
    this.cache.set(entry.path, entry);
    // Check cache size and evict old entries if needed
    await this.enforceMaxSize();
    // Persist to disk (fire-and-forget; deliberately not awaited)
    this.persist().catch((error) => {
      console.warn('Failed to persist cache:', ContextCache.describeError(error));
    });
  }

  /**
   * Stores multiple cache entries in one pass, then enforces the size limit
   * and persists once (awaited, unlike single-entry set()).
   * @param entries - Array of cache entries
   */
  public async setMany(entries: ICacheEntry[]): Promise<void> {
    if (!this.config.enabled) {
      return;
    }
    for (const entry of entries) {
      this.cache.set(entry.path, entry);
    }
    await this.enforceMaxSize();
    await this.persist();
  }

  /**
   * Checks if a file is cached and valid
   * @param filePath - Absolute path to the file
   * @returns True if cached and valid
   */
  public async has(filePath: string): Promise<boolean> {
    const entry = await this.get(filePath);
    return entry !== null;
  }

  /**
   * Gets cache statistics. totalSize is in bytes (UTF-8),
   * oldest/newestEntry are epoch-ms timestamps or null when empty.
   */
  public getStats(): {
    entries: number;
    totalSize: number;
    oldestEntry: number | null;
    newestEntry: number | null;
  } {
    let totalSize = 0;
    let oldestEntry: number | null = null;
    let newestEntry: number | null = null;
    for (const entry of this.cache.values()) {
      totalSize += ContextCache.entrySize(entry);
      if (oldestEntry === null || entry.cachedAt < oldestEntry) {
        oldestEntry = entry.cachedAt;
      }
      if (newestEntry === null || entry.cachedAt > newestEntry) {
        newestEntry = entry.cachedAt;
      }
    }
    return {
      entries: this.cache.size,
      totalSize,
      oldestEntry,
      newestEntry,
    };
  }

  /**
   * Clears all cache entries and persists the now-empty index.
   */
  public async clear(): Promise<void> {
    this.cache.clear();
    await this.persist();
  }

  /**
   * Clears specific cache entries
   * @param filePaths - Array of file paths to clear
   */
  public async clearPaths(filePaths: string[]): Promise<void> {
    for (const path of filePaths) {
      this.cache.delete(path);
    }
    await this.persist();
  }

  /**
   * Cleans up expired entries and entries whose backing file was deleted
   * or modified; persists only if anything was removed.
   */
  private async cleanup(): Promise<void> {
    const now = Date.now();
    const toDelete: string[] = [];
    for (const [path, entry] of this.cache.entries()) {
      // Check expiration
      if (now - entry.cachedAt > this.config.ttl * 1000) {
        toDelete.push(path);
        continue;
      }
      // Check if file still exists and hasn't changed
      try {
        const stats = await fs.promises.stat(path);
        const currentMtime = Math.floor(stats.mtimeMs);
        if (currentMtime !== entry.mtime) {
          toDelete.push(path);
        }
      } catch (error) {
        // File doesn't exist
        toDelete.push(path);
      }
    }
    for (const path of toDelete) {
      this.cache.delete(path);
    }
    if (toDelete.length > 0) {
      await this.persist();
    }
  }

  /**
   * Enforces maximum cache size (config.maxSize, MB) by evicting the
   * oldest entries first until the total byte size fits.
   */
  private async enforceMaxSize(): Promise<void> {
    const maxSizeBytes = this.config.maxSize * 1024 * 1024; // Convert MB to bytes
    let currentSize = this.getStats().totalSize;
    if (currentSize <= maxSizeBytes) {
      return;
    }
    // Sort entries by age (oldest first)
    const entries = Array.from(this.cache.entries()).sort(
      (a, b) => a[1].cachedAt - b[1].cachedAt
    );
    // Remove oldest entries until we're under the limit
    for (const [path, entry] of entries) {
      if (currentSize <= maxSizeBytes) {
        break;
      }
      currentSize -= ContextCache.entrySize(entry);
      this.cache.delete(path);
    }
  }

  /**
   * Persists the cache index (all entries, including contents) to disk
   * as JSON. Failures are logged, never thrown.
   */
  private async persist(): Promise<void> {
    if (!this.config.enabled) {
      return;
    }
    try {
      const entries = Array.from(this.cache.values());
      const content = JSON.stringify(entries, null, 2);
      await plugins.smartfile.memory.toFs(content, this.cacheIndexPath);
    } catch (error) {
      console.warn('Failed to persist cache index:', ContextCache.describeError(error));
    }
  }
}

View File

@@ -243,4 +243,68 @@ export class ContextTrimmer {
...config
};
}
/**
 * Trim a file according to its importance tier.
 * Temporarily swaps this.config for a level-specific variant, dispatches on
 * the file extension, and always restores the original config afterwards.
 * @param filePath The path to the file (used only for extension dispatch)
 * @param content The file's contents
 * @param level The trimming level to apply ('none', 'light', 'aggressive')
 * @returns The trimmed file contents (unchanged for 'none' and for
 *          extensions other than .ts/.tsx/.md/.json)
 */
public trimFileWithLevel(
  filePath: string,
  content: string,
  level: 'none' | 'light' | 'aggressive'
): string {
  // Essential files pass through untouched.
  if (level === 'none') {
    return content;
  }
  // Level-specific overrides. 'light' keeps implementations and comments,
  // only shortening long functions; 'aggressive' strips implementations
  // and comments down to signatures.
  const overrides =
    level === 'light'
      ? {
          removeImplementations: false,
          preserveInterfaces: true,
          preserveTypeDefs: true,
          preserveJSDoc: true,
          maxFunctionLines: 10,
          removeComments: false,
          removeBlankLines: true,
        }
      : {
          removeImplementations: true,
          preserveInterfaces: true,
          preserveTypeDefs: true,
          preserveJSDoc: true,
          maxFunctionLines: 3,
          removeComments: true,
          removeBlankLines: true,
        };
  const savedConfig = { ...this.config };
  this.config = { ...this.config, ...overrides };
  try {
    // Dispatch on extension; the trim* helpers read this.config.
    if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
      return this.trimTypeScriptFile(content);
    }
    if (filePath.endsWith('.md')) {
      return this.trimMarkdownFile(content);
    }
    if (filePath.endsWith('.json')) {
      return this.trimJsonFile(content);
    }
    return content;
  } finally {
    // Restore the caller-visible config no matter what the helpers did.
    this.config = savedConfig;
  }
}
}

View File

@@ -1,7 +1,10 @@
import * as plugins from '../plugins.js';
import type { ContextMode, IContextResult, IFileInfo, TaskType } from './types.js';
import type { ContextMode, IContextResult, IFileInfo, TaskType, IFileMetadata } from './types.js';
import { ContextTrimmer } from './context-trimmer.js';
import { ConfigManager } from './config-manager.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
/**
* Enhanced ProjectContext that supports context optimization strategies
@@ -10,6 +13,9 @@ export class EnhancedContext {
private projectDir: string;
private trimmer: ContextTrimmer;
private configManager: ConfigManager;
private lazyLoader: LazyFileLoader;
private cache: ContextCache;
private analyzer: ContextAnalyzer;
private contextMode: ContextMode = 'trimmed';
private tokenBudget: number = 190000; // Default for o4-mini
private contextResult: IContextResult = {
@@ -29,6 +35,13 @@ export class EnhancedContext {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
this.lazyLoader = new LazyFileLoader(projectDirArg);
this.cache = new ContextCache(projectDirArg, this.configManager.getCacheConfig());
this.analyzer = new ContextAnalyzer(
projectDirArg,
this.configManager.getPrioritizationWeights(),
this.configManager.getTierConfig()
);
}
/**
@@ -38,6 +51,7 @@ export class EnhancedContext {
await this.configManager.initialize(this.projectDir);
this.tokenBudget = this.configManager.getMaxTokens();
this.trimmer.updateConfig(this.configManager.getTrimConfig());
await this.cache.init();
}
/**
@@ -138,13 +152,28 @@ export class EnhancedContext {
let totalTokenCount = 0;
let totalOriginalTokens = 0;
// Sort files by importance (for now just a simple alphabetical sort)
// Later this could be enhanced with more sophisticated prioritization
const sortedFiles = [...files].sort((a, b) => a.relative.localeCompare(b.relative));
// Convert SmartFile objects to IFileMetadata for analysis
const metadata: IFileMetadata[] = files.map(sf => ({
path: sf.path,
relativePath: sf.relative,
size: sf.contents.toString().length,
mtime: Date.now(), // SmartFile doesn't expose mtime, use current time
estimatedTokens: this.countTokens(sf.contents.toString()),
importanceScore: 0
}));
// Analyze files using ContextAnalyzer to get smart prioritization
// (Note: This requires task type which we'll pass from buildContext)
// For now, sort files by estimated tokens (smaller files first for better efficiency)
const sortedFiles = [...files].sort((a, b) => {
const aTokens = this.countTokens(a.contents.toString());
const bTokens = this.countTokens(b.contents.toString());
return aTokens - bTokens;
});
const processedFiles: string[] = [];
for (const smartfile of sortedFiles) {
// Calculate original token count
const originalContent = smartfile.contents.toString();
@@ -215,6 +244,154 @@ ${processedContent}
return context;
}
/**
 * Convert files to context with smart analysis and prioritization.
 *
 * Files are scored by the ContextAnalyzer, sorted by importance (highest
 * first), assigned a tier-based trim level, and appended to the context
 * until the token budget is exhausted. File contents are served from the
 * persistent cache when available.
 *
 * @param metadata - File metadata to analyze
 * @param taskType - Task type for context-aware prioritization
 * @param mode - Context mode to use
 * @returns Context string
 */
public async convertFilesToContextWithAnalysis(
  metadata: IFileMetadata[],
  taskType: TaskType,
  mode: ContextMode = this.contextMode
): Promise<string> {
  // Reset context result
  this.contextResult = {
    context: '',
    tokenCount: 0,
    includedFiles: [],
    trimmedFiles: [],
    excludedFiles: [],
    tokenSavings: 0
  };

  // Index metadata by path so we can recover each file's real mtime below.
  const metadataByPath = new Map<string, IFileMetadata>(
    metadata.map(m => [m.path, m])
  );

  // Analyze files for smart prioritization
  const analysis = await this.analyzer.analyze(metadata, taskType, []);

  // Sort files by importance score (highest first)
  const sortedAnalysis = [...analysis.files].sort(
    (a, b) => b.importanceScore - a.importanceScore
  );

  // Filter out excluded tier
  const relevantFiles = sortedAnalysis.filter(f => f.tier !== 'excluded');

  let totalTokenCount = 0;
  const processedFiles: string[] = [];

  // Load files with cache support
  for (const fileAnalysis of relevantFiles) {
    try {
      // Compute the project-relative path once; it is used for trimming,
      // formatting and result tracking below.
      const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);

      // Check cache first
      let contents: string;
      let originalTokenCount: number;

      const cached = await this.cache.get(fileAnalysis.path);
      if (cached) {
        contents = cached.contents;
        originalTokenCount = cached.tokenCount;
      } else {
        // Load file
        const fileData = await plugins.smartfile.fs.toStringSync(fileAnalysis.path);
        contents = fileData;
        originalTokenCount = this.countTokens(contents);

        // Cache it, recording the file's actual mtime (not "now") so that
        // mtime-based cache invalidation keeps working across runs.
        await this.cache.set({
          path: fileAnalysis.path,
          contents,
          tokenCount: originalTokenCount,
          mtime: metadataByPath.get(fileAnalysis.path)?.mtime ?? Date.now(),
          cachedAt: Date.now()
        });
      }

      // Apply tier-based trimming: essential files are kept verbatim,
      // important files are lightly trimmed, optional files aggressively.
      let processedContent = contents;
      let trimLevel: 'none' | 'light' | 'aggressive' = 'light';

      if (fileAnalysis.tier === 'essential') {
        trimLevel = 'none';
      } else if (fileAnalysis.tier === 'important') {
        trimLevel = 'light';
      } else if (fileAnalysis.tier === 'optional') {
        trimLevel = 'aggressive';
      }

      // Apply trimming based on mode and tier
      if (mode !== 'full' && trimLevel !== 'none') {
        processedContent = this.trimmer.trimFileWithLevel(
          relativePath,
          contents,
          trimLevel
        );
      }

      // Calculate token count
      const processedTokenCount = this.countTokens(processedContent);

      // Check token budget
      if (totalTokenCount + processedTokenCount > this.tokenBudget) {
        // We don't have budget for this file; record it as excluded with
        // its original (untrimmed) contents and token count.
        this.contextResult.excludedFiles.push({
          path: fileAnalysis.path,
          contents,
          relativePath,
          tokenCount: originalTokenCount,
          importanceScore: fileAnalysis.importanceScore
        });
        continue;
      }

      // Format the file for context
      const formattedContent = `
====== START OF FILE ${relativePath} ======
${processedContent}
====== END OF FILE ${relativePath} ======
`;

      processedFiles.push(formattedContent);
      totalTokenCount += processedTokenCount;

      // Track file in appropriate list
      const fileInfo: IFileInfo = {
        path: fileAnalysis.path,
        contents: processedContent,
        relativePath,
        tokenCount: processedTokenCount,
        importanceScore: fileAnalysis.importanceScore
      };

      if (trimLevel === 'none' || processedContent === contents) {
        this.contextResult.includedFiles.push(fileInfo);
      } else {
        this.contextResult.trimmedFiles.push(fileInfo);
        this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
      }
    } catch (error) {
      // `error` is `unknown` under strict mode; extract a printable message.
      const message = error instanceof Error ? error.message : String(error);
      console.warn(`Failed to process file ${fileAnalysis.path}:`, message);
    }
  }

  // Join all processed files
  const context = processedFiles.join('\n');

  // Update context result
  this.contextResult.context = context;
  this.contextResult.tokenCount = totalTokenCount;

  return context;
}
/**
* Build context for the project
* @param taskType Optional task type for task-specific context
@@ -233,42 +410,71 @@ ${processedContent}
}
}
// Gather files
const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
const files = await this.gatherFiles(
taskConfig?.includePaths,
taskConfig?.excludePaths
);
// Convert files to context
// Create an array of all files to process
const allFiles: plugins.smartfile.SmartFile[] = [];
// Add individual files
if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);
// Add arrays of files
if (files.smartfilesMod) {
if (Array.isArray(files.smartfilesMod)) {
allFiles.push(...files.smartfilesMod);
} else {
allFiles.push(files.smartfilesMod);
// Check if analyzer is enabled in config
const analyzerConfig = this.configManager.getAnalyzerConfig();
const useAnalyzer = analyzerConfig.enabled && taskType;
if (useAnalyzer) {
// Use new smart context building with lazy loading and analysis
const taskConfig = this.configManager.getTaskConfig(taskType!);
// Build globs for scanning
const includeGlobs = taskConfig?.includePaths?.map(p => `${p}/**/*.ts`) || [
'ts/**/*.ts',
'ts*/**/*.ts'
];
// Add config files
const configGlobs = [
'package.json',
'readme.md',
'readme.hints.md',
'npmextra.json'
];
// Scan files for metadata (fast, doesn't load contents)
const metadata = await this.lazyLoader.scanFiles([...configGlobs, ...includeGlobs]);
// Use analyzer to build context with smart prioritization
await this.convertFilesToContextWithAnalysis(metadata, taskType!, this.contextMode);
} else {
// Fall back to old method for backward compatibility
const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
const files = await this.gatherFiles(
taskConfig?.includePaths,
taskConfig?.excludePaths
);
// Convert files to context
// Create an array of all files to process
const allFiles: plugins.smartfile.SmartFile[] = [];
// Add individual files
if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);
// Add arrays of files
if (files.smartfilesMod) {
if (Array.isArray(files.smartfilesMod)) {
allFiles.push(...files.smartfilesMod);
} else {
allFiles.push(files.smartfilesMod);
}
}
}
if (files.smartfilesTest) {
if (Array.isArray(files.smartfilesTest)) {
allFiles.push(...files.smartfilesTest);
} else {
allFiles.push(files.smartfilesTest);
if (files.smartfilesTest) {
if (Array.isArray(files.smartfilesTest)) {
allFiles.push(...files.smartfilesTest);
} else {
allFiles.push(files.smartfilesTest);
}
}
await this.convertFilesToContext(allFiles);
}
const context = await this.convertFilesToContext(allFiles);
return this.contextResult;
}

View File

@@ -2,14 +2,27 @@ import { EnhancedContext } from './enhanced-context.js';
import { TaskContextFactory } from './task-context-factory.js';
import { ConfigManager } from './config-manager.js';
import { ContextTrimmer } from './context-trimmer.js';
import type {
ContextMode,
IContextConfig,
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
import type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType
TaskType,
ICacheConfig,
IAnalyzerConfig,
IPrioritizationWeights,
ITierConfig,
ITierSettings,
IFileMetadata,
ICacheEntry,
IFileDependencies,
IFileAnalysis,
IAnalysisResult
} from './types.js';
export {
@@ -18,6 +31,9 @@ export {
TaskContextFactory,
ConfigManager,
ContextTrimmer,
LazyFileLoader,
ContextCache,
ContextAnalyzer,
};
// Types
@@ -28,5 +44,15 @@ export type {
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType
TaskType,
ICacheConfig,
IAnalyzerConfig,
IPrioritizationWeights,
ITierConfig,
ITierSettings,
IFileMetadata,
ICacheEntry,
IFileDependencies,
IFileAnalysis,
IAnalysisResult
};

View File

@@ -0,0 +1,191 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { IFileMetadata, IFileInfo } from './types.js';
/**
 * LazyFileLoader handles efficient file loading by:
 * - Scanning files for metadata without loading contents
 * - Providing fast file size and token estimates
 * - Loading contents only when requested
 * - Parallel loading of selected files
 */
export class LazyFileLoader {
  private projectRoot: string;
  // Metadata keyed by absolute file path; entries are re-validated by mtime.
  private metadataCache: Map<string, IFileMetadata> = new Map();

  /**
   * Creates a new LazyFileLoader
   * @param projectRoot - Root directory of the project
   */
  constructor(projectRoot: string) {
    this.projectRoot = projectRoot;
  }

  /**
   * Scans files in given globs and creates metadata without loading contents
   * @param globs - File patterns to scan (e.g., ['ts/**\/*.ts', 'test/**\/*.ts'])
   * @returns Array of file metadata (unreadable files are skipped with a warning)
   */
  public async scanFiles(globs: string[]): Promise<IFileMetadata[]> {
    const metadata: IFileMetadata[] = [];

    for (const globPattern of globs) {
      try {
        const smartFiles = await plugins.smartfile.fs.fileTreeToObject(this.projectRoot, globPattern);
        const fileArray = Array.isArray(smartFiles) ? smartFiles : [smartFiles];

        for (const smartFile of fileArray) {
          try {
            const meta = await this.getMetadata(smartFile.path);
            metadata.push(meta);
          } catch (error) {
            // Skip files that can't be read; narrow the unknown catch value first.
            const message = error instanceof Error ? error.message : String(error);
            console.warn(`Failed to get metadata for ${smartFile.path}:`, message);
          }
        }
      } catch (error) {
        // Skip patterns that don't match any files
        console.warn(`No files found for pattern ${globPattern}`);
      }
    }

    return metadata;
  }

  /**
   * Gets metadata for a single file without loading contents.
   * Cached entries are re-validated against the file's current mtime.
   * @param filePath - Absolute path to the file
   * @returns File metadata
   */
  public async getMetadata(filePath: string): Promise<IFileMetadata> {
    // Stat exactly once and reuse the result for both cache validation and
    // fresh metadata (previously the file was stat'ed twice on a stale cache hit).
    const stats = await fs.promises.stat(filePath);
    const mtime = Math.floor(stats.mtimeMs);

    // Serve from cache if the file hasn't changed since it was cached.
    const cached = this.metadataCache.get(filePath);
    if (cached && cached.mtime === mtime) {
      return cached;
    }

    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    // Estimate tokens: rough estimate of ~4 characters per token.
    // This is faster than reading and tokenizing the entire file.
    const estimatedTokens = Math.ceil(stats.size / 4);

    const metadata: IFileMetadata = {
      path: filePath,
      relativePath,
      size: stats.size,
      mtime,
      estimatedTokens,
    };

    // Cache the metadata
    this.metadataCache.set(filePath, metadata);

    return metadata;
  }

  /**
   * Loads file contents for selected files in parallel
   * @param metadata - Array of file metadata to load
   * @param tokenizer - Function to calculate accurate token count
   * @returns Array of complete file info with contents (failed loads are skipped)
   */
  public async loadFiles(
    metadata: IFileMetadata[],
    tokenizer: (content: string) => number
  ): Promise<IFileInfo[]> {
    // Load files in parallel
    const loadPromises = metadata.map(async (meta) => {
      try {
        const contents = await plugins.smartfile.fs.toStringSync(meta.path);
        const tokenCount = tokenizer(contents);

        const fileInfo: IFileInfo = {
          path: meta.path,
          relativePath: meta.relativePath,
          contents,
          tokenCount,
          importanceScore: meta.importanceScore,
        };

        return fileInfo;
      } catch (error) {
        // Narrow the unknown catch value before reading .message.
        const message = error instanceof Error ? error.message : String(error);
        console.warn(`Failed to load file ${meta.path}:`, message);
        return null;
      }
    });

    // Wait for all loads to complete and filter out failures
    const results = await Promise.all(loadPromises);
    return results.filter((r): r is IFileInfo => r !== null);
  }

  /**
   * Loads a single file with contents
   * @param filePath - Absolute path to the file
   * @param tokenizer - Function to calculate accurate token count
   * @returns Complete file info with contents
   */
  public async loadFile(
    filePath: string,
    tokenizer: (content: string) => number
  ): Promise<IFileInfo> {
    const meta = await this.getMetadata(filePath);
    const contents = await plugins.smartfile.fs.toStringSync(filePath);
    const tokenCount = tokenizer(contents);
    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    return {
      path: filePath,
      relativePath,
      contents,
      tokenCount,
      importanceScore: meta.importanceScore,
    };
  }

  /**
   * Updates importance scores for metadata entries already in the cache.
   * Paths without a cached entry are ignored.
   * @param scores - Map of file paths to importance scores
   */
  public updateImportanceScores(scores: Map<string, number>): void {
    for (const [path, score] of scores) {
      const meta = this.metadataCache.get(path);
      if (meta) {
        meta.importanceScore = score;
      }
    }
  }

  /**
   * Clears the metadata cache
   */
  public clearCache(): void {
    this.metadataCache.clear();
  }

  /**
   * Gets total estimated tokens for all cached metadata
   */
  public getTotalEstimatedTokens(): number {
    let total = 0;
    for (const meta of this.metadataCache.values()) {
      total += meta.estimatedTokens;
    }
    return total;
  }

  /**
   * Gets cached metadata entries
   */
  public getCachedMetadata(): IFileMetadata[] {
    return Array.from(this.metadataCache.values());
  }
}

View File

@@ -58,6 +58,73 @@ export interface IContextConfig {
};
/** Trimming configuration */
trimming?: ITrimConfig;
/** Cache configuration */
cache?: ICacheConfig;
/** Analyzer configuration */
analyzer?: IAnalyzerConfig;
/** Prioritization weights */
prioritization?: IPrioritizationWeights;
/** Tier configuration for adaptive trimming */
tiers?: ITierConfig;
}
/**
 * Cache configuration. All fields are optional; unset fields fall back to
 * implementation defaults (presumably supplied by ConfigManager — confirm there).
 */
export interface ICacheConfig {
/** Whether persistent file-content caching is enabled */
enabled?: boolean;
/** Time-to-live for cache entries, in seconds */
ttl?: number;
/** Maximum total cache size in MB (entries beyond this are evicted) */
maxSize?: number;
/** Directory where cache data is stored */
directory?: string;
}
/**
 * Analyzer configuration. When `enabled` is false (or no task type is given),
 * context building falls back to the legacy file-gathering path.
 */
export interface IAnalyzerConfig {
/** Whether the smart context analyzer is enabled */
enabled?: boolean;
/** Whether to use AI refinement for file selection */
useAIRefinement?: boolean;
/** AI model identifier to use for refinement */
aiModel?: string;
}
/**
 * Weights for file prioritization. Each weight scales the corresponding
 * 0-1 score from IFileAnalysis when computing the combined importance score.
 */
export interface IPrioritizationWeights {
/** Weight for dependency-graph centrality */
dependencyWeight?: number;
/** Weight for relevance to the current task type */
relevanceWeight?: number;
/** Weight for token efficiency */
efficiencyWeight?: number;
/** Weight for file recency (recently modified files score higher) */
recencyWeight?: number;
}
/**
 * Tier configuration for adaptive trimming. Files are bucketed into tiers
 * by importance score; each tier maps to a trimming level.
 */
export interface ITierConfig {
/** Highest-priority files (typically kept untrimmed) */
essential?: ITierSettings;
/** Mid-priority files (typically lightly trimmed) */
important?: ITierSettings;
/** Low-priority files (typically aggressively trimmed) */
optional?: ITierSettings;
}
/**
 * Settings for a single tier
 */
export interface ITierSettings {
/** Minimum importance score (0-1) a file needs to qualify for this tier */
minScore: number;
/** Trimming level applied to files in this tier */
trimLevel: 'none' | 'light' | 'aggressive';
}
/**
@@ -92,4 +159,90 @@ export interface IContextResult {
excludedFiles: IFileInfo[];
/** Token savings from trimming */
tokenSavings: number;
}
/**
 * File metadata without contents (for lazy loading)
 */
export interface IFileMetadata {
/** Absolute file path */
path: string;
/** The file's path relative to the project root */
relativePath: string;
/** File size in bytes */
size: number;
/** Last modified time in epoch milliseconds (floored fs stat mtimeMs) */
mtime: number;
/** Estimated token count (~size/4 chars per token, computed without loading contents) */
estimatedTokens: number;
/** The file's importance score; set after analysis */
importanceScore?: number;
}
/**
 * Cache entry holding a file's contents and token count
 */
export interface ICacheEntry {
/** Absolute file path (cache key) */
path: string;
/** Full file contents at the time of caching */
contents: string;
/** Token count of the cached contents */
tokenCount: number;
/** File's last modified time (epoch milliseconds) when the entry was cached */
mtime: number;
/** When this cache entry was created (epoch milliseconds) */
cachedAt: number;
}
/**
 * Dependency information for a file in the project's import graph
 */
export interface IFileDependencies {
/** Absolute file path */
path: string;
/** Paths of files this file imports */
imports: string[];
/** Paths of files that import this file */
importedBy: string[];
/** Centrality score (0-1) - how central this file is in the dependency graph */
centrality: number;
}
/**
 * Per-file analysis result produced by the context analyzer
 */
export interface IFileAnalysis {
/** Absolute file path */
path: string;
/** Task relevance score (0-1) */
relevanceScore: number;
/** Dependency centrality score (0-1) */
centralityScore: number;
/** Token efficiency score (0-1) */
efficiencyScore: number;
/** Recency score (0-1) */
recencyScore: number;
/** Combined importance score (0-1), a weighted mix of the scores above */
importanceScore: number;
/** Assigned tier; 'excluded' files are dropped from the context entirely */
tier: 'essential' | 'important' | 'optional' | 'excluded';
/** Optional human-readable reason for the score */
reason?: string;
}
/**
 * Result of context analysis for one task type
 */
export interface IAnalysisResult {
/** Task type being analyzed */
taskType: TaskType;
/** Analyzed files with scores */
files: IFileAnalysis[];
/** Dependency graph keyed by absolute file path (a Map; not JSON-serializable as-is) */
dependencyGraph: Map<string, IFileDependencies>;
/** Total files analyzed */
totalFiles: number;
/** Analysis duration in ms */
analysisDuration: number;
}