feat(context): Introduce smart context system (analyzer, lazy loader, cache) and README/docs improvements

2025-11-02 23:07:59 +00:00
parent fe5121ec9c
commit 1d7317f063
14 changed files with 3031 additions and 114 deletions
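
Taken together, the three new pieces compose into a lazy, cached context pipeline: the loader scans metadata without reading file contents, the analyzer scores and tiers files for a task, and the cache avoids re-reading unchanged files. Below is a minimal sketch of how they could be wired, inferred only from the APIs exercised by the tests in this commit; the glob patterns, cache directory, and tokenizer are illustrative assumptions, and the real wiring (e.g. inside enhanced-context.ts) may differ.

import * as path from 'path';
import { LazyFileLoader } from './ts/context/lazy-file-loader.js';
import { ContextAnalyzer } from './ts/context/context-analyzer.js';
import { ContextCache } from './ts/context/context-cache.js';
import type { IFileMetadata } from './ts/context/types.js';

const projectRoot = process.cwd();

// Placeholder tokenizer, mirroring the rough chars/4 heuristic used in the tests below.
const tokenizer = (content: string) => Math.ceil(content.length / 4);

async function buildReadmeContext(): Promise<string[]> {
  // 1. Scan metadata only: sizes, mtimes and token estimates, no file contents yet.
  const loader = new LazyFileLoader(projectRoot);
  const metadata = await loader.scanFiles(['ts/**/*.ts', 'package.json', 'readme.md']);

  // 2. Score every file for the 'readme' task (relevance, centrality, efficiency, recency)
  //    and let the analyzer assign tiers.
  const analyzer = new ContextAnalyzer(projectRoot);
  const analysis = await analyzer.analyze(metadata, 'readme');

  // 3. Load only the files worth including, consulting the on-disk cache first.
  const cache = new ContextCache(projectRoot, {
    directory: path.join(projectRoot, '.nogit', 'context-cache'), // illustrative location
    enabled: true,
  });
  await cache.init();

  const selected = analysis.files.filter(f => f.tier === 'essential' || f.tier === 'important');
  const contents: string[] = [];
  const toLoad: IFileMetadata[] = [];

  for (const file of selected) {
    const hit = await cache.get(file.path);
    if (hit) {
      contents.push(hit.contents);
    } else {
      const meta = metadata.find(m => m.path === file.path);
      if (meta) toLoad.push(meta);
    }
  }

  const loaded = await loader.loadFiles(toLoad, tokenizer);
  for (const f of loaded) {
    contents.push(f.contents);
    await cache.set({
      path: f.path,
      contents: f.contents,
      tokenCount: f.tokenCount,
      mtime: Date.now(), // assumption: production code would store the file's real mtime
      cachedAt: Date.now(),
    });
  }

  return contents;
}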


@@ -0,0 +1,464 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { ContextAnalyzer } from '../ts/context/context-analyzer.js';
import type { IFileMetadata } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
tap.test('ContextAnalyzer should create instance with default weights', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});
tap.test('ContextAnalyzer should create instance with custom weights', async () => {
const analyzer = new ContextAnalyzer(
testProjectRoot,
{
dependencyWeight: 0.5,
relevanceWeight: 0.3,
efficiencyWeight: 0.1,
recencyWeight: 0.1
}
);
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});
tap.test('ContextAnalyzer.analyze should return analysis result with files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.taskType).toEqual('readme');
expect(result.files.length).toEqual(2);
expect(result.totalFiles).toEqual(2);
expect(result.analysisDuration).toBeGreaterThan(0);
expect(result.dependencyGraph).toBeDefined();
});
tap.test('ContextAnalyzer.analyze should assign importance scores to files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.files[0].importanceScore).toBeGreaterThanOrEqual(0);
expect(result.files[0].importanceScore).toBeLessThanOrEqual(1);
});
tap.test('ContextAnalyzer.analyze should sort files by importance score', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
relativePath: 'test/test.basic.node.ts',
size: 2000,
mtime: Date.now(),
estimatedTokens: 500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Files should be sorted by importance (highest first)
for (let i = 0; i < result.files.length - 1; i++) {
expect(result.files[i].importanceScore).toBeGreaterThanOrEqual(
result.files[i + 1].importanceScore
);
}
});
tap.test('ContextAnalyzer.analyze should assign tiers based on scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
const file = result.files[0];
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});
tap.test('ContextAnalyzer should prioritize index.ts files for README task', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'ts/some-helper.ts'),
relativePath: 'ts/some-helper.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// index.ts should have higher relevance score
const indexFile = result.files.find(f => f.path.includes('index.ts'));
const helperFile = result.files.find(f => f.path.includes('some-helper.ts'));
if (indexFile && helperFile) {
expect(indexFile.relevanceScore).toBeGreaterThan(helperFile.relevanceScore);
}
});
tap.test('ContextAnalyzer should deprioritize test files for README task', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
relativePath: 'test/test.basic.node.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Source file should have higher relevance than test file
const sourceFile = result.files.find(f => f.path.includes('ts/context/types.ts'));
const testFile = result.files.find(f => f.path.includes('test/test.basic.node.ts'));
if (sourceFile && testFile) {
expect(sourceFile.relevanceScore).toBeGreaterThan(testFile.relevanceScore);
}
});
tap.test('ContextAnalyzer should prioritize changed files for commit task', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const changedFile = path.join(testProjectRoot, 'ts/context/types.ts');
const unchangedFile = path.join(testProjectRoot, 'ts/index.ts');
const metadata: IFileMetadata[] = [
{
path: changedFile,
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: unchangedFile,
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'commit', [changedFile]);
const changed = result.files.find(f => f.path === changedFile);
const unchanged = result.files.find(f => f.path === unchangedFile);
if (changed && unchanged) {
// Changed file should have recency score of 1.0
expect(changed.recencyScore).toEqual(1.0);
// Unchanged file should have recency score of 0
expect(unchanged.recencyScore).toEqual(0);
}
});
tap.test('ContextAnalyzer should calculate efficiency scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000, // Optimal size
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/very-large-file.ts'),
relativePath: 'ts/very-large-file.ts',
size: 50000, // Too large
mtime: Date.now(),
estimatedTokens: 12500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Optimal size file should have better efficiency score
const optimalFile = result.files.find(f => f.path.includes('types.ts'));
const largeFile = result.files.find(f => f.path.includes('very-large-file.ts'));
if (optimalFile && largeFile) {
expect(optimalFile.efficiencyScore).toBeGreaterThan(largeFile.efficiencyScore);
}
});
tap.test('ContextAnalyzer should build dependency graph', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
},
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.dependencyGraph.size).toBeGreaterThan(0);
// Check that each file has dependency info
for (const meta of metadata) {
const deps = result.dependencyGraph.get(meta.path);
expect(deps).toBeDefined();
expect(deps!.path).toEqual(meta.path);
expect(deps!.imports).toBeDefined();
expect(deps!.importedBy).toBeDefined();
expect(deps!.centrality).toBeGreaterThanOrEqual(0);
}
});
tap.test('ContextAnalyzer should calculate centrality scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// All centrality scores should be between 0 and 1
for (const [, deps] of result.dependencyGraph) {
expect(deps.centrality).toBeGreaterThanOrEqual(0);
expect(deps.centrality).toBeLessThanOrEqual(1);
}
});
tap.test('ContextAnalyzer should assign higher centrality to highly imported files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
// types.ts is likely imported by many files
const typesPath = path.join(testProjectRoot, 'ts/context/types.ts');
// A test file is likely imported by fewer files
const testPath = path.join(testProjectRoot, 'test/test.basic.node.ts');
const metadata: IFileMetadata[] = [
{
path: typesPath,
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: testPath,
relativePath: 'test/test.basic.node.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
const typesDeps = result.dependencyGraph.get(typesPath);
const testDeps = result.dependencyGraph.get(testPath);
if (typesDeps && testDeps) {
// types.ts should generally have higher centrality due to being imported more
expect(typesDeps.centrality).toBeGreaterThanOrEqual(0);
expect(testDeps.centrality).toBeGreaterThanOrEqual(0);
}
});
tap.test('ContextAnalyzer should provide reason for scoring', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.files[0].reason).toBeDefined();
expect(result.files[0].reason!.length).toBeGreaterThan(0);
});
tap.test('ContextAnalyzer should handle empty metadata array', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const result = await analyzer.analyze([], 'readme');
expect(result.files.length).toEqual(0);
expect(result.totalFiles).toEqual(0);
expect(result.dependencyGraph.size).toEqual(0);
});
tap.test('ContextAnalyzer should respect custom tier configuration', async () => {
const analyzer = new ContextAnalyzer(
testProjectRoot,
{},
{
essential: { minScore: 0.9, trimLevel: 'none' },
important: { minScore: 0.7, trimLevel: 'light' },
optional: { minScore: 0.5, trimLevel: 'aggressive' }
}
);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Should use custom tier thresholds
const file = result.files[0];
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});
tap.test('ContextAnalyzer should calculate combined importance score from all factors', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot, {
dependencyWeight: 0.25,
relevanceWeight: 0.25,
efficiencyWeight: 0.25,
recencyWeight: 0.25
});
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
}
];
const result = await analyzer.analyze(metadata, 'readme');
const file = result.files[0];
// Importance score should be weighted sum of all factors
// With equal weights (0.25 each), importance should be average of all scores
const expectedImportance =
(file.relevanceScore * 0.25) +
(file.centralityScore * 0.25) +
(file.efficiencyScore * 0.25) +
(file.recencyScore * 0.25);
expect(file.importanceScore).toBeCloseTo(expectedImportance, 2);
});
tap.test('ContextAnalyzer should complete analysis within reasonable time', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = Array.from({ length: 10 }, (_, i) => ({
path: path.join(testProjectRoot, `ts/file${i}.ts`),
relativePath: `ts/file${i}.ts`,
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}));
const startTime = Date.now();
const result = await analyzer.analyze(metadata, 'readme');
const endTime = Date.now();
const duration = endTime - startTime;
expect(result.analysisDuration).toBeGreaterThan(0);
expect(duration).toBeLessThan(10000); // Should complete within 10 seconds
});
export default tap.start();


@@ -0,0 +1,456 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import * as fs from 'fs';
import { ContextCache } from '../ts/context/context-cache.js';
import type { ICacheEntry } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
const testCacheDir = path.join(testProjectRoot, '.nogit', 'test-cache');
// Helper to clean up test cache directory
async function cleanupTestCache() {
try {
await fs.promises.rm(testCacheDir, { recursive: true, force: true });
} catch (error) {
// Ignore if directory doesn't exist
}
}
tap.test('ContextCache should create instance with default config', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
expect(cache).toBeInstanceOf(ContextCache);
await cleanupTestCache();
});
tap.test('ContextCache.init should create cache directory', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
// Check that cache directory was created
const exists = await fs.promises.access(testCacheDir).then(() => true).catch(() => false);
expect(exists).toBe(true);
await cleanupTestCache();
});
tap.test('ContextCache.set should store cache entry', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get(testPath);
expect(retrieved).toBeDefined();
expect(retrieved!.contents).toEqual('test content');
expect(retrieved!.tokenCount).toEqual(100);
await cleanupTestCache();
});
tap.test('ContextCache.get should return null for non-existent entry', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const retrieved = await cache.get('/non/existent/path.ts');
expect(retrieved).toBeNull();
await cleanupTestCache();
});
tap.test('ContextCache.get should invalidate expired entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true,
ttl: 1 // 1 second TTL
});
await cache.init();
const testPath = path.join(testProjectRoot, 'test-file.ts');
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 2000 // Cached 2 seconds ago (expired)
};
await cache.set(entry);
// Wait a bit to ensure expiration logic runs
await new Promise(resolve => setTimeout(resolve, 100));
const retrieved = await cache.get(testPath);
expect(retrieved).toBeNull(); // Should be expired
await cleanupTestCache();
});
tap.test('ContextCache.get should invalidate entries when file mtime changes', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const oldMtime = Math.floor(stats.mtimeMs);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: oldMtime - 1000, // Old mtime (file has changed)
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get(testPath);
expect(retrieved).toBeNull(); // Should be invalidated due to mtime mismatch
await cleanupTestCache();
});
tap.test('ContextCache.has should check if file is cached and valid', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Math.floor(stats.mtimeMs),
cachedAt: Date.now()
};
await cache.set(entry);
const hasIt = await cache.has(testPath);
expect(hasIt).toBe(true);
const doesNotHaveIt = await cache.has('/non/existent/path.ts');
expect(doesNotHaveIt).toBe(false);
await cleanupTestCache();
});
tap.test('ContextCache.setMany should store multiple entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: 'content 1',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
},
{
path: '/test/file2.ts',
contents: 'content 2',
tokenCount: 200,
mtime: Date.now(),
cachedAt: Date.now()
}
];
await cache.setMany(entries);
const stats = cache.getStats();
expect(stats.entries).toBeGreaterThanOrEqual(2);
await cleanupTestCache();
});
tap.test('ContextCache.getStats should return cache statistics', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content with some length',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const stats = cache.getStats();
expect(stats.entries).toEqual(1);
expect(stats.totalSize).toBeGreaterThan(0);
expect(stats.oldestEntry).toBeDefined();
expect(stats.newestEntry).toBeDefined();
await cleanupTestCache();
});
tap.test('ContextCache.clear should clear all entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
expect(cache.getStats().entries).toEqual(1);
await cache.clear();
expect(cache.getStats().entries).toEqual(0);
await cleanupTestCache();
});
tap.test('ContextCache.clearPaths should clear specific entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: 'content 1',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
},
{
path: '/test/file2.ts',
contents: 'content 2',
tokenCount: 200,
mtime: Date.now(),
cachedAt: Date.now()
}
];
await cache.setMany(entries);
expect(cache.getStats().entries).toEqual(2);
await cache.clearPaths(['/test/file1.ts']);
expect(cache.getStats().entries).toEqual(1);
await cleanupTestCache();
});
tap.test('ContextCache should enforce max size by evicting oldest entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true,
maxSize: 0.001 // Very small: 0.001 MB = 1KB
});
await cache.init();
// Add entries that exceed the max size
const largeContent = 'x'.repeat(500); // 500 bytes
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 3000 // Oldest
},
{
path: '/test/file2.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 2000
},
{
path: '/test/file3.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 1000 // Newest
}
];
await cache.setMany(entries);
const stats = cache.getStats();
// Should have evicted oldest entries to stay under size limit
expect(stats.totalSize).toBeLessThanOrEqual(1024); // 1KB
await cleanupTestCache();
});
tap.test('ContextCache should not cache when disabled', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: false
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get('/test/file.ts');
expect(retrieved).toBeNull();
await cleanupTestCache();
});
tap.test('ContextCache should persist to disk and reload', async () => {
await cleanupTestCache();
// Create first cache instance and add entry
const cache1 = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache1.init();
const entry: ICacheEntry = {
path: '/test/persistent-file.ts',
contents: 'persistent content',
tokenCount: 150,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache1.set(entry);
// Wait for persist
await new Promise(resolve => setTimeout(resolve, 500));
// Create second cache instance (should reload from disk)
const cache2 = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache2.init();
const stats = cache2.getStats();
expect(stats.entries).toBeGreaterThan(0);
await cleanupTestCache();
});
tap.test('ContextCache should handle invalid cache index gracefully', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
// Create cache dir manually
await fs.promises.mkdir(testCacheDir, { recursive: true });
// Write invalid JSON to cache index
const cacheIndexPath = path.join(testCacheDir, 'index.json');
await fs.promises.writeFile(cacheIndexPath, 'invalid json {', 'utf-8');
// Should not throw, should just start with empty cache
await cache.init();
const stats = cache.getStats();
expect(stats.entries).toEqual(0);
await cleanupTestCache();
});
tap.test('ContextCache should return proper stats for empty cache', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const stats = cache.getStats();
expect(stats.entries).toEqual(0);
expect(stats.totalSize).toEqual(0);
expect(stats.oldestEntry).toBeNull();
expect(stats.newestEntry).toBeNull();
await cleanupTestCache();
});
export default tap.start();


@@ -0,0 +1,242 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { LazyFileLoader } from '../ts/context/lazy-file-loader.js';
import type { IFileMetadata } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
tap.test('LazyFileLoader should create instance with project root', async () => {
const loader = new LazyFileLoader(testProjectRoot);
expect(loader).toBeInstanceOf(LazyFileLoader);
});
tap.test('LazyFileLoader.getMetadata should return file metadata without loading contents', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata = await loader.getMetadata(packageJsonPath);
expect(metadata.path).toEqual(packageJsonPath);
expect(metadata.relativePath).toEqual('package.json');
expect(metadata.size).toBeGreaterThan(0);
expect(metadata.mtime).toBeGreaterThan(0);
expect(metadata.estimatedTokens).toBeGreaterThan(0);
// Rough estimate: size / 4
expect(metadata.estimatedTokens).toBeCloseTo(metadata.size / 4, 10);
});
tap.test('LazyFileLoader.getMetadata should cache metadata for same file', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata1 = await loader.getMetadata(packageJsonPath);
const metadata2 = await loader.getMetadata(packageJsonPath);
// Should return identical metadata from cache
expect(metadata1.mtime).toEqual(metadata2.mtime);
expect(metadata1.size).toEqual(metadata2.size);
expect(metadata1.estimatedTokens).toEqual(metadata2.estimatedTokens);
});
tap.test('LazyFileLoader.scanFiles should scan TypeScript files', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles(['ts/context/types.ts']);
expect(metadata.length).toBeGreaterThan(0);
const typesFile = metadata.find(m => m.relativePath.includes('types.ts'));
expect(typesFile).toBeDefined();
expect(typesFile!.size).toBeGreaterThan(0);
expect(typesFile!.estimatedTokens).toBeGreaterThan(0);
});
tap.test('LazyFileLoader.scanFiles should handle multiple globs', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles([
'package.json',
'readme.md'
]);
expect(metadata.length).toBeGreaterThanOrEqual(2);
const hasPackageJson = metadata.some(m => m.relativePath === 'package.json');
const hasReadme = metadata.some(m => m.relativePath.toLowerCase() === 'readme.md');
expect(hasPackageJson).toBe(true);
expect(hasReadme).toBe(true);
});
tap.test('LazyFileLoader.loadFile should load file with actual token count', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
expect(fileInfo.path).toEqual(packageJsonPath);
expect(fileInfo.contents).toBeDefined();
expect(fileInfo.contents.length).toBeGreaterThan(0);
expect(fileInfo.tokenCount).toBeGreaterThan(0);
expect(fileInfo.relativePath).toEqual('package.json');
});
tap.test('LazyFileLoader.loadFiles should load multiple files in parallel', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'package.json'),
relativePath: 'package.json',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
},
{
path: path.join(testProjectRoot, 'readme.md'),
relativePath: 'readme.md',
size: 200,
mtime: Date.now(),
estimatedTokens: 50
}
];
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const startTime = Date.now();
const files = await loader.loadFiles(metadata, tokenizer);
const endTime = Date.now();
expect(files.length).toEqual(2);
expect(files[0].contents).toBeDefined();
expect(files[1].contents).toBeDefined();
// Should be fast (parallel loading)
expect(endTime - startTime).toBeLessThan(5000); // 5 seconds max
});
tap.test('LazyFileLoader.updateImportanceScores should update cached metadata', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
// Get initial metadata
await loader.getMetadata(packageJsonPath);
// Update importance scores
const scores = new Map<string, number>();
scores.set(packageJsonPath, 0.95);
loader.updateImportanceScores(scores);
// Check cached metadata has updated score
const cached = loader.getCachedMetadata();
const packageJsonMeta = cached.find(m => m.path === packageJsonPath);
expect(packageJsonMeta).toBeDefined();
expect(packageJsonMeta!.importanceScore).toEqual(0.95);
});
tap.test('LazyFileLoader.getTotalEstimatedTokens should sum all cached metadata tokens', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan some files
await loader.scanFiles(['package.json', 'readme.md']);
const totalTokens = loader.getTotalEstimatedTokens();
expect(totalTokens).toBeGreaterThan(0);
});
tap.test('LazyFileLoader.clearCache should clear metadata cache', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan files to populate cache
await loader.scanFiles(['package.json']);
expect(loader.getCachedMetadata().length).toBeGreaterThan(0);
// Clear cache
loader.clearCache();
expect(loader.getCachedMetadata().length).toEqual(0);
});
tap.test('LazyFileLoader.getCachedMetadata should return all cached entries', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan files
await loader.scanFiles(['package.json', 'readme.md']);
const cached = loader.getCachedMetadata();
expect(cached.length).toBeGreaterThanOrEqual(2);
expect(cached.every(m => m.path && m.size && m.estimatedTokens)).toBe(true);
});
tap.test('LazyFileLoader should handle non-existent files gracefully', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const nonExistentPath = path.join(testProjectRoot, 'this-file-does-not-exist.ts');
try {
await loader.getMetadata(nonExistentPath);
expect(false).toBe(true); // Should not reach here
} catch (error) {
expect(error).toBeDefined();
}
});
tap.test('LazyFileLoader.loadFiles should filter out failed file loads', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'package.json'),
relativePath: 'package.json',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
},
{
path: path.join(testProjectRoot, 'non-existent-file.txt'),
relativePath: 'non-existent-file.txt',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
}
];
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const files = await loader.loadFiles(metadata, tokenizer);
// Should only include the successfully loaded file
expect(files.length).toEqual(1);
expect(files[0].relativePath).toEqual('package.json');
});
tap.test('LazyFileLoader should handle glob patterns for TypeScript source files', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles(['ts/context/*.ts']);
expect(metadata.length).toBeGreaterThan(0);
// Should find multiple context files
const hasEnhancedContext = metadata.some(m => m.relativePath.includes('enhanced-context.ts'));
const hasTypes = metadata.some(m => m.relativePath.includes('types.ts'));
expect(hasEnhancedContext).toBe(true);
expect(hasTypes).toBe(true);
});
tap.test('LazyFileLoader should estimate tokens reasonably accurately', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata = await loader.getMetadata(packageJsonPath);
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
// Estimated tokens should be close to actual (within reasonable range)
const difference = Math.abs(metadata.estimatedTokens - fileInfo.tokenCount);
const percentDiff = (difference / fileInfo.tokenCount) * 100;
// Should be within 20% accuracy (since it's just an estimate)
expect(percentDiff).toBeLessThan(20);
});
export default tap.start();