// ContextAnalyzer test suite
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
|
||||||
import * as path from 'path';
|
|
||||||
import { ContextAnalyzer } from '../ts/context/context-analyzer.js';
|
|
||||||
import type { IFileMetadata } from '../ts/context/types.js';
|
|
||||||
|
|
||||||
const testProjectRoot = process.cwd();
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should create instance with default weights', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should create instance with custom weights', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(
|
|
||||||
testProjectRoot,
|
|
||||||
{
|
|
||||||
dependencyWeight: 0.5,
|
|
||||||
relevanceWeight: 0.3,
|
|
||||||
efficiencyWeight: 0.1,
|
|
||||||
recencyWeight: 0.1
|
|
||||||
}
|
|
||||||
);
|
|
||||||
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer.analyze should return analysis result with files', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 5000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 1250
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
|
|
||||||
relativePath: 'ts/context/enhanced-context.ts',
|
|
||||||
size: 10000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 2500
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
expect(result.taskType).toEqual('readme');
|
|
||||||
expect(result.files.length).toEqual(2);
|
|
||||||
expect(result.totalFiles).toEqual(2);
|
|
||||||
expect(result.analysisDuration).toBeGreaterThan(0);
|
|
||||||
expect(result.dependencyGraph).toBeDefined();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer.analyze should assign importance scores to files', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
expect(result.files[0].importanceScore).toBeGreaterThanOrEqual(0);
|
|
||||||
expect(result.files[0].importanceScore).toBeLessThanOrEqual(1);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer.analyze should sort files by importance score', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
|
|
||||||
relativePath: 'test/test.basic.node.ts',
|
|
||||||
size: 2000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 500
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
// Files should be sorted by importance (highest first)
|
|
||||||
for (let i = 0; i < result.files.length - 1; i++) {
|
|
||||||
expect(result.files[i].importanceScore).toBeGreaterThanOrEqual(
|
|
||||||
result.files[i + 1].importanceScore
|
|
||||||
);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer.analyze should assign tiers based on scores', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/index.ts'),
|
|
||||||
relativePath: 'ts/index.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
const file = result.files[0];
|
|
||||||
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should prioritize index.ts files for README task', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/index.ts'),
|
|
||||||
relativePath: 'ts/index.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/some-helper.ts'),
|
|
||||||
relativePath: 'ts/some-helper.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
// index.ts should have higher relevance score
|
|
||||||
const indexFile = result.files.find(f => f.path.includes('index.ts'));
|
|
||||||
const helperFile = result.files.find(f => f.path.includes('some-helper.ts'));
|
|
||||||
|
|
||||||
if (indexFile && helperFile) {
|
|
||||||
expect(indexFile.relevanceScore).toBeGreaterThan(helperFile.relevanceScore);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should deprioritize test files for README task', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
|
|
||||||
relativePath: 'test/test.basic.node.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
// Source file should have higher relevance than test file
|
|
||||||
const sourceFile = result.files.find(f => f.path.includes('ts/context/types.ts'));
|
|
||||||
const testFile = result.files.find(f => f.path.includes('test/test.basic.node.ts'));
|
|
||||||
|
|
||||||
if (sourceFile && testFile) {
|
|
||||||
expect(sourceFile.relevanceScore).toBeGreaterThan(testFile.relevanceScore);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should prioritize changed files for commit task', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const changedFile = path.join(testProjectRoot, 'ts/context/types.ts');
|
|
||||||
const unchangedFile = path.join(testProjectRoot, 'ts/index.ts');
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: changedFile,
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: unchangedFile,
|
|
||||||
relativePath: 'ts/index.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'commit', [changedFile]);
|
|
||||||
|
|
||||||
const changed = result.files.find(f => f.path === changedFile);
|
|
||||||
const unchanged = result.files.find(f => f.path === unchangedFile);
|
|
||||||
|
|
||||||
if (changed && unchanged) {
|
|
||||||
// Changed file should have recency score of 1.0
|
|
||||||
expect(changed.recencyScore).toEqual(1.0);
|
|
||||||
// Unchanged file should have recency score of 0
|
|
||||||
expect(unchanged.recencyScore).toEqual(0);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should calculate efficiency scores', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 5000, // Optimal size
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 1250
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/very-large-file.ts'),
|
|
||||||
relativePath: 'ts/very-large-file.ts',
|
|
||||||
size: 50000, // Too large
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 12500
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
// Optimal size file should have better efficiency score
|
|
||||||
const optimalFile = result.files.find(f => f.path.includes('types.ts'));
|
|
||||||
const largeFile = result.files.find(f => f.path.includes('very-large-file.ts'));
|
|
||||||
|
|
||||||
if (optimalFile && largeFile) {
|
|
||||||
expect(optimalFile.efficiencyScore).toBeGreaterThan(largeFile.efficiencyScore);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should build dependency graph', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
|
|
||||||
relativePath: 'ts/context/enhanced-context.ts',
|
|
||||||
size: 10000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 2500
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 5000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 1250
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
expect(result.dependencyGraph.size).toBeGreaterThan(0);
|
|
||||||
|
|
||||||
// Check that each file has dependency info
|
|
||||||
for (const meta of metadata) {
|
|
||||||
const deps = result.dependencyGraph.get(meta.path);
|
|
||||||
expect(deps).toBeDefined();
|
|
||||||
expect(deps!.path).toEqual(meta.path);
|
|
||||||
expect(deps!.imports).toBeDefined();
|
|
||||||
expect(deps!.importedBy).toBeDefined();
|
|
||||||
expect(deps!.centrality).toBeGreaterThanOrEqual(0);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should calculate centrality scores', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 5000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 1250
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
|
|
||||||
relativePath: 'ts/context/enhanced-context.ts',
|
|
||||||
size: 10000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 2500
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
// All centrality scores should be between 0 and 1
|
|
||||||
for (const [, deps] of result.dependencyGraph) {
|
|
||||||
expect(deps.centrality).toBeGreaterThanOrEqual(0);
|
|
||||||
expect(deps.centrality).toBeLessThanOrEqual(1);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should assign higher centrality to highly imported files', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
// types.ts is likely imported by many files
|
|
||||||
const typesPath = path.join(testProjectRoot, 'ts/context/types.ts');
|
|
||||||
// A test file is likely imported by fewer files
|
|
||||||
const testPath = path.join(testProjectRoot, 'test/test.basic.node.ts');
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: typesPath,
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 5000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 1250
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: testPath,
|
|
||||||
relativePath: 'test/test.basic.node.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
const typesDeps = result.dependencyGraph.get(typesPath);
|
|
||||||
const testDeps = result.dependencyGraph.get(testPath);
|
|
||||||
|
|
||||||
if (typesDeps && testDeps) {
|
|
||||||
// types.ts should generally have higher centrality due to being imported more
|
|
||||||
expect(typesDeps.centrality).toBeGreaterThanOrEqual(0);
|
|
||||||
expect(testDeps.centrality).toBeGreaterThanOrEqual(0);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should provide reason for scoring', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/index.ts'),
|
|
||||||
relativePath: 'ts/index.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
expect(result.files[0].reason).toBeDefined();
|
|
||||||
expect(result.files[0].reason!.length).toBeGreaterThan(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should handle empty metadata array', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const result = await analyzer.analyze([], 'readme');
|
|
||||||
|
|
||||||
expect(result.files.length).toEqual(0);
|
|
||||||
expect(result.totalFiles).toEqual(0);
|
|
||||||
expect(result.dependencyGraph.size).toEqual(0);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should respect custom tier configuration', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(
|
|
||||||
testProjectRoot,
|
|
||||||
{},
|
|
||||||
{
|
|
||||||
essential: { minScore: 0.9, trimLevel: 'none' },
|
|
||||||
important: { minScore: 0.7, trimLevel: 'light' },
|
|
||||||
optional: { minScore: 0.5, trimLevel: 'aggressive' }
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
// Should use custom tier thresholds
|
|
||||||
const file = result.files[0];
|
|
||||||
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should calculate combined importance score from all factors', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot, {
|
|
||||||
dependencyWeight: 0.25,
|
|
||||||
relevanceWeight: 0.25,
|
|
||||||
efficiencyWeight: 0.25,
|
|
||||||
recencyWeight: 0.25
|
|
||||||
});
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = [
|
|
||||||
{
|
|
||||||
path: path.join(testProjectRoot, 'ts/context/types.ts'),
|
|
||||||
relativePath: 'ts/context/types.ts',
|
|
||||||
size: 5000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 1250
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
|
|
||||||
const file = result.files[0];
|
|
||||||
|
|
||||||
// Importance score should be weighted sum of all factors
|
|
||||||
// With equal weights (0.25 each), importance should be average of all scores
|
|
||||||
const expectedImportance =
|
|
||||||
(file.relevanceScore * 0.25) +
|
|
||||||
(file.centralityScore * 0.25) +
|
|
||||||
(file.efficiencyScore * 0.25) +
|
|
||||||
(file.recencyScore * 0.25);
|
|
||||||
|
|
||||||
expect(file.importanceScore).toBeCloseTo(expectedImportance, 2);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextAnalyzer should complete analysis within reasonable time', async () => {
|
|
||||||
const analyzer = new ContextAnalyzer(testProjectRoot);
|
|
||||||
|
|
||||||
const metadata: IFileMetadata[] = Array.from({ length: 10 }, (_, i) => ({
|
|
||||||
path: path.join(testProjectRoot, `ts/file${i}.ts`),
|
|
||||||
relativePath: `ts/file${i}.ts`,
|
|
||||||
size: 3000,
|
|
||||||
mtime: Date.now(),
|
|
||||||
estimatedTokens: 750
|
|
||||||
}));
|
|
||||||
|
|
||||||
const startTime = Date.now();
|
|
||||||
const result = await analyzer.analyze(metadata, 'readme');
|
|
||||||
const endTime = Date.now();
|
|
||||||
|
|
||||||
const duration = endTime - startTime;
|
|
||||||
|
|
||||||
// Analysis duration should be recorded (can be 0 for fast operations)
|
|
||||||
expect(result.analysisDuration).toBeGreaterThanOrEqual(0);
|
|
||||||
expect(duration).toBeLessThan(10000); // Should complete within 10 seconds
|
|
||||||
});
|
|
||||||
|
|
||||||
export default tap.start();
// ---- ContextCache test suite ----
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
|
||||||
import * as path from 'path';
|
|
||||||
import * as fs from 'fs';
|
|
||||||
import { ContextCache } from '../ts/context/context-cache.js';
|
|
||||||
import type { ICacheEntry } from '../ts/context/types.js';
|
|
||||||
|
|
||||||
const testProjectRoot = process.cwd();
|
|
||||||
const testCacheDir = path.join(testProjectRoot, '.nogit', 'test-cache');
|
|
||||||
|
|
||||||
// Helper to clean up test cache directory
|
|
||||||
async function cleanupTestCache() {
|
|
||||||
try {
|
|
||||||
await fs.promises.rm(testCacheDir, { recursive: true, force: true });
|
|
||||||
} catch (error) {
|
|
||||||
// Ignore if directory doesn't exist
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
tap.test('ContextCache should create instance with default config', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
|
|
||||||
expect(cache).toBeInstanceOf(ContextCache);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.init should create cache directory', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
// Check that cache directory was created
|
|
||||||
const exists = await fs.promises.access(testCacheDir).then(() => true).catch(() => false);
|
|
||||||
expect(exists).toEqual(true);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.set should store cache entry', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const testPath = path.join(testProjectRoot, 'package.json');
|
|
||||||
// Get actual file mtime for validation to work
|
|
||||||
const stats = await fs.promises.stat(testPath);
|
|
||||||
const fileMtime = Math.floor(stats.mtimeMs);
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: testPath,
|
|
||||||
contents: 'test content',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: fileMtime,
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
|
|
||||||
const retrieved = await cache.get(testPath);
|
|
||||||
expect(retrieved).toBeDefined();
|
|
||||||
expect(retrieved!.contents).toEqual('test content');
|
|
||||||
expect(retrieved!.tokenCount).toEqual(100);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.get should return null for non-existent entry', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const retrieved = await cache.get('/non/existent/path.ts');
|
|
||||||
expect(retrieved).toBeNull();
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.get should invalidate expired entries', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true,
|
|
||||||
ttl: 1 // 1 second TTL
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const testPath = path.join(testProjectRoot, 'test-file.ts');
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: testPath,
|
|
||||||
contents: 'test content',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now() - 2000 // Cached 2 seconds ago (expired)
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
|
|
||||||
// Wait a bit to ensure expiration logic runs
|
|
||||||
await new Promise(resolve => setTimeout(resolve, 100));
|
|
||||||
|
|
||||||
const retrieved = await cache.get(testPath);
|
|
||||||
expect(retrieved).toBeNull(); // Should be expired
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.get should invalidate entries when file mtime changes', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const testPath = path.join(testProjectRoot, 'package.json');
|
|
||||||
const stats = await fs.promises.stat(testPath);
|
|
||||||
const oldMtime = Math.floor(stats.mtimeMs);
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: testPath,
|
|
||||||
contents: 'test content',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: oldMtime - 1000, // Old mtime (file has changed)
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
|
|
||||||
const retrieved = await cache.get(testPath);
|
|
||||||
expect(retrieved).toBeNull(); // Should be invalidated due to mtime mismatch
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.has should check if file is cached and valid', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const testPath = path.join(testProjectRoot, 'package.json');
|
|
||||||
const stats = await fs.promises.stat(testPath);
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: testPath,
|
|
||||||
contents: 'test content',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Math.floor(stats.mtimeMs),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
|
|
||||||
const hasIt = await cache.has(testPath);
|
|
||||||
expect(hasIt).toEqual(true);
|
|
||||||
|
|
||||||
const doesNotHaveIt = await cache.has('/non/existent/path.ts');
|
|
||||||
expect(doesNotHaveIt).toEqual(false);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.setMany should store multiple entries', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const entries: ICacheEntry[] = [
|
|
||||||
{
|
|
||||||
path: '/test/file1.ts',
|
|
||||||
contents: 'content 1',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: '/test/file2.ts',
|
|
||||||
contents: 'content 2',
|
|
||||||
tokenCount: 200,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
await cache.setMany(entries);
|
|
||||||
|
|
||||||
const stats = cache.getStats();
|
|
||||||
expect(stats.entries).toBeGreaterThanOrEqual(2);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.getStats should return cache statistics', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: '/test/file.ts',
|
|
||||||
contents: 'test content with some length',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
|
|
||||||
const stats = cache.getStats();
|
|
||||||
|
|
||||||
expect(stats.entries).toEqual(1);
|
|
||||||
expect(stats.totalSize).toBeGreaterThan(0);
|
|
||||||
expect(stats.oldestEntry).toBeDefined();
|
|
||||||
expect(stats.newestEntry).toBeDefined();
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.clear should clear all entries', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: '/test/file.ts',
|
|
||||||
contents: 'test content',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
expect(cache.getStats().entries).toEqual(1);
|
|
||||||
|
|
||||||
await cache.clear();
|
|
||||||
expect(cache.getStats().entries).toEqual(0);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache.clearPaths should clear specific entries', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const entries: ICacheEntry[] = [
|
|
||||||
{
|
|
||||||
path: '/test/file1.ts',
|
|
||||||
contents: 'content 1',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: '/test/file2.ts',
|
|
||||||
contents: 'content 2',
|
|
||||||
tokenCount: 200,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
await cache.setMany(entries);
|
|
||||||
expect(cache.getStats().entries).toEqual(2);
|
|
||||||
|
|
||||||
await cache.clearPaths(['/test/file1.ts']);
|
|
||||||
expect(cache.getStats().entries).toEqual(1);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache should enforce max size by evicting oldest entries', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true,
|
|
||||||
maxSize: 0.001 // Very small: 0.001 MB = 1KB
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
// Add entries that exceed the max size
|
|
||||||
const largeContent = 'x'.repeat(500); // 500 bytes
|
|
||||||
|
|
||||||
const entries: ICacheEntry[] = [
|
|
||||||
{
|
|
||||||
path: '/test/file1.ts',
|
|
||||||
contents: largeContent,
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now() - 3000 // Oldest
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: '/test/file2.ts',
|
|
||||||
contents: largeContent,
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now() - 2000
|
|
||||||
},
|
|
||||||
{
|
|
||||||
path: '/test/file3.ts',
|
|
||||||
contents: largeContent,
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now() - 1000 // Newest
|
|
||||||
}
|
|
||||||
];
|
|
||||||
|
|
||||||
await cache.setMany(entries);
|
|
||||||
|
|
||||||
const stats = cache.getStats();
|
|
||||||
// Should have evicted oldest entries to stay under size limit
|
|
||||||
expect(stats.totalSize).toBeLessThanOrEqual(1024); // 1KB
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache should not cache when disabled', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: false
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: '/test/file.ts',
|
|
||||||
contents: 'test content',
|
|
||||||
tokenCount: 100,
|
|
||||||
mtime: Date.now(),
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache.set(entry);
|
|
||||||
|
|
||||||
const retrieved = await cache.get('/test/file.ts');
|
|
||||||
expect(retrieved).toBeNull();
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache should persist to disk and reload', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
// Create first cache instance and add entry
|
|
||||||
const cache1 = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache1.init();
|
|
||||||
|
|
||||||
// Use a real file that exists so validation passes
|
|
||||||
const testPath = path.join(testProjectRoot, 'package.json');
|
|
||||||
const stats = await fs.promises.stat(testPath);
|
|
||||||
const fileMtime = Math.floor(stats.mtimeMs);
|
|
||||||
|
|
||||||
const entry: ICacheEntry = {
|
|
||||||
path: testPath,
|
|
||||||
contents: 'persistent content',
|
|
||||||
tokenCount: 150,
|
|
||||||
mtime: fileMtime,
|
|
||||||
cachedAt: Date.now()
|
|
||||||
};
|
|
||||||
|
|
||||||
await cache1.set(entry);
|
|
||||||
|
|
||||||
// Wait for persist
|
|
||||||
await new Promise(resolve => setTimeout(resolve, 500));
|
|
||||||
|
|
||||||
// Create second cache instance (should reload from disk)
|
|
||||||
const cache2 = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache2.init();
|
|
||||||
|
|
||||||
const cacheStats = cache2.getStats();
|
|
||||||
expect(cacheStats.entries).toBeGreaterThan(0);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache should handle invalid cache index gracefully', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// Create cache dir manually
|
|
||||||
await fs.promises.mkdir(testCacheDir, { recursive: true });
|
|
||||||
|
|
||||||
// Write invalid JSON to cache index
|
|
||||||
const cacheIndexPath = path.join(testCacheDir, 'index.json');
|
|
||||||
await fs.promises.writeFile(cacheIndexPath, 'invalid json {', 'utf-8');
|
|
||||||
|
|
||||||
// Should not throw, should just start with empty cache
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const stats = cache.getStats();
|
|
||||||
expect(stats.entries).toEqual(0);
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('ContextCache should return proper stats for empty cache', async () => {
|
|
||||||
await cleanupTestCache();
|
|
||||||
|
|
||||||
const cache = new ContextCache(testProjectRoot, {
|
|
||||||
directory: testCacheDir,
|
|
||||||
enabled: true
|
|
||||||
});
|
|
||||||
await cache.init();
|
|
||||||
|
|
||||||
const stats = cache.getStats();
|
|
||||||
|
|
||||||
expect(stats.entries).toEqual(0);
|
|
||||||
expect(stats.totalSize).toEqual(0);
|
|
||||||
expect(stats.oldestEntry).toBeNull();
|
|
||||||
expect(stats.newestEntry).toBeNull();
|
|
||||||
|
|
||||||
await cleanupTestCache();
|
|
||||||
});
|
|
||||||
|
|
||||||
export default tap.start();
|
|
||||||
Reference in New Issue
Block a user