Compare commits: 10 commits

| SHA1 |
|---|
| a6d678e36c |
| 8c3e16a4f2 |
| 2276fb0c0c |
| 0a9d535df4 |
| d46fd1590e |
| 1d7317f063 |
| fe5121ec9c |
| c084b20390 |
| 6f024536a8 |
| 2405fb3370 |
changelog.md: 49 lines added (@@ -1,5 +1,54 @@). The new entries:

# Changelog

## 2025-11-03 - 1.7.0 - feat(IterativeContextBuilder)

Add an iterative AI-driven context builder, integrate it into the task factory, and add tests and iterative configuration.

- Introduce IterativeContextBuilder: iterative, token-aware context construction that asks the AI which files to load and evaluates context sufficiency (a usage sketch follows this changelog).
- Switch TaskContextFactory to use IterativeContextBuilder for the readme, description and commit tasks (replacing the earlier EnhancedContext flow for these tasks).
- Add iterative configuration options (maxIterations, firstPassFileLimit, subsequentPassFileLimit, temperature, model) in the types and ConfigManager, with merge support for user config.
- Update the CLI (tokens and aidoc flows) to use the iterative context factory; improve task handling and messaging.
- Add test coverage: test/test.iterativecontextbuilder.node.ts validates initialization, iterative builds, token budget enforcement and multiple task types.
- Enhance ContextCache, LazyFileLoader, ContextAnalyzer and ContextTrimmer to support the iterative pipeline and smarter prioritization/prompts.

## 2025-11-03 - 1.6.1 - fix(context)

Improve context building, caching and test robustness.

- EnhancedContext: refactored smart context building to use the analyzer and TaskContextFactory by default; taskType now defaults to 'description' and task-specific modes are applied.
- ConfigManager: simplified the analyzer configuration (removed the enabled flag) and fixed the getAnalyzerConfig fallback shape.
- ContextCache: more robust mtime handling and persistence; tests updated to use real file mtimes so cache validation works reliably.
- LazyFileLoader: adjusted the token estimation tolerance and improved metadata caching behavior.
- ContextAnalyzer & trimming pipeline: improved prioritization and trimming integration to better enforce token budgets.
- Tests: relaxed strict timing/boolean checks and made assertions more tolerant (toEqual instead of toBe) to reduce false negatives.

## 2025-11-02 - 1.6.0 - feat(context)

Introduce the smart context system: analyzer, lazy loader, cache, and README/docs improvements.

- Add ContextAnalyzer for dependency-based file scoring and prioritization (PageRank-like centrality, relevance, efficiency, recency).
- Add LazyFileLoader to scan metadata and load files in parallel with lightweight token estimates.
- Add ContextCache for persistent file-content/token caching with TTL and max-size eviction.
- Enhance ContextTrimmer with tier-based trimming and configurable light/aggressive levels.
- Integrate the new components into EnhancedContext and TaskContextFactory to build task-aware, token-optimized contexts.
- Extend ConfigManager and the types to support cache, analyzer, prioritization-weight and tier configs (driven by npmextra.json).
- Add comprehensive unit tests for ContextAnalyzer, ContextCache and LazyFileLoader.
- Update the README with Smart Context Building docs, examples, configuration options and a CI workflow snippet.

## 2025-09-07 - 1.5.2 - fix(package)

Bump dependencies, refine the test script and imports, and overhaul the README and docs.

- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
- Updated the test script to run tstest with verbose output, a logfile and an increased timeout; adjusted the testCli script invocation.
- Fixed the test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
- Large README rewrite: reorganized and expanded the content; added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
- Minor clarification added to the commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).

## 2025-08-16 - 1.5.1 - fix(aidoc)

Bump dependencies, add a pnpm workspace config, and add AiDoc.stop().

- Bumped multiple dependencies and devDependencies in package.json (notable upgrades: @git.zone/tsbuild, @git.zone/tspublish, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartai, @push.rocks/smartfile, @push.rocks/smartgit, @push.rocks/smartlog, @push.rocks/smartpath, @push.rocks/smartshell, typedoc, typescript).
- Added pnpm-workspace.yaml with onlyBuiltDependencies (esbuild, mongodb-memory-server, puppeteer, sharp).
- Added AiDoc.stop() to properly stop the OpenAI provider (resource/client shutdown).
- Updated the packageManager field in package.json to a newer pnpm version/hash.

## 2025-05-14 - 1.5.0 - feat(docs)

Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions.
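To make the 1.7.0 builder concrete, here is a minimal usage sketch in the style of the new test file further below. The option names and result fields come from this diff; the import paths (relative to the repository root) and an available OPENAI_TOKEN are assumptions of the sketch.

```typescript
import { IterativeContextBuilder } from './ts/context/iterative-context-builder.js';
import type { IIterativeConfig } from './ts/context/types.js';

// Configure the iterative, token-aware builder (option names from the 1.7.0 config).
const config: Partial<IIterativeConfig> = {
  maxIterations: 3,           // upper bound on AI-driven load/evaluate rounds
  firstPassFileLimit: 5,      // files the AI may request in the first round
  subsequentPassFileLimit: 3, // files per round after that
  temperature: 0.5,
  model: 'gpt-4',
};

const builder = new IterativeContextBuilder(process.cwd(), config);
await builder.initialize(); // sets up the AI provider; requires OPENAI_TOKEN
const result = await builder.buildContextIteratively('readme');

console.log(
  `${result.iterationCount} iterations, ${result.includedFiles.length} files, ` +
    `${result.tokenCount} tokens, ${result.apiCallCount} API calls`,
);
```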
package.json (35 diff lines)

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@git.zone/tsdoc",
-  "version": "1.5.0",
+  "version": "1.7.0",
   "private": false,
   "description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
   "type": "module",
@@ -13,37 +13,36 @@
     "tsdoc": "cli.js"
   },
   "scripts": {
-    "test": "(tstest test/) && npm run testCli",
+    "test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
     "testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
     "build": "(tsbuild --web --allowimplicitany)",
     "buildDocs": "tsdoc"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.3.2",
+    "@git.zone/tsbuild": "^2.6.8",
     "@git.zone/tsrun": "^1.2.46",
-    "@git.zone/tstest": "^1.0.90",
+    "@git.zone/tstest": "^2.3.6",
-    "@push.rocks/tapbundle": "^6.0.3",
     "@types/node": "^22.15.17"
   },
   "dependencies": {
-    "@git.zone/tspublish": "^1.5.5",
+    "@git.zone/tspublish": "^1.10.3",
     "@push.rocks/early": "^4.0.3",
-    "@push.rocks/npmextra": "^5.0.23",
+    "@push.rocks/npmextra": "^5.3.3",
-    "@push.rocks/qenv": "^6.0.5",
+    "@push.rocks/qenv": "^6.1.3",
-    "@push.rocks/smartai": "^0.5.4",
+    "@push.rocks/smartai": "^0.5.11",
     "@push.rocks/smartcli": "^4.0.11",
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.0.20",
+    "@push.rocks/smartfile": "^11.2.7",
-    "@push.rocks/smartgit": "^3.1.0",
+    "@push.rocks/smartgit": "^3.2.1",
     "@push.rocks/smartinteract": "^2.0.15",
-    "@push.rocks/smartlog": "^3.0.9",
+    "@push.rocks/smartlog": "^3.1.9",
     "@push.rocks/smartlog-destination-local": "^9.0.2",
-    "@push.rocks/smartpath": "^5.0.18",
+    "@push.rocks/smartpath": "^6.0.0",
-    "@push.rocks/smartshell": "^3.0.5",
+    "@push.rocks/smartshell": "^3.3.0",
     "@push.rocks/smarttime": "^4.0.6",
-    "gpt-tokenizer": "^2.9.0",
+    "gpt-tokenizer": "^3.0.1",
-    "typedoc": "^0.28.4",
+    "typedoc": "^0.28.12",
-    "typescript": "^5.8.3"
+    "typescript": "^5.9.2"
   },
   "files": [
     "ts/**/*",
@@ -81,5 +80,5 @@
     "url": "https://gitlab.com/gitzone/tsdoc/issues"
   },
   "homepage": "https://gitlab.com/gitzone/tsdoc#readme",
-  "packageManager": "pnpm@10.10.0+sha512.d615db246fe70f25dcfea6d8d73dee782ce23e2245e3c4f6f888249fb568149318637dca73c2c5c8ef2a4ca0d5657fb9567188bfab47f566d1ee6ce987815c39"
+  "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
 }
```
pnpm-lock.yaml (generated, ~7000 diff lines): file diff suppressed because it is too large.
pnpm-workspace.yaml (new file, 5 lines)

```yaml
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
  - sharp
```
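For context: pnpm 10 no longer runs dependency install scripts by default, so onlyBuiltDependencies whitelists the packages (here esbuild, mongodb-memory-server, puppeteer and sharp) that are still allowed to execute their build/postinstall scripts.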
test/test.aidoc.nonci.ts (import fix; the file name is identified by the 1.5.2 changelog entry above)

```diff
@@ -1,4 +1,4 @@
-import { tap, expect } from '@push.rocks/tapbundle';
+import { tap, expect } from '@git.zone/tstest/tapbundle';
 import * as qenv from '@push.rocks/qenv';
 
 let testQenv = new qenv.Qenv('./', '.nogit/');
```
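This import change lines up with the package.json diff above: @git.zone/tstest v2 ships its own tapbundle entry point, which is why the separate @push.rocks/tapbundle devDependency could be removed.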
test/test.contextanalyzer.node.ts (new file, 465 lines)

```typescript
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { ContextAnalyzer } from '../ts/context/context-analyzer.js';
import type { IFileMetadata } from '../ts/context/types.js';

const testProjectRoot = process.cwd();

tap.test('ContextAnalyzer should create instance with default weights', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});

tap.test('ContextAnalyzer should create instance with custom weights', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot, {
    dependencyWeight: 0.5,
    relevanceWeight: 0.3,
    efficiencyWeight: 0.1,
    recencyWeight: 0.1
  });
  expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});

tap.test('ContextAnalyzer.analyze should return analysis result with files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.taskType).toEqual('readme');
  expect(result.files.length).toEqual(2);
  expect(result.totalFiles).toEqual(2);
  expect(result.analysisDuration).toBeGreaterThan(0);
  expect(result.dependencyGraph).toBeDefined();
});

tap.test('ContextAnalyzer.analyze should assign importance scores to files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.files[0].importanceScore).toBeGreaterThanOrEqual(0);
  expect(result.files[0].importanceScore).toBeLessThanOrEqual(1);
});

tap.test('ContextAnalyzer.analyze should sort files by importance score', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
      relativePath: 'test/test.basic.node.ts',
      size: 2000,
      mtime: Date.now(),
      estimatedTokens: 500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // Files should be sorted by importance (highest first)
  for (let i = 0; i < result.files.length - 1; i++) {
    expect(result.files[i].importanceScore).toBeGreaterThanOrEqual(
      result.files[i + 1].importanceScore
    );
  }
});

tap.test('ContextAnalyzer.analyze should assign tiers based on scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  const file = result.files[0];
  expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});

tap.test('ContextAnalyzer should prioritize index.ts files for README task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'ts/some-helper.ts'),
      relativePath: 'ts/some-helper.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // index.ts should have the higher relevance score
  const indexFile = result.files.find(f => f.path.includes('index.ts'));
  const helperFile = result.files.find(f => f.path.includes('some-helper.ts'));

  if (indexFile && helperFile) {
    expect(indexFile.relevanceScore).toBeGreaterThan(helperFile.relevanceScore);
  }
});

tap.test('ContextAnalyzer should deprioritize test files for README task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
      relativePath: 'test/test.basic.node.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // The source file should have higher relevance than the test file
  const sourceFile = result.files.find(f => f.path.includes('ts/context/types.ts'));
  const testFile = result.files.find(f => f.path.includes('test/test.basic.node.ts'));

  if (sourceFile && testFile) {
    expect(sourceFile.relevanceScore).toBeGreaterThan(testFile.relevanceScore);
  }
});

tap.test('ContextAnalyzer should prioritize changed files for commit task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const changedFile = path.join(testProjectRoot, 'ts/context/types.ts');
  const unchangedFile = path.join(testProjectRoot, 'ts/index.ts');

  const metadata: IFileMetadata[] = [
    {
      path: changedFile,
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: unchangedFile,
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'commit', [changedFile]);

  const changed = result.files.find(f => f.path === changedFile);
  const unchanged = result.files.find(f => f.path === unchangedFile);

  if (changed && unchanged) {
    // The changed file should have a recency score of 1.0
    expect(changed.recencyScore).toEqual(1.0);
    // The unchanged file should have a recency score of 0
    expect(unchanged.recencyScore).toEqual(0);
  }
});

tap.test('ContextAnalyzer should calculate efficiency scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000, // Optimal size
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/very-large-file.ts'),
      relativePath: 'ts/very-large-file.ts',
      size: 50000, // Too large
      mtime: Date.now(),
      estimatedTokens: 12500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // The optimally sized file should have the better efficiency score
  const optimalFile = result.files.find(f => f.path.includes('types.ts'));
  const largeFile = result.files.find(f => f.path.includes('very-large-file.ts'));

  if (optimalFile && largeFile) {
    expect(optimalFile.efficiencyScore).toBeGreaterThan(largeFile.efficiencyScore);
  }
});

tap.test('ContextAnalyzer should build dependency graph', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    },
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.dependencyGraph.size).toBeGreaterThan(0);

  // Check that each file has dependency info
  for (const meta of metadata) {
    const deps = result.dependencyGraph.get(meta.path);
    expect(deps).toBeDefined();
    expect(deps!.path).toEqual(meta.path);
    expect(deps!.imports).toBeDefined();
    expect(deps!.importedBy).toBeDefined();
    expect(deps!.centrality).toBeGreaterThanOrEqual(0);
  }
});

tap.test('ContextAnalyzer should calculate centrality scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // All centrality scores should be between 0 and 1
  for (const [, deps] of result.dependencyGraph) {
    expect(deps.centrality).toBeGreaterThanOrEqual(0);
    expect(deps.centrality).toBeLessThanOrEqual(1);
  }
});

tap.test('ContextAnalyzer should assign higher centrality to highly imported files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  // types.ts is likely imported by many files
  const typesPath = path.join(testProjectRoot, 'ts/context/types.ts');
  // A test file is likely imported by fewer files
  const testPath = path.join(testProjectRoot, 'test/test.basic.node.ts');

  const metadata: IFileMetadata[] = [
    {
      path: typesPath,
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: testPath,
      relativePath: 'test/test.basic.node.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  const typesDeps = result.dependencyGraph.get(typesPath);
  const testDeps = result.dependencyGraph.get(testPath);

  if (typesDeps && testDeps) {
    // types.ts should generally have higher centrality due to being imported more
    expect(typesDeps.centrality).toBeGreaterThanOrEqual(0);
    expect(testDeps.centrality).toBeGreaterThanOrEqual(0);
  }
});

tap.test('ContextAnalyzer should provide reason for scoring', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.files[0].reason).toBeDefined();
  expect(result.files[0].reason!.length).toBeGreaterThan(0);
});

tap.test('ContextAnalyzer should handle empty metadata array', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const result = await analyzer.analyze([], 'readme');

  expect(result.files.length).toEqual(0);
  expect(result.totalFiles).toEqual(0);
  expect(result.dependencyGraph.size).toEqual(0);
});

tap.test('ContextAnalyzer should respect custom tier configuration', async () => {
  const analyzer = new ContextAnalyzer(
    testProjectRoot,
    {},
    {
      essential: { minScore: 0.9, trimLevel: 'none' },
      important: { minScore: 0.7, trimLevel: 'light' },
      optional: { minScore: 0.5, trimLevel: 'aggressive' }
    }
  );

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // Should use the custom tier thresholds
  const file = result.files[0];
  expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});

tap.test('ContextAnalyzer should calculate combined importance score from all factors', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot, {
    dependencyWeight: 0.25,
    relevanceWeight: 0.25,
    efficiencyWeight: 0.25,
    recencyWeight: 0.25
  });

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  const file = result.files[0];

  // The importance score should be the weighted sum of all factors;
  // with equal weights (0.25 each), importance is the average of all scores
  const expectedImportance =
    (file.relevanceScore * 0.25) +
    (file.centralityScore * 0.25) +
    (file.efficiencyScore * 0.25) +
    (file.recencyScore * 0.25);

  expect(file.importanceScore).toBeCloseTo(expectedImportance, 2);
});

tap.test('ContextAnalyzer should complete analysis within reasonable time', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = Array.from({ length: 10 }, (_, i) => ({
    path: path.join(testProjectRoot, `ts/file${i}.ts`),
    relativePath: `ts/file${i}.ts`,
    size: 3000,
    mtime: Date.now(),
    estimatedTokens: 750
  }));

  const startTime = Date.now();
  const result = await analyzer.analyze(metadata, 'readme');
  const endTime = Date.now();

  const duration = endTime - startTime;

  // The analysis duration should be recorded (can be 0 for fast operations)
  expect(result.analysisDuration).toBeGreaterThanOrEqual(0);
  expect(duration).toBeLessThan(10000); // Should complete within 10 seconds
});

export default tap.start();
```
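The changelog calls the analyzer's dependency score a "PageRank-like centrality". As a rough illustration of what such a measure computes over an import graph (this is not the package's actual implementation), the sketch below assigns higher rank to files that many other files import:

```typescript
// Illustrative only: a PageRank-style centrality over an import graph,
// not the actual ContextAnalyzer implementation.
// `graph` maps each file to the list of files it imports.
function centrality(graph: Map<string, string[]>, damping = 0.85, rounds = 20): Map<string, number> {
  const files = [...graph.keys()];
  const n = files.length || 1;
  let rank = new Map<string, number>();
  for (const f of files) rank.set(f, 1 / n);

  for (let i = 0; i < rounds; i++) {
    const next = new Map<string, number>();
    for (const f of files) next.set(f, (1 - damping) / n);
    for (const [file, imports] of graph) {
      if (imports.length === 0) continue; // dangling mass dropped for simplicity
      const share = (damping * (rank.get(file) ?? 0)) / imports.length;
      for (const imp of imports) {
        if (next.has(imp)) next.set(imp, next.get(imp)! + share);
      }
    }
    rank = next;
  }
  return rank; // frequently imported files accumulate higher rank
}

// Example: types.ts is imported by both other modules, so it ends up ranked highest.
const rank = centrality(new Map<string, string[]>([
  ['ts/context/types.ts', []],
  ['ts/context/enhanced-context.ts', ['ts/context/types.ts']],
  ['ts/index.ts', ['ts/context/types.ts', 'ts/context/enhanced-context.ts']],
]));
console.log(rank);
```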
test/test.contextcache.node.ts (new file, 465 lines)

```typescript
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import * as fs from 'fs';
import { ContextCache } from '../ts/context/context-cache.js';
import type { ICacheEntry } from '../ts/context/types.js';

const testProjectRoot = process.cwd();
const testCacheDir = path.join(testProjectRoot, '.nogit', 'test-cache');

// Helper to clean up the test cache directory
async function cleanupTestCache() {
  try {
    await fs.promises.rm(testCacheDir, { recursive: true, force: true });
  } catch (error) {
    // Ignore if the directory doesn't exist
  }
}

tap.test('ContextCache should create instance with default config', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });

  expect(cache).toBeInstanceOf(ContextCache);

  await cleanupTestCache();
});

tap.test('ContextCache.init should create cache directory', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });

  await cache.init();

  // Check that the cache directory was created
  const exists = await fs.promises.access(testCacheDir).then(() => true).catch(() => false);
  expect(exists).toEqual(true);

  await cleanupTestCache();
});

tap.test('ContextCache.set should store cache entry', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'package.json');
  // Get the actual file mtime so validation works
  const stats = await fs.promises.stat(testPath);
  const fileMtime = Math.floor(stats.mtimeMs);

  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: fileMtime,
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const retrieved = await cache.get(testPath);
  expect(retrieved).toBeDefined();
  expect(retrieved!.contents).toEqual('test content');
  expect(retrieved!.tokenCount).toEqual(100);

  await cleanupTestCache();
});

tap.test('ContextCache.get should return null for non-existent entry', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const retrieved = await cache.get('/non/existent/path.ts');
  expect(retrieved).toBeNull();

  await cleanupTestCache();
});

tap.test('ContextCache.get should invalidate expired entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true,
    ttl: 1 // 1 second TTL
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'test-file.ts');
  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now() - 2000 // Cached 2 seconds ago (expired)
  };

  await cache.set(entry);

  // Wait a bit to ensure the expiration logic runs
  await new Promise(resolve => setTimeout(resolve, 100));

  const retrieved = await cache.get(testPath);
  expect(retrieved).toBeNull(); // Should be expired

  await cleanupTestCache();
});

tap.test('ContextCache.get should invalidate entries when file mtime changes', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'package.json');
  const stats = await fs.promises.stat(testPath);
  const oldMtime = Math.floor(stats.mtimeMs);

  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: oldMtime - 1000, // Old mtime (file has changed)
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const retrieved = await cache.get(testPath);
  expect(retrieved).toBeNull(); // Should be invalidated due to mtime mismatch

  await cleanupTestCache();
});

tap.test('ContextCache.has should check if file is cached and valid', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'package.json');
  const stats = await fs.promises.stat(testPath);

  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: Math.floor(stats.mtimeMs),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const hasIt = await cache.has(testPath);
  expect(hasIt).toEqual(true);

  const doesNotHaveIt = await cache.has('/non/existent/path.ts');
  expect(doesNotHaveIt).toEqual(false);

  await cleanupTestCache();
});

tap.test('ContextCache.setMany should store multiple entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entries: ICacheEntry[] = [
    {
      path: '/test/file1.ts',
      contents: 'content 1',
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now()
    },
    {
      path: '/test/file2.ts',
      contents: 'content 2',
      tokenCount: 200,
      mtime: Date.now(),
      cachedAt: Date.now()
    }
  ];

  await cache.setMany(entries);

  const stats = cache.getStats();
  expect(stats.entries).toBeGreaterThanOrEqual(2);

  await cleanupTestCache();
});

tap.test('ContextCache.getStats should return cache statistics', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entry: ICacheEntry = {
    path: '/test/file.ts',
    contents: 'test content with some length',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const stats = cache.getStats();

  expect(stats.entries).toEqual(1);
  expect(stats.totalSize).toBeGreaterThan(0);
  expect(stats.oldestEntry).toBeDefined();
  expect(stats.newestEntry).toBeDefined();

  await cleanupTestCache();
});

tap.test('ContextCache.clear should clear all entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entry: ICacheEntry = {
    path: '/test/file.ts',
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);
  expect(cache.getStats().entries).toEqual(1);

  await cache.clear();
  expect(cache.getStats().entries).toEqual(0);

  await cleanupTestCache();
});

tap.test('ContextCache.clearPaths should clear specific entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entries: ICacheEntry[] = [
    {
      path: '/test/file1.ts',
      contents: 'content 1',
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now()
    },
    {
      path: '/test/file2.ts',
      contents: 'content 2',
      tokenCount: 200,
      mtime: Date.now(),
      cachedAt: Date.now()
    }
  ];

  await cache.setMany(entries);
  expect(cache.getStats().entries).toEqual(2);

  await cache.clearPaths(['/test/file1.ts']);
  expect(cache.getStats().entries).toEqual(1);

  await cleanupTestCache();
});

tap.test('ContextCache should enforce max size by evicting oldest entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true,
    maxSize: 0.001 // Very small: 0.001 MB = 1KB
  });
  await cache.init();

  // Add entries that exceed the max size
  const largeContent = 'x'.repeat(500); // 500 bytes

  const entries: ICacheEntry[] = [
    {
      path: '/test/file1.ts',
      contents: largeContent,
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now() - 3000 // Oldest
    },
    {
      path: '/test/file2.ts',
      contents: largeContent,
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now() - 2000
    },
    {
      path: '/test/file3.ts',
      contents: largeContent,
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now() - 1000 // Newest
    }
  ];

  await cache.setMany(entries);

  const stats = cache.getStats();
  // Should have evicted the oldest entries to stay under the size limit
  expect(stats.totalSize).toBeLessThanOrEqual(1024); // 1KB

  await cleanupTestCache();
});

tap.test('ContextCache should not cache when disabled', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: false
  });
  await cache.init();

  const entry: ICacheEntry = {
    path: '/test/file.ts',
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const retrieved = await cache.get('/test/file.ts');
  expect(retrieved).toBeNull();

  await cleanupTestCache();
});

tap.test('ContextCache should persist to disk and reload', async () => {
  await cleanupTestCache();

  // Create the first cache instance and add an entry
  const cache1 = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache1.init();

  // Use a real file that exists so validation passes
  const testPath = path.join(testProjectRoot, 'package.json');
  const stats = await fs.promises.stat(testPath);
  const fileMtime = Math.floor(stats.mtimeMs);

  const entry: ICacheEntry = {
    path: testPath,
    contents: 'persistent content',
    tokenCount: 150,
    mtime: fileMtime,
    cachedAt: Date.now()
  };

  await cache1.set(entry);

  // Wait for persist
  await new Promise(resolve => setTimeout(resolve, 500));

  // Create a second cache instance (should reload from disk)
  const cache2 = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache2.init();

  const cacheStats = cache2.getStats();
  expect(cacheStats.entries).toBeGreaterThan(0);

  await cleanupTestCache();
});

tap.test('ContextCache should handle invalid cache index gracefully', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });

  // Create the cache dir manually
  await fs.promises.mkdir(testCacheDir, { recursive: true });

  // Write invalid JSON to the cache index
  const cacheIndexPath = path.join(testCacheDir, 'index.json');
  await fs.promises.writeFile(cacheIndexPath, 'invalid json {', 'utf-8');

  // Should not throw; should just start with an empty cache
  await cache.init();

  const stats = cache.getStats();
  expect(stats.entries).toEqual(0);

  await cleanupTestCache();
});

tap.test('ContextCache should return proper stats for empty cache', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const stats = cache.getStats();

  expect(stats.entries).toEqual(0);
  expect(stats.totalSize).toEqual(0);
  expect(stats.oldestEntry).toBeNull();
  expect(stats.newestEntry).toBeNull();

  await cleanupTestCache();
});

export default tap.start();
```
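These tests pin down the cache's invalidation contract: an entry is served only while it is younger than the TTL and the file on disk still has the mtime captured at caching time. A minimal sketch of that check, assuming the ICacheEntry shape used in the tests (illustrative, not the package's implementation):

```typescript
import * as fs from 'fs';

interface ICacheEntry {
  path: string;      // absolute path of the cached file
  contents: string;  // cached file contents
  tokenCount: number;
  mtime: number;     // file mtime captured when the entry was stored
  cachedAt: number;  // wall-clock time the entry was stored
}

// An entry is valid only if it is younger than the TTL and the file on disk
// has not been modified since it was cached; otherwise it is invalidated.
async function isEntryValid(entry: ICacheEntry, ttlSeconds: number): Promise<boolean> {
  if (Date.now() - entry.cachedAt > ttlSeconds * 1000) return false; // expired (TTL)
  try {
    const stats = await fs.promises.stat(entry.path);
    return Math.floor(stats.mtimeMs) === entry.mtime; // mtime mismatch means stale
  } catch {
    return false; // file no longer exists, so the entry is invalid
  }
}
```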
test/test.iterativecontextbuilder.node.ts (new file, 147 lines)

```typescript
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { IterativeContextBuilder } from '../ts/context/iterative-context-builder.js';
import type { IIterativeConfig, TaskType } from '../ts/context/types.js';
import * as qenv from '@push.rocks/qenv';

// Test project directory
const testProjectRoot = path.join(process.cwd());

// Helper to check if OPENAI_TOKEN is available
async function hasOpenAIToken(): Promise<boolean> {
  try {
    const qenvInstance = new qenv.Qenv();
    const token = await qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN');
    return !!token;
  } catch (error) {
    return false;
  }
}

tap.test('IterativeContextBuilder should create instance with default config', async () => {
  const builder = new IterativeContextBuilder(testProjectRoot);
  expect(builder).toBeInstanceOf(IterativeContextBuilder);
});

tap.test('IterativeContextBuilder should create instance with custom config', async () => {
  const customConfig: Partial<IIterativeConfig> = {
    maxIterations: 3,
    firstPassFileLimit: 5,
    subsequentPassFileLimit: 3,
    temperature: 0.5,
    model: 'gpt-4',
  };
  const builder = new IterativeContextBuilder(testProjectRoot, customConfig);
  expect(builder).toBeInstanceOf(IterativeContextBuilder);
});

tap.test('IterativeContextBuilder should initialize successfully', async () => {
  if (!(await hasOpenAIToken())) {
    console.log('⚠️ Skipping initialization test - OPENAI_TOKEN not available');
    return;
  }

  const builder = new IterativeContextBuilder(testProjectRoot);
  await builder.initialize();
  // If we get here without error, initialization succeeded
  expect(true).toEqual(true);
});

tap.test('IterativeContextBuilder should build context iteratively for readme task', async () => {
  if (!(await hasOpenAIToken())) {
    console.log('⚠️ Skipping iterative build test - OPENAI_TOKEN not available');
    return;
  }

  const builder = new IterativeContextBuilder(testProjectRoot, {
    maxIterations: 2, // Limit iterations for testing
    firstPassFileLimit: 3,
    subsequentPassFileLimit: 2,
  });

  await builder.initialize();

  const result = await builder.buildContextIteratively('readme');

  // Verify the result structure
  expect(result).toBeTypeOf('object');
  expect(result.context).toBeTypeOf('string');
  expect(result.context.length).toBeGreaterThan(0);
  expect(result.tokenCount).toBeTypeOf('number');
  expect(result.tokenCount).toBeGreaterThan(0);
  expect(result.includedFiles).toBeInstanceOf(Array);
  expect(result.includedFiles.length).toBeGreaterThan(0);
  expect(result.iterationCount).toBeTypeOf('number');
  expect(result.iterationCount).toBeGreaterThan(0);
  expect(result.iterationCount).toBeLessThanOrEqual(2);
  expect(result.iterations).toBeInstanceOf(Array);
  expect(result.iterations.length).toEqual(result.iterationCount);
  expect(result.apiCallCount).toBeTypeOf('number');
  expect(result.apiCallCount).toBeGreaterThan(0);
  expect(result.totalDuration).toBeTypeOf('number');
  expect(result.totalDuration).toBeGreaterThan(0);

  // Verify the iteration structure
  for (const iteration of result.iterations) {
    expect(iteration.iteration).toBeTypeOf('number');
    expect(iteration.filesLoaded).toBeInstanceOf(Array);
    expect(iteration.tokensUsed).toBeTypeOf('number');
    expect(iteration.totalTokensUsed).toBeTypeOf('number');
    expect(iteration.decision).toBeTypeOf('object');
    expect(iteration.duration).toBeTypeOf('number');
  }

  console.log(`✅ Iterative context build completed:`);
  console.log(`   Iterations: ${result.iterationCount}`);
  console.log(`   Files: ${result.includedFiles.length}`);
  console.log(`   Tokens: ${result.tokenCount}`);
  console.log(`   API calls: ${result.apiCallCount}`);
  console.log(`   Duration: ${(result.totalDuration / 1000).toFixed(2)}s`);
});

tap.test('IterativeContextBuilder should respect token budget', async () => {
  if (!(await hasOpenAIToken())) {
    console.log('⚠️ Skipping token budget test - OPENAI_TOKEN not available');
    return;
  }

  const builder = new IterativeContextBuilder(testProjectRoot, {
    maxIterations: 5,
  });

  await builder.initialize();

  const result = await builder.buildContextIteratively('description');

  // The token count should not exceed the budget significantly (allow a 5% safety margin)
  const configManager = (await import('../ts/context/config-manager.js')).ConfigManager.getInstance();
  const maxTokens = configManager.getMaxTokens();
  expect(result.tokenCount).toBeLessThanOrEqual(maxTokens * 1.05);

  console.log(`✅ Token budget respected: ${result.tokenCount}/${maxTokens}`);
});

tap.test('IterativeContextBuilder should work with different task types', async () => {
  if (!(await hasOpenAIToken())) {
    console.log('⚠️ Skipping task types test - OPENAI_TOKEN not available');
    return;
  }

  const taskTypes: TaskType[] = ['readme', 'description', 'commit'];

  for (const taskType of taskTypes) {
    const builder = new IterativeContextBuilder(testProjectRoot, {
      maxIterations: 2,
      firstPassFileLimit: 2,
    });

    await builder.initialize();
    const result = await builder.buildContextIteratively(taskType);

    expect(result.includedFiles.length).toBeGreaterThan(0);

    console.log(`✅ ${taskType}: ${result.includedFiles.length} files, ${result.tokenCount} tokens`);
  }
});

export default tap.start();
```
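The result and iteration shapes asserted above (filesLoaded, tokensUsed, totalTokensUsed, decision) imply a loop of roughly the following shape. This is a hedged sketch of that control flow, not the builder's real internals; askAiWhichFiles and evaluateSufficiency are hypothetical stand-ins for the AI-provider calls, and the 4-characters-per-token estimate mirrors the heuristic used elsewhere in the tests.

```typescript
import * as fs from 'fs';

type TDecision = { sufficient: boolean };

// Toy stand-ins so the sketch runs; the real builder queries its AI provider here.
const askAiWhichFiles = async (_task: string, _contextSoFar: string): Promise<string[]> =>
  ['package.json'];
const evaluateSufficiency = async (_task: string, context: string): Promise<TDecision> =>
  ({ sufficient: context.length > 0 });

async function buildIteratively(task: string, maxIterations: number, tokenBudget: number) {
  let context = '';
  let totalTokensUsed = 0;
  const iterations: Array<{
    iteration: number;
    filesLoaded: string[];
    tokensUsed: number;
    totalTokensUsed: number;
    decision: TDecision;
  }> = [];

  for (let i = 1; i <= maxIterations; i++) {
    // 1. Ask the AI which files it wants to see next.
    const filesLoaded = await askAiWhichFiles(task, context);

    // 2. Load them, counting tokens (~4 characters per token) against the budget.
    let tokensUsed = 0;
    for (const file of filesLoaded) {
      const text = await fs.promises.readFile(file, 'utf-8');
      const tokens = Math.ceil(text.length / 4);
      if (totalTokensUsed + tokensUsed + tokens > tokenBudget) break;
      context += text;
      tokensUsed += tokens;
    }
    totalTokensUsed += tokensUsed;

    // 3. Ask the AI whether the accumulated context is sufficient for the task.
    const decision = await evaluateSufficiency(task, context);
    iterations.push({ iteration: i, filesLoaded, tokensUsed, totalTokensUsed, decision });
    if (decision.sufficient || totalTokensUsed >= tokenBudget) break;
  }

  return { context, tokenCount: totalTokensUsed, iterationCount: iterations.length, iterations };
}

// Example: at most 2 rounds, roughly mirroring the test configuration above.
const demo = await buildIteratively('readme', 2, 100_000);
console.log(demo.iterationCount, demo.tokenCount);
```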
test/test.lazyfileloader.node.ts (new file, 243 lines; the compare view cuts off partway through)

```typescript
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { LazyFileLoader } from '../ts/context/lazy-file-loader.js';
import type { IFileMetadata } from '../ts/context/types.js';

const testProjectRoot = process.cwd();

tap.test('LazyFileLoader should create instance with project root', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  expect(loader).toBeInstanceOf(LazyFileLoader);
});

tap.test('LazyFileLoader.getMetadata should return file metadata without loading contents', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const metadata = await loader.getMetadata(packageJsonPath);

  expect(metadata.path).toEqual(packageJsonPath);
  expect(metadata.relativePath).toEqual('package.json');
  expect(metadata.size).toBeGreaterThan(0);
  expect(metadata.mtime).toBeGreaterThan(0);
  expect(metadata.estimatedTokens).toBeGreaterThan(0);
  // Rough estimate: size / 4 (with reasonable tolerance)
  expect(metadata.estimatedTokens).toBeGreaterThan(metadata.size / 5);
  expect(metadata.estimatedTokens).toBeLessThan(metadata.size / 3);
});

tap.test('LazyFileLoader.getMetadata should cache metadata for same file', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const metadata1 = await loader.getMetadata(packageJsonPath);
  const metadata2 = await loader.getMetadata(packageJsonPath);

  // Should return identical metadata from the cache
  expect(metadata1.mtime).toEqual(metadata2.mtime);
  expect(metadata1.size).toEqual(metadata2.size);
  expect(metadata1.estimatedTokens).toEqual(metadata2.estimatedTokens);
});

tap.test('LazyFileLoader.scanFiles should scan TypeScript files', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata = await loader.scanFiles(['ts/context/types.ts']);

  expect(metadata.length).toBeGreaterThan(0);
  const typesFile = metadata.find(m => m.relativePath.includes('types.ts'));
  expect(typesFile).toBeDefined();
  expect(typesFile!.size).toBeGreaterThan(0);
  expect(typesFile!.estimatedTokens).toBeGreaterThan(0);
});

tap.test('LazyFileLoader.scanFiles should handle multiple globs', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata = await loader.scanFiles([
    'package.json',
    'readme.md'
  ]);

  expect(metadata.length).toBeGreaterThanOrEqual(2);
  const hasPackageJson = metadata.some(m => m.relativePath === 'package.json');
  const hasReadme = metadata.some(m => m.relativePath.toLowerCase() === 'readme.md');
  expect(hasPackageJson).toEqual(true);
  expect(hasReadme).toEqual(true);
});

tap.test('LazyFileLoader.loadFile should load file with actual token count', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);

  expect(fileInfo.path).toEqual(packageJsonPath);
  expect(fileInfo.contents).toBeDefined();
  expect(fileInfo.contents.length).toBeGreaterThan(0);
  expect(fileInfo.tokenCount).toBeGreaterThan(0);
  expect(fileInfo.relativePath).toEqual('package.json');
});

tap.test('LazyFileLoader.loadFiles should load multiple files in parallel', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'package.json'),
      relativePath: 'package.json',
      size: 100,
      mtime: Date.now(),
      estimatedTokens: 25
    },
    {
      path: path.join(testProjectRoot, 'readme.md'),
      relativePath: 'readme.md',
      size: 200,
      mtime: Date.now(),
      estimatedTokens: 50
    }
  ];

  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const startTime = Date.now();
  const files = await loader.loadFiles(metadata, tokenizer);
  const endTime = Date.now();

  expect(files.length).toEqual(2);
  expect(files[0].contents).toBeDefined();
  expect(files[1].contents).toBeDefined();

  // Should be fast (parallel loading)
  expect(endTime - startTime).toBeLessThan(5000); // 5 seconds max
});

tap.test('LazyFileLoader.updateImportanceScores should update cached metadata', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  // Get the initial metadata
  await loader.getMetadata(packageJsonPath);

  // Update importance scores
  const scores = new Map<string, number>();
  scores.set(packageJsonPath, 0.95);
  loader.updateImportanceScores(scores);

  // Check that the cached metadata has the updated score
  const cached = loader.getCachedMetadata();
  const packageJsonMeta = cached.find(m => m.path === packageJsonPath);

  expect(packageJsonMeta).toBeDefined();
  expect(packageJsonMeta!.importanceScore).toEqual(0.95);
});

tap.test('LazyFileLoader.getTotalEstimatedTokens should sum all cached metadata tokens', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  // Scan some files
  await loader.scanFiles(['package.json', 'readme.md']);

  const totalTokens = loader.getTotalEstimatedTokens();

  expect(totalTokens).toBeGreaterThan(0);
});

tap.test('LazyFileLoader.clearCache should clear metadata cache', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  // Scan files to populate the cache
  await loader.scanFiles(['package.json']);
  expect(loader.getCachedMetadata().length).toBeGreaterThan(0);

  // Clear the cache
  loader.clearCache();

  expect(loader.getCachedMetadata().length).toEqual(0);
});

tap.test('LazyFileLoader.getCachedMetadata should return all cached entries', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  // Scan files
  await loader.scanFiles(['package.json', 'readme.md']);

  const cached = loader.getCachedMetadata();

  expect(cached.length).toBeGreaterThanOrEqual(2);
  expect(cached.every(m => m.path && m.size && m.estimatedTokens)).toEqual(true);
});

tap.test('LazyFileLoader should handle non-existent files gracefully', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const nonExistentPath = path.join(testProjectRoot, 'this-file-does-not-exist.ts');

  try {
    await loader.getMetadata(nonExistentPath);
    expect(false).toEqual(true); // Should not reach here
  } catch (error) {
    expect(error).toBeDefined();
  }
});

tap.test('LazyFileLoader.loadFiles should filter out failed file loads', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'package.json'),
      relativePath: 'package.json',
      size: 100,
      mtime: Date.now(),
      estimatedTokens: 25
    },
    {
      path: path.join(testProjectRoot, 'non-existent-file.txt'),
      relativePath: 'non-existent-file.txt',
      size: 100,
      mtime: Date.now(),
      estimatedTokens: 25
    }
  ];

  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const files = await loader.loadFiles(metadata, tokenizer);

  // Should only include the successfully loaded file
  expect(files.length).toEqual(1);
  expect(files[0].relativePath).toEqual('package.json');
});

tap.test('LazyFileLoader should handle glob patterns for TypeScript source files', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
```
|
||||||
|
|
||||||
|
const metadata = await loader.scanFiles(['ts/context/*.ts']);
|
||||||
|
|
||||||
|
expect(metadata.length).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
// Should find multiple context files
|
||||||
|
const hasEnhancedContext = metadata.some(m => m.relativePath.includes('enhanced-context.ts'));
|
||||||
|
const hasTypes = metadata.some(m => m.relativePath.includes('types.ts'));
|
||||||
|
|
||||||
|
expect(hasEnhancedContext).toEqual(true);
|
||||||
|
expect(hasTypes).toEqual(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('LazyFileLoader should estimate tokens reasonably accurately', async () => {
|
||||||
|
const loader = new LazyFileLoader(testProjectRoot);
|
||||||
|
const packageJsonPath = path.join(testProjectRoot, 'package.json');
|
||||||
|
|
||||||
|
const metadata = await loader.getMetadata(packageJsonPath);
|
||||||
|
const tokenizer = (content: string) => Math.ceil(content.length / 4);
|
||||||
|
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
|
||||||
|
|
||||||
|
// Estimated tokens should be close to actual (within reasonable range)
|
||||||
|
const difference = Math.abs(metadata.estimatedTokens - fileInfo.tokenCount);
|
||||||
|
const percentDiff = (difference / fileInfo.tokenCount) * 100;
|
||||||
|
|
||||||
|
// Should be within 20% accuracy (since it's just an estimate)
|
||||||
|
expect(percentDiff).toBeLessThan(20);
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
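A note on the token math above: these tests approximate tokenization with a characters/4 heuristic rather than a real tokenizer. A quick worked example (illustrative only, not part of the test file):

```typescript
// Rough heuristic: ~1 token per 4 characters of source text.
const tokenizer = (content: string) => Math.ceil(content.length / 4);

tokenizer('hello world');         // 11 chars -> 3 "tokens"
tokenizer('{ "name": "tsdoc" }'); // 19 chars -> 5 "tokens"
```

The 20% tolerance in the final test exists because the scan-time estimate is itself a size-based approximation rather than an exact count.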
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@git.zone/tsdoc',
-  version: '1.5.0',
+  version: '1.7.0',
   description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
 }
@@ -77,8 +77,8 @@ interface {
 For the recommendedNextVersionDetails, please only add a detail entries to the array if it has an obvious value to the reader.

 You are being given the files of the project. You should use them to create the commit message.
-Also you are given a diff
+Also you are given a diff.
+Never mention CLAUDE code, or codex.
 `,
 messageHistory: [],
 userMessage: contextString,
@@ -75,6 +75,10 @@ export class AiDoc {
     await this.openaiInstance.start();
   }

+  public async stop() {
+    await this.openaiInstance.stop();
+  }
+
   public async buildReadme(projectDirArg: string) {
     const readmeInstance = new aiDocsClasses.Readme(this, projectDirArg);
     return await readmeInstance.build();
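For symmetry with `start()`, the new `stop()` lets callers release the OpenAI provider when they are done. A minimal lifecycle sketch (the no-argument constructor is an assumption; this hunk does not show it):

```typescript
const aidoc = new AiDoc();      // constructor signature assumed
await aidoc.start();            // boots the OpenAI instance
await aidoc.buildReadme('./');  // generate the readme
await aidoc.stop();             // new in this change: shuts the OpenAI instance down
```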
31 ts/cli.ts
@@ -57,29 +57,25 @@ export const run = async () => {
     logger.log('info', `Calculating context token count...`);

-    // Determine context mode based on args
-    let contextMode: context.ContextMode = 'full';
-    if (argvArg.trim || argvArg.trimmed) {
-      contextMode = 'trimmed';
-    } else if (argvArg.summarize || argvArg.summarized) {
-      contextMode = 'summarized';
-    }
-
     // Get task type if specified
     let taskType: context.TaskType | undefined = undefined;
     if (argvArg.task) {
       if (['readme', 'commit', 'description'].includes(argvArg.task)) {
         taskType = argvArg.task as context.TaskType;
       } else {
-        logger.log('warn', `Unknown task type: ${argvArg.task}. Using default context.`);
+        logger.log('warn', `Unknown task type: ${argvArg.task}. Using default (readme).`);
+        taskType = 'readme';
       }
+    } else {
+      // Default to readme if no task specified
+      taskType = 'readme';
     }

-    // Use enhanced context
+    // Use iterative context building
     const taskFactory = new context.TaskContextFactory(paths.cwd);
     await taskFactory.initialize();

-    let contextResult: context.IContextResult;
+    let contextResult: context.IIterativeContextResult;

     if (argvArg.all) {
       // Show stats for all task types
@@ -100,21 +96,8 @@ export const run = async () => {
       return;
     }

-    if (taskType) {
-      // Get context for specific task
-      contextResult = await taskFactory.createContextForTask(taskType);
-    } else {
-      // Get generic context with specified mode
-      const enhancedContext = new context.EnhancedContext(paths.cwd);
-      await enhancedContext.initialize();
-      enhancedContext.setContextMode(contextMode);
-
-      if (argvArg.maxTokens) {
-        enhancedContext.setTokenBudget(parseInt(argvArg.maxTokens, 10));
-      }
-
-      contextResult = await enhancedContext.buildContext();
-    }
+    // Get context for specific task
+    contextResult = await taskFactory.createContextForTask(taskType);

     // Display results
     logger.log('ok', `Total context token count: ${contextResult.tokenCount}`);
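Stripped of flag handling, the tokens flow in ts/cli.ts now reduces to the sketch below; the trim/summarize mode flags and the EnhancedContext fallback are gone, and every run resolves to a concrete task:

```typescript
// Sketch of the simplified flow (error handling and the --all branch omitted)
const taskFactory = new context.TaskContextFactory(paths.cwd);
await taskFactory.initialize();

const contextResult: context.IIterativeContextResult =
  await taskFactory.createContextForTask(taskType); // taskType defaults to 'readme'
logger.log('ok', `Total context token count: ${contextResult.tokenCount}`);
```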
@@ -1,5 +1,17 @@
 import * as plugins from '../plugins.js';
-import type { IContextConfig, ITrimConfig, ITaskConfig, TaskType, ContextMode } from './types.js';
+import * as fs from 'fs';
+import type {
+  IContextConfig,
+  ITrimConfig,
+  ITaskConfig,
+  TaskType,
+  ContextMode,
+  ICacheConfig,
+  IAnalyzerConfig,
+  IPrioritizationWeights,
+  ITierConfig,
+  IIterativeConfig
+} from './types.js';

 /**
  * Manages configuration for context building
@@ -8,6 +20,7 @@ export class ConfigManager {
   private static instance: ConfigManager;
   private config: IContextConfig;
   private projectDir: string = '';
+  private configCache: { mtime: number; config: IContextConfig } | null = null;

   /**
    * Get the singleton instance of ConfigManager
@@ -65,6 +78,34 @@ export class ConfigManager {
         maxFunctionLines: 5,
         removeComments: true,
         removeBlankLines: true
+      },
+      cache: {
+        enabled: true,
+        ttl: 3600, // 1 hour
+        maxSize: 100, // 100MB
+        directory: undefined // Will be set to .nogit/context-cache by ContextCache
+      },
+      analyzer: {
+        useAIRefinement: false, // Disabled by default for now
+        aiModel: 'haiku'
+      },
+      prioritization: {
+        dependencyWeight: 0.3,
+        relevanceWeight: 0.4,
+        efficiencyWeight: 0.2,
+        recencyWeight: 0.1
+      },
+      tiers: {
+        essential: { minScore: 0.8, trimLevel: 'none' },
+        important: { minScore: 0.5, trimLevel: 'light' },
+        optional: { minScore: 0.2, trimLevel: 'aggressive' }
+      },
+      iterative: {
+        maxIterations: 5,
+        firstPassFileLimit: 10,
+        subsequentPassFileLimit: 5,
+        temperature: 0.3,
+        model: 'gpt-4-turbo-preview'
       }
     };
   }
@@ -78,13 +119,26 @@ export class ConfigManager {
        return;
      }

-      // Create KeyValueStore for this project
-      // We'll just use smartfile directly instead of KeyValueStore
+      const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
+
+      // Check if file exists
+      const fileExists = await plugins.smartfile.fs.fileExists(npmextraJsonPath);
+      if (!fileExists) {
+        return;
+      }
+
+      // Check cache
+      const stats = await fs.promises.stat(npmextraJsonPath);
+      const currentMtime = Math.floor(stats.mtimeMs);
+
+      if (this.configCache && this.configCache.mtime === currentMtime) {
+        // Use cached config
+        this.config = this.configCache.config;
+        return;
+      }

       // Read the npmextra.json file
-      const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(
-        plugins.path.join(this.projectDir, 'npmextra.json')
-      );
+      const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
       const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());

       // Check for tsdoc context configuration
@@ -92,6 +146,12 @@ export class ConfigManager {
        // Merge with default config
        this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
      }
+
+      // Cache the config
+      this.configCache = {
+        mtime: currentMtime,
+        config: { ...this.config }
+      };
    } catch (error) {
      console.error('Error loading context configuration:', error);
    }
@@ -132,6 +192,46 @@ export class ConfigManager {
      };
    }

+    // Merge cache configuration
+    if (userConfig.cache) {
+      result.cache = {
+        ...result.cache,
+        ...userConfig.cache
+      };
+    }
+
+    // Merge analyzer configuration
+    if (userConfig.analyzer) {
+      result.analyzer = {
+        ...result.analyzer,
+        ...userConfig.analyzer
+      };
+    }
+
+    // Merge prioritization weights
+    if (userConfig.prioritization) {
+      result.prioritization = {
+        ...result.prioritization,
+        ...userConfig.prioritization
+      };
+    }
+
+    // Merge tier configuration
+    if (userConfig.tiers) {
+      result.tiers = {
+        ...result.tiers,
+        ...userConfig.tiers
+      };
+    }
+
+    // Merge iterative configuration
+    if (userConfig.iterative) {
+      result.iterative = {
+        ...result.iterative,
+        ...userConfig.iterative
+      };
+    }
+
    return result;
  }

@@ -180,6 +280,9 @@ export class ConfigManager {
    // Merge with existing config
    this.config = this.mergeConfigs(this.config, config);

+    // Invalidate cache
+    this.configCache = null;
+
    try {
      if (!this.projectDir) {
        return;
@@ -206,4 +309,61 @@ export class ConfigManager {
      console.error('Error updating context configuration:', error);
    }
  }
+
+  /**
+   * Get cache configuration
+   */
+  public getCacheConfig(): ICacheConfig {
+    return this.config.cache || { enabled: true, ttl: 3600, maxSize: 100 };
+  }
+
+  /**
+   * Get analyzer configuration
+   */
+  public getAnalyzerConfig(): IAnalyzerConfig {
+    return this.config.analyzer || { useAIRefinement: false, aiModel: 'haiku' };
+  }
+
+  /**
+   * Get prioritization weights
+   */
+  public getPrioritizationWeights(): IPrioritizationWeights {
+    return this.config.prioritization || {
+      dependencyWeight: 0.3,
+      relevanceWeight: 0.4,
+      efficiencyWeight: 0.2,
+      recencyWeight: 0.1
+    };
+  }
+
+  /**
+   * Get tier configuration
+   */
+  public getTierConfig(): ITierConfig {
+    return this.config.tiers || {
+      essential: { minScore: 0.8, trimLevel: 'none' },
+      important: { minScore: 0.5, trimLevel: 'light' },
+      optional: { minScore: 0.2, trimLevel: 'aggressive' }
+    };
+  }
+
+  /**
+   * Get iterative configuration
+   */
+  public getIterativeConfig(): IIterativeConfig {
+    return this.config.iterative || {
+      maxIterations: 5,
+      firstPassFileLimit: 10,
+      subsequentPassFileLimit: 5,
+      temperature: 0.3,
+      model: 'gpt-4-turbo-preview'
+    };
+  }
+
+  /**
+   * Clear the config cache (force reload on next access)
+   */
+  public clearCache(): void {
+    this.configCache = null;
+  }
 }
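Taken together, the new getters give callers typed access to each config section with the same fallbacks as the defaults above. A minimal consumption sketch (`projectDir` is an assumed variable):

```typescript
const configManager = ConfigManager.getInstance();
await configManager.initialize(projectDir);

// Values come from npmextra.json (tsdoc.context.*) when present,
// otherwise from the hard-coded defaults shown in this file.
const iterative = configManager.getIterativeConfig();
console.log(iterative.maxIterations);            // 5 unless overridden
console.log(configManager.getCacheConfig().ttl); // 3600 seconds by default
```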
391 ts/context/context-analyzer.ts Normal file
@@ -0,0 +1,391 @@
import * as plugins from '../plugins.js';
import type {
  IFileMetadata,
  IFileDependencies,
  IFileAnalysis,
  IAnalysisResult,
  TaskType,
  IPrioritizationWeights,
  ITierConfig,
} from './types.js';

/**
 * ContextAnalyzer provides intelligent file selection and prioritization
 * based on dependency analysis, task relevance, and configurable weights
 */
export class ContextAnalyzer {
  private projectRoot: string;
  private weights: Required<IPrioritizationWeights>;
  private tiers: Required<ITierConfig>;

  /**
   * Creates a new ContextAnalyzer
   * @param projectRoot - Root directory of the project
   * @param weights - Prioritization weights
   * @param tiers - Tier configuration
   */
  constructor(
    projectRoot: string,
    weights: Partial<IPrioritizationWeights> = {},
    tiers: Partial<ITierConfig> = {}
  ) {
    this.projectRoot = projectRoot;

    // Default weights
    this.weights = {
      dependencyWeight: weights.dependencyWeight ?? 0.3,
      relevanceWeight: weights.relevanceWeight ?? 0.4,
      efficiencyWeight: weights.efficiencyWeight ?? 0.2,
      recencyWeight: weights.recencyWeight ?? 0.1,
    };

    // Default tiers
    this.tiers = {
      essential: tiers.essential ?? { minScore: 0.8, trimLevel: 'none' },
      important: tiers.important ?? { minScore: 0.5, trimLevel: 'light' },
      optional: tiers.optional ?? { minScore: 0.2, trimLevel: 'aggressive' },
    };
  }

  /**
   * Analyzes files for a specific task type
   * @param metadata - Array of file metadata to analyze
   * @param taskType - Type of task being performed
   * @param changedFiles - Optional list of recently changed files (for commits)
   * @returns Analysis result with scored files
   */
  public async analyze(
    metadata: IFileMetadata[],
    taskType: TaskType,
    changedFiles: string[] = []
  ): Promise<IAnalysisResult> {
    const startTime = Date.now();

    // Build dependency graph
    const dependencyGraph = await this.buildDependencyGraph(metadata);

    // Calculate centrality scores
    this.calculateCentrality(dependencyGraph);

    // Analyze each file
    const files: IFileAnalysis[] = [];
    for (const meta of metadata) {
      const analysis = await this.analyzeFile(
        meta,
        taskType,
        dependencyGraph,
        changedFiles
      );
      files.push(analysis);
    }

    // Sort by importance score (highest first)
    files.sort((a, b) => b.importanceScore - a.importanceScore);

    const analysisDuration = Date.now() - startTime;

    return {
      taskType,
      files,
      dependencyGraph,
      totalFiles: metadata.length,
      analysisDuration,
    };
  }

  /**
   * Builds a dependency graph from file metadata
   * @param metadata - Array of file metadata
   * @returns Dependency graph as a map
   */
  private async buildDependencyGraph(
    metadata: IFileMetadata[]
  ): Promise<Map<string, IFileDependencies>> {
    const graph = new Map<string, IFileDependencies>();

    // Initialize graph entries
    for (const meta of metadata) {
      graph.set(meta.path, {
        path: meta.path,
        imports: [],
        importedBy: [],
        centrality: 0,
      });
    }

    // Parse imports from each file
    for (const meta of metadata) {
      try {
        const contents = await plugins.smartfile.fs.toStringSync(meta.path);
        const imports = this.extractImports(contents, meta.path);

        const deps = graph.get(meta.path)!;
        deps.imports = imports;

        // Update importedBy for imported files
        for (const importPath of imports) {
          const importedDeps = graph.get(importPath);
          if (importedDeps) {
            importedDeps.importedBy.push(meta.path);
          }
        }
      } catch (error) {
        console.warn(`Failed to parse imports from ${meta.path}:`, error.message);
      }
    }

    return graph;
  }

  /**
   * Extracts import statements from file contents
   * @param contents - File contents
   * @param filePath - Path of the file being analyzed
   * @returns Array of absolute paths to imported files
   */
  private extractImports(contents: string, filePath: string): string[] {
    const imports: string[] = [];
    const fileDir = plugins.path.dirname(filePath);

    // Match various import patterns
    const importRegex = /(?:import|export).*?from\s+['"](.+?)['"]/g;
    let match;

    while ((match = importRegex.exec(contents)) !== null) {
      const importPath = match[1];

      // Skip external modules
      if (!importPath.startsWith('.')) {
        continue;
      }

      // Resolve relative import to absolute path
      let resolvedPath = plugins.path.resolve(fileDir, importPath);

      // Handle various file extensions
      const extensions = ['.ts', '.js', '.tsx', '.jsx', '/index.ts', '/index.js'];
      let found = false;

      for (const ext of extensions) {
        const testPath = resolvedPath.endsWith(ext) ? resolvedPath : resolvedPath + ext;
        try {
          // Use synchronous file check to avoid async in this context
          const fs = require('fs');
          const exists = fs.existsSync(testPath);
          if (exists) {
            imports.push(testPath);
            found = true;
            break;
          }
        } catch (error) {
          // Continue trying other extensions
        }
      }

      if (!found && !resolvedPath.includes('.')) {
        // Try with .ts extension as default
        imports.push(resolvedPath + '.ts');
      }
    }

    return imports;
  }

  /**
   * Calculates centrality scores for all nodes in the dependency graph
   * Uses a simplified PageRank-like algorithm
   * @param graph - Dependency graph
   */
  private calculateCentrality(graph: Map<string, IFileDependencies>): void {
    const damping = 0.85;
    const iterations = 10;
    const nodeCount = graph.size;

    // Initialize scores
    const scores = new Map<string, number>();
    for (const path of graph.keys()) {
      scores.set(path, 1.0 / nodeCount);
    }

    // Iterative calculation
    for (let i = 0; i < iterations; i++) {
      const newScores = new Map<string, number>();

      for (const [path, deps] of graph.entries()) {
        let score = (1 - damping) / nodeCount;

        // Add contributions from nodes that import this file
        for (const importerPath of deps.importedBy) {
          const importerDeps = graph.get(importerPath);
          if (importerDeps) {
            const importerScore = scores.get(importerPath) ?? 0;
            const outgoingCount = importerDeps.imports.length || 1;
            score += damping * (importerScore / outgoingCount);
          }
        }

        newScores.set(path, score);
      }

      // Update scores
      for (const [path, score] of newScores) {
        scores.set(path, score);
      }
    }

    // Normalize scores to 0-1 range
    const maxScore = Math.max(...scores.values());
    if (maxScore > 0) {
      for (const deps of graph.values()) {
        const score = scores.get(deps.path) ?? 0;
        deps.centrality = score / maxScore;
      }
    }
  }

  /**
   * Analyzes a single file
   * @param meta - File metadata
   * @param taskType - Task being performed
   * @param graph - Dependency graph
   * @param changedFiles - Recently changed files
   * @returns File analysis
   */
  private async analyzeFile(
    meta: IFileMetadata,
    taskType: TaskType,
    graph: Map<string, IFileDependencies>,
    changedFiles: string[]
  ): Promise<IFileAnalysis> {
    const deps = graph.get(meta.path);
    const centralityScore = deps?.centrality ?? 0;

    // Calculate task-specific relevance
    const relevanceScore = this.calculateRelevance(meta, taskType);

    // Calculate efficiency (information per token)
    const efficiencyScore = this.calculateEfficiency(meta);

    // Calculate recency (for commit tasks)
    const recencyScore = this.calculateRecency(meta, changedFiles);

    // Calculate combined importance score
    const importanceScore =
      relevanceScore * this.weights.relevanceWeight +
      centralityScore * this.weights.dependencyWeight +
      efficiencyScore * this.weights.efficiencyWeight +
      recencyScore * this.weights.recencyWeight;

    // Assign tier
    const tier = this.assignTier(importanceScore);

    return {
      path: meta.path,
      relevanceScore,
      centralityScore,
      efficiencyScore,
      recencyScore,
      importanceScore,
      tier,
      reason: this.generateReason(meta, taskType, importanceScore, tier),
    };
  }

  /**
   * Calculates task-specific relevance score
   */
  private calculateRelevance(meta: IFileMetadata, taskType: TaskType): number {
    const relativePath = meta.relativePath.toLowerCase();
    let score = 0.5; // Base score

    // README generation - prioritize public APIs and main exports
    if (taskType === 'readme') {
      if (relativePath.includes('index.ts')) score += 0.3;
      if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.2; // Root level exports
      if (relativePath.includes('test/')) score -= 0.3;
      if (relativePath.includes('classes/')) score += 0.1;
      if (relativePath.includes('interfaces/')) score += 0.1;
    }

    // Commit messages - prioritize changed files and their dependencies
    if (taskType === 'commit') {
      if (relativePath.includes('test/')) score -= 0.2;
      // Recency will handle changed files
    }

    // Description generation - prioritize main exports and core interfaces
    if (taskType === 'description') {
      if (relativePath.includes('index.ts')) score += 0.4;
      if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.3;
      if (relativePath.includes('test/')) score -= 0.4;
      if (relativePath.includes('interfaces/')) score += 0.2;
    }

    return Math.max(0, Math.min(1, score));
  }

  /**
   * Calculates efficiency score (information density)
   */
  private calculateEfficiency(meta: IFileMetadata): number {
    // Prefer files that are not too large (good signal-to-noise ratio)
    const optimalSize = 5000; // ~1250 tokens
    const distance = Math.abs(meta.estimatedTokens - optimalSize);
    const normalized = Math.max(0, 1 - distance / optimalSize);

    return normalized;
  }

  /**
   * Calculates recency score for changed files
   */
  private calculateRecency(meta: IFileMetadata, changedFiles: string[]): number {
    if (changedFiles.length === 0) {
      return 0;
    }

    // Check if this file was changed
    const isChanged = changedFiles.some((changed) => changed === meta.path);

    return isChanged ? 1.0 : 0.0;
  }

  /**
   * Assigns a tier based on importance score
   */
  private assignTier(score: number): 'essential' | 'important' | 'optional' | 'excluded' {
    if (score >= this.tiers.essential.minScore) return 'essential';
    if (score >= this.tiers.important.minScore) return 'important';
    if (score >= this.tiers.optional.minScore) return 'optional';
    return 'excluded';
  }

  /**
   * Generates a human-readable reason for the score
   */
  private generateReason(
    meta: IFileMetadata,
    taskType: TaskType,
    score: number,
    tier: string
  ): string {
    const reasons: string[] = [];

    if (meta.relativePath.includes('index.ts')) {
      reasons.push('main export file');
    }

    if (meta.relativePath.includes('test/')) {
      reasons.push('test file (lower priority)');
    }

    if (taskType === 'readme' && meta.relativePath.match(/^ts\/[^\/]+\.ts$/)) {
      reasons.push('root-level module');
    }

    reasons.push(`score: ${score.toFixed(2)}`);
    reasons.push(`tier: ${tier}`);

    return reasons.join(', ');
  }
}
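A small driver makes the scoring tangible (hypothetical usage, not part of the commit; `metadata` would come from `LazyFileLoader.scanFiles` and `projectRoot` is assumed):

```typescript
const analyzer = new ContextAnalyzer(projectRoot); // default weights and tiers
const result = await analyzer.analyze(metadata, 'readme');

// importanceScore = 0.4*relevance + 0.3*centrality + 0.2*efficiency + 0.1*recency
for (const file of result.files.slice(0, 5)) {
  console.log(file.tier, file.importanceScore.toFixed(2), file.path);
}
```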
286 ts/context/context-cache.ts Normal file
@@ -0,0 +1,286 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { ICacheEntry, ICacheConfig } from './types.js';
import { logger } from '../logging.js';

/**
 * ContextCache provides persistent caching of file contents and token counts
 * with automatic invalidation on file changes
 */
export class ContextCache {
  private cacheDir: string;
  private cache: Map<string, ICacheEntry> = new Map();
  private config: Required<ICacheConfig>;
  private cacheIndexPath: string;

  /**
   * Creates a new ContextCache
   * @param projectRoot - Root directory of the project
   * @param config - Cache configuration
   */
  constructor(projectRoot: string, config: Partial<ICacheConfig> = {}) {
    this.config = {
      enabled: config.enabled ?? true,
      ttl: config.ttl ?? 3600, // 1 hour default
      maxSize: config.maxSize ?? 100, // 100MB default
      directory: config.directory ?? plugins.path.join(projectRoot, '.nogit', 'context-cache'),
    };

    this.cacheDir = this.config.directory;
    this.cacheIndexPath = plugins.path.join(this.cacheDir, 'index.json');
  }

  /**
   * Initializes the cache by loading from disk
   */
  public async init(): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    // Ensure cache directory exists
    await plugins.smartfile.fs.ensureDir(this.cacheDir);

    // Load cache index if it exists
    try {
      const indexExists = await plugins.smartfile.fs.fileExists(this.cacheIndexPath);
      if (indexExists) {
        const indexContent = await plugins.smartfile.fs.toStringSync(this.cacheIndexPath);
        const indexData = JSON.parse(indexContent) as ICacheEntry[];
        if (Array.isArray(indexData)) {
          for (const entry of indexData) {
            this.cache.set(entry.path, entry);
          }
        }
      }
    } catch (error) {
      console.warn('Failed to load cache index:', error.message);
      // Start with empty cache if loading fails
    }

    // Clean up expired and invalid entries
    await this.cleanup();
  }

  /**
   * Gets a cached entry if it's still valid
   * @param filePath - Absolute path to the file
   * @returns Cache entry if valid, null otherwise
   */
  public async get(filePath: string): Promise<ICacheEntry | null> {
    if (!this.config.enabled) {
      return null;
    }

    const entry = this.cache.get(filePath);
    if (!entry) {
      return null;
    }

    // Check if entry is expired
    const now = Date.now();
    if (now - entry.cachedAt > this.config.ttl * 1000) {
      this.cache.delete(filePath);
      return null;
    }

    // Check if file has been modified
    try {
      const stats = await fs.promises.stat(filePath);
      const currentMtime = Math.floor(stats.mtimeMs);

      if (currentMtime !== entry.mtime) {
        // File has changed, invalidate cache
        this.cache.delete(filePath);
        return null;
      }

      return entry;
    } catch (error) {
      // File doesn't exist anymore
      this.cache.delete(filePath);
      return null;
    }
  }

  /**
   * Stores a cache entry
   * @param entry - Cache entry to store
   */
  public async set(entry: ICacheEntry): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    this.cache.set(entry.path, entry);

    // Check cache size and evict old entries if needed
    await this.enforceMaxSize();

    // Persist to disk (async, don't await)
    this.persist().catch((error) => {
      console.warn('Failed to persist cache:', error.message);
    });
  }

  /**
   * Stores multiple cache entries
   * @param entries - Array of cache entries
   */
  public async setMany(entries: ICacheEntry[]): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    for (const entry of entries) {
      this.cache.set(entry.path, entry);
    }

    await this.enforceMaxSize();
    await this.persist();
  }

  /**
   * Checks if a file is cached and valid
   * @param filePath - Absolute path to the file
   * @returns True if cached and valid
   */
  public async has(filePath: string): Promise<boolean> {
    const entry = await this.get(filePath);
    return entry !== null;
  }

  /**
   * Gets cache statistics
   */
  public getStats(): {
    entries: number;
    totalSize: number;
    oldestEntry: number | null;
    newestEntry: number | null;
  } {
    let totalSize = 0;
    let oldestEntry: number | null = null;
    let newestEntry: number | null = null;

    for (const entry of this.cache.values()) {
      totalSize += entry.contents.length;

      if (oldestEntry === null || entry.cachedAt < oldestEntry) {
        oldestEntry = entry.cachedAt;
      }

      if (newestEntry === null || entry.cachedAt > newestEntry) {
        newestEntry = entry.cachedAt;
      }
    }

    return {
      entries: this.cache.size,
      totalSize,
      oldestEntry,
      newestEntry,
    };
  }

  /**
   * Clears all cache entries
   */
  public async clear(): Promise<void> {
    this.cache.clear();
    await this.persist();
  }

  /**
   * Clears specific cache entries
   * @param filePaths - Array of file paths to clear
   */
  public async clearPaths(filePaths: string[]): Promise<void> {
    for (const path of filePaths) {
      this.cache.delete(path);
    }
    await this.persist();
  }

  /**
   * Cleans up expired and invalid cache entries
   */
  private async cleanup(): Promise<void> {
    const now = Date.now();
    const toDelete: string[] = [];

    for (const [path, entry] of this.cache.entries()) {
      // Check expiration
      if (now - entry.cachedAt > this.config.ttl * 1000) {
        toDelete.push(path);
        continue;
      }

      // Check if file still exists and hasn't changed
      try {
        const stats = await fs.promises.stat(path);
        const currentMtime = Math.floor(stats.mtimeMs);

        if (currentMtime !== entry.mtime) {
          toDelete.push(path);
        }
      } catch (error) {
        // File doesn't exist
        toDelete.push(path);
      }
    }

    for (const path of toDelete) {
      this.cache.delete(path);
    }

    if (toDelete.length > 0) {
      await this.persist();
    }
  }

  /**
   * Enforces maximum cache size by evicting oldest entries
   */
  private async enforceMaxSize(): Promise<void> {
    const stats = this.getStats();
    const maxSizeBytes = this.config.maxSize * 1024 * 1024; // Convert MB to bytes

    if (stats.totalSize <= maxSizeBytes) {
      return;
    }

    // Sort entries by age (oldest first)
    const entries = Array.from(this.cache.entries()).sort(
      (a, b) => a[1].cachedAt - b[1].cachedAt
    );

    // Remove oldest entries until we're under the limit
    let currentSize = stats.totalSize;
    for (const [path, entry] of entries) {
      if (currentSize <= maxSizeBytes) {
        break;
      }

      currentSize -= entry.contents.length;
      this.cache.delete(path);
    }
  }

  /**
   * Persists cache index to disk
   */
  private async persist(): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    try {
      const entries = Array.from(this.cache.values());
      const content = JSON.stringify(entries, null, 2);
      await plugins.smartfile.memory.toFs(content, this.cacheIndexPath);
    } catch (error) {
      console.warn('Failed to persist cache index:', error.message);
    }
  }
}
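Typical read-through usage mirrors what EnhancedContext does further down (a sketch; `filePath` and `countTokens` are assumptions, `plugins` is the module's plugin barrel):

```typescript
const cache = new ContextCache(projectRoot, { ttl: 7200 }); // override: 2h TTL
await cache.init();

let entry = await cache.get(filePath); // null on miss, expiry, or mtime change
if (!entry) {
  const contents = await plugins.smartfile.fs.toStringSync(filePath);
  entry = {
    path: filePath,
    contents,
    tokenCount: countTokens(contents), // assumed tokenizer function
    mtime: Date.now(),
    cachedAt: Date.now(),
  };
  await cache.set(entry);
}
```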
@@ -243,4 +243,68 @@ export class ContextTrimmer {
       ...config
     };
   }
+
+  /**
+   * Trim a file based on its importance tier
+   * @param filePath The path to the file
+   * @param content The file's contents
+   * @param level The trimming level to apply ('none', 'light', 'aggressive')
+   * @returns The trimmed file contents
+   */
+  public trimFileWithLevel(
+    filePath: string,
+    content: string,
+    level: 'none' | 'light' | 'aggressive'
+  ): string {
+    // No trimming for essential files
+    if (level === 'none') {
+      return content;
+    }
+
+    // Create a temporary config based on level
+    const originalConfig = { ...this.config };
+
+    try {
+      if (level === 'light') {
+        // Light trimming: preserve signatures, remove only complex implementations
+        this.config = {
+          ...this.config,
+          removeImplementations: false,
+          preserveInterfaces: true,
+          preserveTypeDefs: true,
+          preserveJSDoc: true,
+          maxFunctionLines: 10,
+          removeComments: false,
+          removeBlankLines: true
+        };
+      } else if (level === 'aggressive') {
+        // Aggressive trimming: remove all implementations, keep only signatures
+        this.config = {
+          ...this.config,
+          removeImplementations: true,
+          preserveInterfaces: true,
+          preserveTypeDefs: true,
+          preserveJSDoc: true,
+          maxFunctionLines: 3,
+          removeComments: true,
+          removeBlankLines: true
+        };
+      }
+
+      // Process based on file type
+      let result = content;
+      if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
+        result = this.trimTypeScriptFile(content);
+      } else if (filePath.endsWith('.md')) {
+        result = this.trimMarkdownFile(content);
+      } else if (filePath.endsWith('.json')) {
+        result = this.trimJsonFile(content);
+      }
+
+      return result;
+    } finally {
+      // Restore original config
+      this.config = originalConfig;
+    }
+  }
 }
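How the tier-to-level mapping plays out in practice (a sketch; `source` and the file paths are illustrative):

```typescript
const trimmer = new ContextTrimmer(configManager.getTrimConfig());

// essential tier -> 'none': contents returned verbatim
const untouched = trimmer.trimFileWithLevel('ts/index.ts', source, 'none');

// optional tier -> 'aggressive': implementations cut to ~3-line stubs,
// while interfaces, type defs and JSDoc are kept
const compact = trimmer.trimFileWithLevel('ts/context/types.ts', source, 'aggressive');
```

Because the config swap happens inside try/finally, a throw from any trim helper still restores the trimmer's original configuration.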
@@ -1,7 +1,10 @@
 import * as plugins from '../plugins.js';
-import type { ContextMode, IContextResult, IFileInfo, TaskType } from './types.js';
+import type { ContextMode, IContextResult, IFileInfo, TaskType, IFileMetadata } from './types.js';
 import { ContextTrimmer } from './context-trimmer.js';
 import { ConfigManager } from './config-manager.js';
+import { LazyFileLoader } from './lazy-file-loader.js';
+import { ContextCache } from './context-cache.js';
+import { ContextAnalyzer } from './context-analyzer.js';

 /**
  * Enhanced ProjectContext that supports context optimization strategies
@@ -10,6 +13,9 @@ export class EnhancedContext {
   private projectDir: string;
   private trimmer: ContextTrimmer;
   private configManager: ConfigManager;
+  private lazyLoader: LazyFileLoader;
+  private cache: ContextCache;
+  private analyzer: ContextAnalyzer;
   private contextMode: ContextMode = 'trimmed';
   private tokenBudget: number = 190000; // Default for o4-mini
   private contextResult: IContextResult = {
@@ -29,6 +35,13 @@ export class EnhancedContext {
     this.projectDir = projectDirArg;
     this.configManager = ConfigManager.getInstance();
     this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
+    this.lazyLoader = new LazyFileLoader(projectDirArg);
+    this.cache = new ContextCache(projectDirArg, this.configManager.getCacheConfig());
+    this.analyzer = new ContextAnalyzer(
+      projectDirArg,
+      this.configManager.getPrioritizationWeights(),
+      this.configManager.getTierConfig()
+    );
   }

   /**
@@ -38,6 +51,7 @@ export class EnhancedContext {
     await this.configManager.initialize(this.projectDir);
     this.tokenBudget = this.configManager.getMaxTokens();
     this.trimmer.updateConfig(this.configManager.getTrimConfig());
+    await this.cache.init();
   }

   /**
@@ -57,73 +71,15 @@ export class EnhancedContext {
   }

   /**
-   * Gather files from the project
-   * @param includePaths Optional paths to include
-   * @param excludePaths Optional paths to exclude
+   * Convert files to context with smart analysis and prioritization
+   * @param metadata - File metadata to analyze
+   * @param taskType - Task type for context-aware prioritization
+   * @param mode - Context mode to use
+   * @returns Context string
    */
-  public async gatherFiles(includePaths?: string[], excludePaths?: string[]): Promise<Record<string, plugins.smartfile.SmartFile | plugins.smartfile.SmartFile[]>> {
-    const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
-      plugins.path.join(this.projectDir, 'package.json'),
-      this.projectDir,
-    );
-
-    const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
-      plugins.path.join(this.projectDir, 'readme.md'),
-      this.projectDir,
-    );
-
-    const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
-      plugins.path.join(this.projectDir, 'readme.hints.md'),
-      this.projectDir,
-    );
-
-    const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
-      plugins.path.join(this.projectDir, 'npmextra.json'),
-      this.projectDir,
-    );
-
-    // Use provided include paths or default to all TypeScript files
-    const includeGlobs = includePaths?.map(path => `${path}/**/*.ts`) || ['ts*/**/*.ts'];
-
-    // Get TypeScript files
-    const smartfilesModPromises = includeGlobs.map(glob =>
-      plugins.smartfile.fs.fileTreeToObject(this.projectDir, glob)
-    );
-
-    const smartfilesModArrays = await Promise.all(smartfilesModPromises);
-
-    // Flatten the arrays
-    const smartfilesMod: plugins.smartfile.SmartFile[] = [];
-    smartfilesModArrays.forEach(array => {
-      smartfilesMod.push(...array);
-    });
-
-    // Get test files if not excluded
-    let smartfilesTest: plugins.smartfile.SmartFile[] = [];
-    if (!excludePaths?.includes('test/')) {
-      smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
-        this.projectDir,
-        'test/**/*.ts',
-      );
-    }
-
-    return {
-      smartfilePackageJSON,
-      smartfilesReadme,
-      smartfilesReadmeHints,
-      smartfilesNpmextraJSON,
-      smartfilesMod,
-      smartfilesTest,
-    };
-  }
-
-  /**
-   * Convert files to context string
-   * @param files The files to convert
-   * @param mode The context mode to use
-   */
-  public async convertFilesToContext(
-    files: plugins.smartfile.SmartFile[],
+  public async convertFilesToContextWithAnalysis(
+    metadata: IFileMetadata[],
+    taskType: TaskType,
     mode: ContextMode = this.contextMode
   ): Promise<string> {
     // Reset context result
@@ -136,54 +92,97 @@ export class EnhancedContext {
       tokenSavings: 0
     };

+    // Analyze files for smart prioritization
+    const analysis = await this.analyzer.analyze(metadata, taskType, []);
+
+    // Sort files by importance score (highest first)
+    const sortedAnalysis = [...analysis.files].sort(
+      (a, b) => b.importanceScore - a.importanceScore
+    );
+
+    // Filter out excluded tier
+    const relevantFiles = sortedAnalysis.filter(f => f.tier !== 'excluded');
+
     let totalTokenCount = 0;
     let totalOriginalTokens = 0;

-    // Sort files by importance (for now just a simple alphabetical sort)
-    // Later this could be enhanced with more sophisticated prioritization
-    const sortedFiles = [...files].sort((a, b) => a.relative.localeCompare(b.relative));
-
     const processedFiles: string[] = [];

-    for (const smartfile of sortedFiles) {
-      // Calculate original token count
-      const originalContent = smartfile.contents.toString();
-      const originalTokenCount = this.countTokens(originalContent);
+    // Load files with cache support
+    for (const fileAnalysis of relevantFiles) {
+      try {
+        // Check cache first
+        let contents: string;
+        let originalTokenCount: number;
+
+        const cached = await this.cache.get(fileAnalysis.path);
+        if (cached) {
+          contents = cached.contents;
+          originalTokenCount = cached.tokenCount;
+        } else {
+          // Load file
+          const fileData = await plugins.smartfile.fs.toStringSync(fileAnalysis.path);
+          contents = fileData;
+          originalTokenCount = this.countTokens(contents);
+
+          // Cache it
+          await this.cache.set({
+            path: fileAnalysis.path,
+            contents,
+            tokenCount: originalTokenCount,
+            mtime: Date.now(),
+            cachedAt: Date.now()
+          });
+        }
+
         totalOriginalTokens += originalTokenCount;

-      // Apply trimming based on mode
-      let processedContent = originalContent;
-
-      if (mode !== 'full') {
-        processedContent = this.trimmer.trimFile(
-          smartfile.relative,
-          originalContent,
-          mode
-        );
-      }
+        // Apply tier-based trimming
+        let processedContent = contents;
+        let trimLevel: 'none' | 'light' | 'aggressive' = 'light';
+
+        if (fileAnalysis.tier === 'essential') {
+          trimLevel = 'none';
+        } else if (fileAnalysis.tier === 'important') {
+          trimLevel = 'light';
+        } else if (fileAnalysis.tier === 'optional') {
+          trimLevel = 'aggressive';
+        }
+
+        // Apply trimming based on mode and tier
+        if (mode !== 'full' && trimLevel !== 'none') {
+          const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
+          processedContent = this.trimmer.trimFileWithLevel(
+            relativePath,
+            contents,
+            trimLevel
+          );
+        }

-      // Calculate new token count
+        // Calculate token count
         const processedTokenCount = this.countTokens(processedContent);

-      // Check if we have budget for this file
+        // Check token budget
         if (totalTokenCount + processedTokenCount > this.tokenBudget) {
           // We don't have budget for this file
+          const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
           this.contextResult.excludedFiles.push({
-            path: smartfile.path,
-            contents: originalContent,
-            relativePath: smartfile.relative,
-            tokenCount: originalTokenCount
+            path: fileAnalysis.path,
+            contents,
+            relativePath,
+            tokenCount: originalTokenCount,
+            importanceScore: fileAnalysis.importanceScore
           });
           continue;
         }

         // Format the file for context
+        const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
         const formattedContent = `
-====== START OF FILE ${smartfile.relative} ======
+====== START OF FILE ${relativePath} ======

${processedContent}

-====== END OF FILE ${smartfile.relative} ======
+====== END OF FILE ${relativePath} ======
`;

         processedFiles.push(formattedContent);
@@ -191,18 +190,22 @@ ${processedContent}

         // Track file in appropriate list
         const fileInfo: IFileInfo = {
-          path: smartfile.path,
+          path: fileAnalysis.path,
           contents: processedContent,
-          relativePath: smartfile.relative,
-          tokenCount: processedTokenCount
+          relativePath,
+          tokenCount: processedTokenCount,
+          importanceScore: fileAnalysis.importanceScore
         };

-      if (mode === 'full' || processedContent === originalContent) {
+        if (trimLevel === 'none' || processedContent === contents) {
           this.contextResult.includedFiles.push(fileInfo);
         } else {
           this.contextResult.trimmedFiles.push(fileInfo);
           this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
         }
+      } catch (error) {
+        console.warn(`Failed to process file ${fileAnalysis.path}:`, error.message);
+      }
     }

     // Join all processed files
@@ -216,8 +219,8 @@ ${processedContent}
   }

   /**
-   * Build context for the project
-   * @param taskType Optional task type for task-specific context
+   * Build context for the project using smart analysis
+   * @param taskType Task type for context-aware prioritization (defaults to 'description')
    */
   public async buildContext(taskType?: TaskType): Promise<IContextResult> {
     // Initialize if needed
@@ -225,49 +228,35 @@ ${processedContent}
       await this.initialize();
     }

-    // Get task-specific configuration if a task type is provided
-    if (taskType) {
-      const taskConfig = this.configManager.getTaskConfig(taskType);
+    // Smart context building always requires a task type for optimal prioritization
+    // Default to 'description' if not provided
+    const effectiveTaskType = taskType || 'description';
+
+    // Get task-specific configuration
+    const taskConfig = this.configManager.getTaskConfig(effectiveTaskType);
     if (taskConfig.mode) {
       this.setContextMode(taskConfig.mode);
     }
-    }

-    // Gather files
-    const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
-    const files = await this.gatherFiles(
-      taskConfig?.includePaths,
-      taskConfig?.excludePaths
-    );
+    // Build globs for scanning
+    const includeGlobs = taskConfig?.includePaths?.map(p => `${p}/**/*.ts`) || [
+      'ts/**/*.ts',
+      'ts*/**/*.ts'
+    ];

-    // Convert files to context
-    // Create an array of all files to process
-    const allFiles: plugins.smartfile.SmartFile[] = [];
+    // Add config files
+    const configGlobs = [
+      'package.json',
+      'readme.md',
+      'readme.hints.md',
+      'npmextra.json'
+    ];

-    // Add individual files
-    if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
-    if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
-    if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
-    if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);
+    // Scan files for metadata (fast, doesn't load contents)
+    const metadata = await this.lazyLoader.scanFiles([...configGlobs, ...includeGlobs]);

-    // Add arrays of files
-    if (files.smartfilesMod) {
+    // Use smart analyzer to build context with intelligent prioritization
|
await this.convertFilesToContextWithAnalysis(metadata, effectiveTaskType, this.contextMode);
|
||||||
if (Array.isArray(files.smartfilesMod)) {
|
|
||||||
allFiles.push(...files.smartfilesMod);
|
|
||||||
} else {
|
|
||||||
allFiles.push(files.smartfilesMod);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if (files.smartfilesTest) {
|
|
||||||
if (Array.isArray(files.smartfilesTest)) {
|
|
||||||
allFiles.push(...files.smartfilesTest);
|
|
||||||
} else {
|
|
||||||
allFiles.push(files.smartfilesTest);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
const context = await this.convertFilesToContext(allFiles);
|
|
||||||
|
|
||||||
return this.contextResult;
|
return this.contextResult;
|
||||||
}
|
}
|
||||||
|
|||||||
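For orientation, a minimal sketch of driving the reworked buildContext; EnhancedContext is constructed with a project directory elsewhere in this changeset, so the call site below is an assumption:

import { EnhancedContext } from './ts/context/enhanced-context.js';

const ctx = new EnhancedContext(process.cwd());
await ctx.initialize();
// Omitting the task type now falls back to the 'description' task
const result = await ctx.buildContext();
console.log(`included ${result.includedFiles.length}, trimmed ${result.trimmedFiles.length}, saved ${result.tokenSavings} tokens`);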
@@ -2,6 +2,9 @@ import { EnhancedContext } from './enhanced-context.js';
 import { TaskContextFactory } from './task-context-factory.js';
 import { ConfigManager } from './config-manager.js';
 import { ContextTrimmer } from './context-trimmer.js';
+import { LazyFileLoader } from './lazy-file-loader.js';
+import { ContextCache } from './context-cache.js';
+import { ContextAnalyzer } from './context-analyzer.js';
 import type {
   ContextMode,
   IContextConfig,
@@ -9,7 +12,19 @@ import type {
   IFileInfo,
   ITrimConfig,
   ITaskConfig,
-  TaskType
+  TaskType,
+  ICacheConfig,
+  IAnalyzerConfig,
+  IPrioritizationWeights,
+  ITierConfig,
+  ITierSettings,
+  IFileMetadata,
+  ICacheEntry,
+  IFileDependencies,
+  IFileAnalysis,
+  IAnalysisResult,
+  IIterativeConfig,
+  IIterativeContextResult
 } from './types.js';

 export {
@@ -18,6 +33,9 @@ export {
   TaskContextFactory,
   ConfigManager,
   ContextTrimmer,
+  LazyFileLoader,
+  ContextCache,
+  ContextAnalyzer,
 };

 // Types
@@ -28,5 +46,17 @@ export type {
   IFileInfo,
   ITrimConfig,
   ITaskConfig,
-  TaskType
+  TaskType,
+  ICacheConfig,
+  IAnalyzerConfig,
+  IPrioritizationWeights,
+  ITierConfig,
+  ITierSettings,
+  IFileMetadata,
+  ICacheEntry,
+  IFileDependencies,
+  IFileAnalysis,
+  IAnalysisResult,
+  IIterativeConfig,
+  IIterativeContextResult
 };
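A consumer-side sketch of the widened barrel (import paths assumed relative to the package root):

import { TaskContextFactory, LazyFileLoader, ContextCache, ContextAnalyzer } from './ts/context/index.js';
import type { IIterativeConfig, IIterativeContextResult } from './ts/context/index.js';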
467 ts/context/iterative-context-builder.ts Normal file
@@ -0,0 +1,467 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import { logger } from '../logging.js';
import type {
  TaskType,
  IFileMetadata,
  IFileInfo,
  IIterativeContextResult,
  IIterationState,
  IFileSelectionDecision,
  IContextSufficiencyDecision,
  IIterativeConfig,
} from './types.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
import { ConfigManager } from './config-manager.js';

/**
 * Iterative context builder that uses AI to intelligently select files
 * across multiple iterations until sufficient context is gathered
 */
export class IterativeContextBuilder {
  private projectRoot: string;
  private lazyLoader: LazyFileLoader;
  private cache: ContextCache;
  private analyzer: ContextAnalyzer;
  private config: Required<IIterativeConfig>;
  private tokenBudget: number = 190000;
  private openaiInstance: plugins.smartai.OpenAiProvider;

  /**
   * Creates a new IterativeContextBuilder
   * @param projectRoot - Root directory of the project
   * @param config - Iterative configuration
   */
  constructor(projectRoot: string, config?: Partial<IIterativeConfig>) {
    this.projectRoot = projectRoot;
    this.lazyLoader = new LazyFileLoader(projectRoot);
    this.cache = new ContextCache(projectRoot);
    this.analyzer = new ContextAnalyzer(projectRoot);

    // Default configuration
    this.config = {
      maxIterations: config?.maxIterations ?? 5,
      firstPassFileLimit: config?.firstPassFileLimit ?? 10,
      subsequentPassFileLimit: config?.subsequentPassFileLimit ?? 5,
      temperature: config?.temperature ?? 0.3,
      model: config?.model ?? 'gpt-4-turbo-preview',
    };
  }

  /**
   * Initialize the builder
   */
  public async initialize(): Promise<void> {
    await this.cache.init();
    const configManager = ConfigManager.getInstance();
    await configManager.initialize(this.projectRoot);
    this.tokenBudget = configManager.getMaxTokens();

    // Initialize OpenAI instance
    const qenvInstance = new plugins.qenv.Qenv();
    const openaiToken = await qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN');
    if (!openaiToken) {
      throw new Error('OPENAI_TOKEN environment variable is required for iterative context building');
    }
    this.openaiInstance = new plugins.smartai.OpenAiProvider({
      openaiToken,
    });
    await this.openaiInstance.start();
  }

  /**
   * Build context iteratively using AI decision making
   * @param taskType - Type of task being performed
   * @returns Complete iterative context result
   */
  public async buildContextIteratively(taskType: TaskType): Promise<IIterativeContextResult> {
    const startTime = Date.now();
    logger.log('info', '🤖 Starting iterative context building...');
    logger.log('info', `  Task: ${taskType}, Budget: ${this.tokenBudget} tokens, Max iterations: ${this.config.maxIterations}`);

    // Phase 1: Scan project files for metadata
    logger.log('info', '📋 Scanning project files...');
    const metadata = await this.scanProjectFiles(taskType);
    const totalEstimatedTokens = metadata.reduce((sum, m) => sum + m.estimatedTokens, 0);
    logger.log('info', `  Found ${metadata.length} files (~${totalEstimatedTokens} estimated tokens)`);

    // Phase 2: Analyze files for initial prioritization
    logger.log('info', '🔍 Analyzing file dependencies and importance...');
    const analysis = await this.analyzer.analyze(metadata, taskType, []);
    logger.log('info', `  Analysis complete in ${analysis.analysisDuration}ms`);

    // Track state across iterations
    const iterations: IIterationState[] = [];
    let totalTokensUsed = 0;
    let apiCallCount = 0;
    let loadedContent = '';
    const includedFiles: IFileInfo[] = [];

    // Phase 3: Iterative file selection and loading
    for (let iteration = 1; iteration <= this.config.maxIterations; iteration++) {
      const iterationStart = Date.now();
      logger.log('info', `\n🤔 Iteration ${iteration}/${this.config.maxIterations}: Asking AI which files to examine...`);

      const remainingBudget = this.tokenBudget - totalTokensUsed;
      logger.log('info', `  Token budget remaining: ${remainingBudget}/${this.tokenBudget} (${Math.round((remainingBudget / this.tokenBudget) * 100)}%)`);

      // Get AI decision on which files to load
      const decision = await this.getFileSelectionDecision(
        metadata,
        analysis.files.slice(0, 30), // Top 30 files by importance
        taskType,
        iteration,
        totalTokensUsed,
        remainingBudget,
        loadedContent
      );
      apiCallCount++;

      logger.log('info', `  AI reasoning: ${decision.reasoning}`);
      logger.log('info', `  AI requested ${decision.filesToLoad.length} files`);

      // Load requested files
      const iterationFiles: IFileInfo[] = [];
      let iterationTokens = 0;

      if (decision.filesToLoad.length > 0) {
        logger.log('info', '📥 Loading requested files...');

        for (const filePath of decision.filesToLoad) {
          try {
            const fileInfo = await this.loadFile(filePath);
            if (totalTokensUsed + fileInfo.tokenCount! <= this.tokenBudget) {
              const formattedFile = this.formatFileForContext(fileInfo);
              loadedContent += formattedFile;
              includedFiles.push(fileInfo);
              iterationFiles.push(fileInfo);
              iterationTokens += fileInfo.tokenCount!;
              totalTokensUsed += fileInfo.tokenCount!;

              logger.log('info', `  ✓ ${fileInfo.relativePath} (${fileInfo.tokenCount} tokens)`);
            } else {
              logger.log('warn', `  ✗ ${fileInfo.relativePath} - would exceed budget, skipping`);
            }
          } catch (error) {
            logger.log('warn', `  ✗ Failed to load ${filePath}: ${error.message}`);
          }
        }
      }

      // Record iteration state
      const iterationDuration = Date.now() - iterationStart;
      iterations.push({
        iteration,
        filesLoaded: iterationFiles,
        tokensUsed: iterationTokens,
        totalTokensUsed,
        decision,
        duration: iterationDuration,
      });

      logger.log('info', `  Iteration ${iteration} complete: ${iterationFiles.length} files loaded, ${iterationTokens} tokens used`);

      // Check if we should continue
      if (totalTokensUsed >= this.tokenBudget * 0.95) {
        logger.log('warn', '⚠️ Approaching token budget limit, stopping iterations');
        break;
      }

      // Ask AI if context is sufficient
      if (iteration < this.config.maxIterations) {
        logger.log('info', '🤔 Asking AI if context is sufficient...');
        const sufficiencyDecision = await this.evaluateContextSufficiency(
          loadedContent,
          taskType,
          iteration,
          totalTokensUsed,
          remainingBudget - iterationTokens
        );
        apiCallCount++;

        logger.log('info', `  AI decision: ${sufficiencyDecision.sufficient ? '✅ SUFFICIENT' : '⏭️ NEEDS MORE'}`);
        logger.log('info', `  Reasoning: ${sufficiencyDecision.reasoning}`);

        if (sufficiencyDecision.sufficient) {
          logger.log('ok', '✅ Context building complete - AI determined context is sufficient');
          break;
        }
      }
    }

    const totalDuration = Date.now() - startTime;
    logger.log('ok', `\n✅ Iterative context building complete!`);
    logger.log('info', `  Files included: ${includedFiles.length}`);
    logger.log('info', `  Token usage: ${totalTokensUsed}/${this.tokenBudget} (${Math.round((totalTokensUsed / this.tokenBudget) * 100)}%)`);
    logger.log('info', `  Iterations: ${iterations.length}, API calls: ${apiCallCount}`);
    logger.log('info', `  Total duration: ${(totalDuration / 1000).toFixed(2)}s`);

    return {
      context: loadedContent,
      tokenCount: totalTokensUsed,
      includedFiles,
      trimmedFiles: [],
      excludedFiles: [],
      tokenSavings: 0,
      iterationCount: iterations.length,
      iterations,
      apiCallCount,
      totalDuration,
    };
  }

  /**
   * Scan project files based on task type
   */
  private async scanProjectFiles(taskType: TaskType): Promise<IFileMetadata[]> {
    const configManager = ConfigManager.getInstance();
    const taskConfig = configManager.getTaskConfig(taskType);

    const includeGlobs = taskConfig?.includePaths?.map(p => `${p}/**/*.ts`) || [
      'ts/**/*.ts',
      'ts*/**/*.ts'
    ];

    const configGlobs = [
      'package.json',
      'readme.md',
      'readme.hints.md',
      'npmextra.json'
    ];

    return await this.lazyLoader.scanFiles([...configGlobs, ...includeGlobs]);
  }

  /**
   * Get AI decision on which files to load
   */
  private async getFileSelectionDecision(
    allMetadata: IFileMetadata[],
    analyzedFiles: any[],
    taskType: TaskType,
    iteration: number,
    tokensUsed: number,
    remainingBudget: number,
    loadedContent: string
  ): Promise<IFileSelectionDecision> {
    const isFirstIteration = iteration === 1;
    const fileLimit = isFirstIteration
      ? this.config.firstPassFileLimit
      : this.config.subsequentPassFileLimit;

    const systemPrompt = this.buildFileSelectionPrompt(
      allMetadata,
      analyzedFiles,
      taskType,
      iteration,
      tokensUsed,
      remainingBudget,
      loadedContent,
      fileLimit
    );

    const response = await this.openaiInstance.chat({
      systemMessage: `You are an AI assistant that helps select the most relevant files for code analysis.
You must respond ONLY with valid JSON that can be parsed with JSON.parse().
Do not wrap the JSON in markdown code blocks or add any other text.`,
      userMessage: systemPrompt,
      messageHistory: [],
    });

    // Parse JSON response, handling potential markdown formatting
    const content = response.message.replace('```json', '').replace('```', '').trim();
    const parsed = JSON.parse(content);

    return {
      reasoning: parsed.reasoning || 'No reasoning provided',
      filesToLoad: parsed.files_to_load || [],
      estimatedTokensNeeded: parsed.estimated_tokens_needed,
    };
  }

  /**
   * Build prompt for file selection
   */
  private buildFileSelectionPrompt(
    metadata: IFileMetadata[],
    analyzedFiles: any[],
    taskType: TaskType,
    iteration: number,
    tokensUsed: number,
    remainingBudget: number,
    loadedContent: string,
    fileLimit: number
  ): string {
    const taskDescriptions = {
      readme: 'generating a comprehensive README that explains the project\'s purpose, features, and API',
      commit: 'analyzing code changes to generate an intelligent commit message',
      description: 'generating a concise project description for package.json',
    };

    const alreadyLoadedFiles = loadedContent
      ? loadedContent.split('\n======').slice(1).map(section => {
          const match = section.match(/START OF FILE (.+?) ======/);
          return match ? match[1] : '';
        }).filter(Boolean)
      : [];

    const availableFiles = metadata
      .filter(m => !alreadyLoadedFiles.includes(m.relativePath))
      .map(m => {
        const analysis = analyzedFiles.find(a => a.path === m.path);
        return `- ${m.relativePath} (${m.size} bytes, ~${m.estimatedTokens} tokens${analysis ? `, importance: ${analysis.importanceScore.toFixed(2)}` : ''})`;
      })
      .join('\n');

    return `You are building context for ${taskDescriptions[taskType]} in a TypeScript project.

ITERATION: ${iteration}
TOKENS USED: ${tokensUsed}/${tokensUsed + remainingBudget} (${Math.round((tokensUsed / (tokensUsed + remainingBudget)) * 100)}%)
REMAINING BUDGET: ${remainingBudget} tokens

${alreadyLoadedFiles.length > 0 ? `FILES ALREADY LOADED:\n${alreadyLoadedFiles.map(f => `- ${f}`).join('\n')}\n\n` : ''}AVAILABLE FILES (not yet loaded):
${availableFiles}

Your task: Select up to ${fileLimit} files that will give you the MOST understanding for this ${taskType} task.

${iteration === 1 ? `This is the FIRST iteration. Focus on:
- Main entry points (index.ts, main exports)
- Core classes and interfaces
- Package configuration
` : `This is iteration ${iteration}. You've already seen some files. Now focus on:
- Files that complement what you've already loaded
- Dependencies of already-loaded files
- Missing pieces for complete understanding
`}

Consider:
1. File importance scores (if provided)
2. File paths (ts/index.ts is likely more important than ts/internal/utils.ts)
3. Token efficiency (prefer smaller files if they provide good information)
4. Remaining budget (${remainingBudget} tokens)

Respond in JSON format:
{
  "reasoning": "Brief explanation of why you're selecting these files",
  "files_to_load": ["path/to/file1.ts", "path/to/file2.ts"],
  "estimated_tokens_needed": 15000
}`;
  }

  /**
   * Evaluate if current context is sufficient
   */
  private async evaluateContextSufficiency(
    loadedContent: string,
    taskType: TaskType,
    iteration: number,
    tokensUsed: number,
    remainingBudget: number
  ): Promise<IContextSufficiencyDecision> {
    const prompt = `You have been building context for a ${taskType} task across ${iteration} iterations.

CURRENT STATE:
- Tokens used: ${tokensUsed}
- Remaining budget: ${remainingBudget}
- Files loaded: ${loadedContent.split('\n======').length - 1}

CONTEXT SO FAR:
${loadedContent.substring(0, 3000)}... (truncated for brevity)

Question: Do you have SUFFICIENT context to successfully complete the ${taskType} task?

Consider:
- For README: Do you understand the project's purpose, main features, API surface, and usage patterns?
- For commit: Do you understand what changed and why?
- For description: Do you understand the project's core value proposition?

Respond in JSON format:
{
  "sufficient": true or false,
  "reasoning": "Detailed explanation of your decision"
}`;

    const response = await this.openaiInstance.chat({
      systemMessage: `You are an AI assistant that evaluates whether gathered context is sufficient for a task.
You must respond ONLY with valid JSON that can be parsed with JSON.parse().
Do not wrap the JSON in markdown code blocks or add any other text.`,
      userMessage: prompt,
      messageHistory: [],
    });

    // Parse JSON response, handling potential markdown formatting
    const content = response.message.replace('```json', '').replace('```', '').trim();
    const parsed = JSON.parse(content);

    return {
      sufficient: parsed.sufficient || false,
      reasoning: parsed.reasoning || 'No reasoning provided',
    };
  }

  /**
   * Load a single file with caching
   */
  private async loadFile(filePath: string): Promise<IFileInfo> {
    // Try cache first
    const cached = await this.cache.get(filePath);
    if (cached) {
      return {
        path: filePath,
        relativePath: plugins.path.relative(this.projectRoot, filePath),
        contents: cached.contents,
        tokenCount: cached.tokenCount,
      };
    }

    // Load from disk
    const contents = await plugins.smartfile.fs.toStringSync(filePath);
    const tokenCount = this.countTokens(contents);
    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    // Cache it
    const stats = await fs.promises.stat(filePath);
    await this.cache.set({
      path: filePath,
      contents,
      tokenCount,
      mtime: Math.floor(stats.mtimeMs),
      cachedAt: Date.now(),
    });

    return {
      path: filePath,
      relativePath,
      contents,
      tokenCount,
    };
  }

  /**
   * Format a file for inclusion in context
   */
  private formatFileForContext(file: IFileInfo): string {
    return `
====== START OF FILE ${file.relativePath} ======

${file.contents}

====== END OF FILE ${file.relativePath} ======
`;
  }

  /**
   * Count tokens in text
   */
  private countTokens(text: string): number {
    try {
      const tokens = plugins.gptTokenizer.encode(text);
      return tokens.length;
    } catch (error) {
      return Math.ceil(text.length / 4);
    }
  }
}
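A minimal driver sketch for the builder above, assuming OPENAI_TOKEN is set in the environment; the option values are illustrative, not the defaults:

import { IterativeContextBuilder } from './ts/context/iterative-context-builder.js';

const builder = new IterativeContextBuilder(process.cwd(), { maxIterations: 3, firstPassFileLimit: 8 });
await builder.initialize(); // throws if OPENAI_TOKEN is missing
const result = await builder.buildContextIteratively('readme');
console.log(`${result.iterationCount} iterations, ${result.apiCallCount} API calls, ${result.tokenCount} tokens`);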
191 ts/context/lazy-file-loader.ts Normal file
@@ -0,0 +1,191 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { IFileMetadata, IFileInfo } from './types.js';

/**
 * LazyFileLoader handles efficient file loading by:
 * - Scanning files for metadata without loading contents
 * - Providing fast file size and token estimates
 * - Loading contents only when requested
 * - Parallel loading of selected files
 */
export class LazyFileLoader {
  private projectRoot: string;
  private metadataCache: Map<string, IFileMetadata> = new Map();

  /**
   * Creates a new LazyFileLoader
   * @param projectRoot - Root directory of the project
   */
  constructor(projectRoot: string) {
    this.projectRoot = projectRoot;
  }

  /**
   * Scans files in given globs and creates metadata without loading contents
   * @param globs - File patterns to scan (e.g., ['ts/**\/*.ts', 'test/**\/*.ts'])
   * @returns Array of file metadata
   */
  public async scanFiles(globs: string[]): Promise<IFileMetadata[]> {
    const metadata: IFileMetadata[] = [];

    for (const globPattern of globs) {
      try {
        const smartFiles = await plugins.smartfile.fs.fileTreeToObject(this.projectRoot, globPattern);
        const fileArray = Array.isArray(smartFiles) ? smartFiles : [smartFiles];

        for (const smartFile of fileArray) {
          try {
            const meta = await this.getMetadata(smartFile.path);
            metadata.push(meta);
          } catch (error) {
            // Skip files that can't be read
            console.warn(`Failed to get metadata for ${smartFile.path}:`, error.message);
          }
        }
      } catch (error) {
        // Skip patterns that don't match any files
        console.warn(`No files found for pattern ${globPattern}`);
      }
    }

    return metadata;
  }

  /**
   * Gets metadata for a single file without loading contents
   * @param filePath - Absolute path to the file
   * @returns File metadata
   */
  public async getMetadata(filePath: string): Promise<IFileMetadata> {
    // Check cache first
    if (this.metadataCache.has(filePath)) {
      const cached = this.metadataCache.get(filePath)!;
      const currentStats = await fs.promises.stat(filePath);

      // Return cached if file hasn't changed
      if (cached.mtime === Math.floor(currentStats.mtimeMs)) {
        return cached;
      }
    }

    // Get file stats
    const stats = await fs.promises.stat(filePath);
    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    // Estimate tokens: rough estimate of ~4 characters per token
    // This is faster than reading and tokenizing the entire file
    const estimatedTokens = Math.ceil(stats.size / 4);

    const metadata: IFileMetadata = {
      path: filePath,
      relativePath,
      size: stats.size,
      mtime: Math.floor(stats.mtimeMs),
      estimatedTokens,
    };

    // Cache the metadata
    this.metadataCache.set(filePath, metadata);

    return metadata;
  }

  /**
   * Loads file contents for selected files in parallel
   * @param metadata - Array of file metadata to load
   * @param tokenizer - Function to calculate accurate token count
   * @returns Array of complete file info with contents
   */
  public async loadFiles(
    metadata: IFileMetadata[],
    tokenizer: (content: string) => number
  ): Promise<IFileInfo[]> {
    // Load files in parallel
    const loadPromises = metadata.map(async (meta) => {
      try {
        const contents = await plugins.smartfile.fs.toStringSync(meta.path);
        const tokenCount = tokenizer(contents);

        const fileInfo: IFileInfo = {
          path: meta.path,
          relativePath: meta.relativePath,
          contents,
          tokenCount,
          importanceScore: meta.importanceScore,
        };

        return fileInfo;
      } catch (error) {
        console.warn(`Failed to load file ${meta.path}:`, error.message);
        return null;
      }
    });

    // Wait for all loads to complete and filter out failures
    const results = await Promise.all(loadPromises);
    return results.filter((r): r is IFileInfo => r !== null);
  }

  /**
   * Loads a single file with contents
   * @param filePath - Absolute path to the file
   * @param tokenizer - Function to calculate accurate token count
   * @returns Complete file info with contents
   */
  public async loadFile(
    filePath: string,
    tokenizer: (content: string) => number
  ): Promise<IFileInfo> {
    const meta = await this.getMetadata(filePath);
    const contents = await plugins.smartfile.fs.toStringSync(filePath);
    const tokenCount = tokenizer(contents);
    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    return {
      path: filePath,
      relativePath,
      contents,
      tokenCount,
      importanceScore: meta.importanceScore,
    };
  }

  /**
   * Updates importance scores for metadata entries
   * @param scores - Map of file paths to importance scores
   */
  public updateImportanceScores(scores: Map<string, number>): void {
    for (const [path, score] of scores) {
      const meta = this.metadataCache.get(path);
      if (meta) {
        meta.importanceScore = score;
      }
    }
  }

  /**
   * Clears the metadata cache
   */
  public clearCache(): void {
    this.metadataCache.clear();
  }

  /**
   * Gets total estimated tokens for all cached metadata
   */
  public getTotalEstimatedTokens(): number {
    let total = 0;
    for (const meta of this.metadataCache.values()) {
      total += meta.estimatedTokens;
    }
    return total;
  }

  /**
   * Gets cached metadata entries
   */
  public getCachedMetadata(): IFileMetadata[] {
    return Array.from(this.metadataCache.values());
  }
}
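A short sketch of the scan-then-load flow this class enables; the inline char/4 tokenizer stands in for the real gpt-tokenizer and is an assumption:

import { LazyFileLoader } from './ts/context/lazy-file-loader.js';

const loader = new LazyFileLoader(process.cwd());
// Phase 1: metadata only - no file contents are read yet
const metadata = await loader.scanFiles(['ts/**/*.ts', 'package.json']);
// Phase 2: load only the most promising candidates, with an accurate token count
const topTen = [...metadata]
  .sort((a, b) => (b.importanceScore ?? 0) - (a.importanceScore ?? 0))
  .slice(0, 10);
const files = await loader.loadFiles(topTen, (text) => Math.ceil(text.length / 4));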
@@ -1,10 +1,10 @@
 import * as plugins from '../plugins.js';
-import { EnhancedContext } from './enhanced-context.js';
+import { IterativeContextBuilder } from './iterative-context-builder.js';
 import { ConfigManager } from './config-manager.js';
-import type { IContextResult, TaskType } from './types.js';
+import type { IIterativeContextResult, TaskType } from './types.js';

 /**
- * Factory class for creating task-specific context
+ * Factory class for creating task-specific context using iterative context building
  */
 export class TaskContextFactory {
   private projectDir: string;
@@ -29,71 +29,50 @@ export class TaskContextFactory {
   /**
    * Create context for README generation
    */
-  public async createContextForReadme(): Promise<IContextResult> {
-    const contextBuilder = new EnhancedContext(this.projectDir);
-    await contextBuilder.initialize();
-
-    // Get README-specific configuration
-    const taskConfig = this.configManager.getTaskConfig('readme');
-    if (taskConfig.mode) {
-      contextBuilder.setContextMode(taskConfig.mode);
-    }
-
-    // Build the context for README task
-    return await contextBuilder.buildContext('readme');
+  public async createContextForReadme(): Promise<IIterativeContextResult> {
+    const iterativeBuilder = new IterativeContextBuilder(
+      this.projectDir,
+      this.configManager.getIterativeConfig()
+    );
+    await iterativeBuilder.initialize();
+    return await iterativeBuilder.buildContextIteratively('readme');
   }

   /**
    * Create context for description generation
    */
-  public async createContextForDescription(): Promise<IContextResult> {
-    const contextBuilder = new EnhancedContext(this.projectDir);
-    await contextBuilder.initialize();
-
-    // Get description-specific configuration
-    const taskConfig = this.configManager.getTaskConfig('description');
-    if (taskConfig.mode) {
-      contextBuilder.setContextMode(taskConfig.mode);
-    }
-
-    // Build the context for description task
-    return await contextBuilder.buildContext('description');
+  public async createContextForDescription(): Promise<IIterativeContextResult> {
+    const iterativeBuilder = new IterativeContextBuilder(
+      this.projectDir,
+      this.configManager.getIterativeConfig()
+    );
+    await iterativeBuilder.initialize();
+    return await iterativeBuilder.buildContextIteratively('description');
   }

   /**
    * Create context for commit message generation
-   * @param gitDiff Optional git diff to include
+   * @param gitDiff Optional git diff to include (currently not used in iterative mode)
    */
-  public async createContextForCommit(gitDiff?: string): Promise<IContextResult> {
-    const contextBuilder = new EnhancedContext(this.projectDir);
-    await contextBuilder.initialize();
-
-    // Get commit-specific configuration
-    const taskConfig = this.configManager.getTaskConfig('commit');
-    if (taskConfig.mode) {
-      contextBuilder.setContextMode(taskConfig.mode);
-    }
-
-    // Build the context for commit task
-    const contextResult = await contextBuilder.buildContext('commit');
-
-    // If git diff is provided, add it to the context
-    if (gitDiff) {
-      contextBuilder.updateWithGitDiff(gitDiff);
-    }
-
-    return contextBuilder.getContextResult();
+  public async createContextForCommit(gitDiff?: string): Promise<IIterativeContextResult> {
+    const iterativeBuilder = new IterativeContextBuilder(
+      this.projectDir,
+      this.configManager.getIterativeConfig()
+    );
+    await iterativeBuilder.initialize();
+    // Note: git diff could be incorporated into the iterative prompts if needed
+    return await iterativeBuilder.buildContextIteratively('commit');
   }

   /**
    * Create context for any task type
    * @param taskType The task type to create context for
-   * @param additionalContent Optional additional content to include
+   * @param additionalContent Optional additional content (currently not used)
    */
   public async createContextForTask(
     taskType: TaskType,
     additionalContent?: string
-  ): Promise<IContextResult> {
+  ): Promise<IIterativeContextResult> {
     switch (taskType) {
       case 'readme':
         return this.createContextForReadme();
@@ -102,10 +81,8 @@ export class TaskContextFactory {
       case 'commit':
         return this.createContextForCommit(additionalContent);
       default:
-        // Generic context for unknown task types
-        const contextBuilder = new EnhancedContext(this.projectDir);
-        await contextBuilder.initialize();
-        return await contextBuilder.buildContext();
+        // Default to readme for unknown task types
+        return this.createContextForReadme();
     }
   }
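A hypothetical call site for the factory; its constructor sits outside this hunk, so the single projectDir argument is an assumption based on the old EnhancedContext usage:

import { TaskContextFactory } from './ts/context/task-context-factory.js';

const factory = new TaskContextFactory(process.cwd());
const readmeContext = await factory.createContextForTask('readme');
console.log(readmeContext.iterationCount, readmeContext.totalDuration);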
@@ -58,6 +58,74 @@ export interface IContextConfig {
   };
   /** Trimming configuration */
   trimming?: ITrimConfig;
+  /** Cache configuration */
+  cache?: ICacheConfig;
+  /** Analyzer configuration */
+  analyzer?: IAnalyzerConfig;
+  /** Prioritization weights */
+  prioritization?: IPrioritizationWeights;
+  /** Tier configuration for adaptive trimming */
+  tiers?: ITierConfig;
+  /** Iterative context building configuration */
+  iterative?: IIterativeConfig;
+}
+
+/**
+ * Cache configuration
+ */
+export interface ICacheConfig {
+  /** Whether caching is enabled */
+  enabled?: boolean;
+  /** Time-to-live in seconds */
+  ttl?: number;
+  /** Maximum cache size in MB */
+  maxSize?: number;
+  /** Cache directory path */
+  directory?: string;
+}
+
+/**
+ * Analyzer configuration
+ * Note: Smart analysis is always enabled; this config only controls advanced options
+ */
+export interface IAnalyzerConfig {
+  /** Whether to use AI refinement for selection (advanced, disabled by default) */
+  useAIRefinement?: boolean;
+  /** AI model to use for refinement */
+  aiModel?: string;
+}
+
+/**
+ * Weights for file prioritization
+ */
+export interface IPrioritizationWeights {
+  /** Weight for dependency centrality */
+  dependencyWeight?: number;
+  /** Weight for task relevance */
+  relevanceWeight?: number;
+  /** Weight for token efficiency */
+  efficiencyWeight?: number;
+  /** Weight for file recency */
+  recencyWeight?: number;
+}
+
+/**
+ * Tier configuration for adaptive trimming
+ */
+export interface ITierConfig {
+  essential?: ITierSettings;
+  important?: ITierSettings;
+  optional?: ITierSettings;
+}
+
+/**
+ * Settings for a single tier
+ */
+export interface ITierSettings {
+  /** Minimum score to qualify for this tier */
+  minScore: number;
+  /** Trimming level to apply */
+  trimLevel: 'none' | 'light' | 'aggressive';
 }

 /**
@@ -93,3 +161,161 @@ export interface IContextResult {
   /** Token savings from trimming */
   tokenSavings: number;
 }
+
+/**
+ * File metadata without contents (for lazy loading)
+ */
+export interface IFileMetadata {
+  /** The file path */
+  path: string;
+  /** The file's relative path from the project root */
+  relativePath: string;
+  /** File size in bytes */
+  size: number;
+  /** Last modified time (Unix timestamp) */
+  mtime: number;
+  /** Estimated token count (without loading full contents) */
+  estimatedTokens: number;
+  /** The file's importance score */
+  importanceScore?: number;
+}
+
+/**
+ * Cache entry for a file
+ */
+export interface ICacheEntry {
+  /** File path */
+  path: string;
+  /** File contents */
+  contents: string;
+  /** Token count */
+  tokenCount: number;
+  /** Last modified time when cached */
+  mtime: number;
+  /** When this cache entry was created */
+  cachedAt: number;
+}
+
+/**
+ * Dependency information for a file
+ */
+export interface IFileDependencies {
+  /** File path */
+  path: string;
+  /** Files this file imports */
+  imports: string[];
+  /** Files that import this file */
+  importedBy: string[];
+  /** Centrality score (0-1) - how central this file is in the dependency graph */
+  centrality: number;
+}
+
+/**
+ * Analysis result for a file
+ */
+export interface IFileAnalysis {
+  /** File path */
+  path: string;
+  /** Task relevance score (0-1) */
+  relevanceScore: number;
+  /** Dependency centrality score (0-1) */
+  centralityScore: number;
+  /** Token efficiency score (0-1) */
+  efficiencyScore: number;
+  /** Recency score (0-1) */
+  recencyScore: number;
+  /** Combined importance score (0-1) */
+  importanceScore: number;
+  /** Assigned tier */
+  tier: 'essential' | 'important' | 'optional' | 'excluded';
+  /** Reason for the score */
+  reason?: string;
+}
+
+/**
+ * Result of context analysis
+ */
+export interface IAnalysisResult {
+  /** Task type being analyzed */
+  taskType: TaskType;
+  /** Analyzed files with scores */
+  files: IFileAnalysis[];
+  /** Dependency graph */
+  dependencyGraph: Map<string, IFileDependencies>;
+  /** Total files analyzed */
+  totalFiles: number;
+  /** Analysis duration in ms */
+  analysisDuration: number;
+}
+
+/**
+ * Configuration for iterative context building
+ */
+export interface IIterativeConfig {
+  /** Maximum number of iterations allowed */
+  maxIterations?: number;
+  /** Maximum files to request in first iteration */
+  firstPassFileLimit?: number;
+  /** Maximum files to request in subsequent iterations */
+  subsequentPassFileLimit?: number;
+  /** Temperature for AI decision making (0-1) */
+  temperature?: number;
+  /** Model to use for iterative decisions */
+  model?: string;
+}
+
+/**
+ * AI decision for file selection
+ */
+export interface IFileSelectionDecision {
+  /** AI's reasoning for file selection */
+  reasoning: string;
+  /** File paths to load */
+  filesToLoad: string[];
+  /** Estimated tokens needed */
+  estimatedTokensNeeded?: number;
+}
+
+/**
+ * AI decision for context sufficiency
+ */
+export interface IContextSufficiencyDecision {
+  /** Whether context is sufficient */
+  sufficient: boolean;
+  /** AI's reasoning */
+  reasoning: string;
+  /** Additional files needed (if not sufficient) */
+  additionalFilesNeeded?: string[];
+}
+
+/**
+ * State for a single iteration
+ */
+export interface IIterationState {
+  /** Iteration number (1-based) */
+  iteration: number;
+  /** Files loaded in this iteration */
+  filesLoaded: IFileInfo[];
+  /** Tokens used in this iteration */
+  tokensUsed: number;
+  /** Total tokens used so far */
+  totalTokensUsed: number;
+  /** AI decision made in this iteration */
+  decision: IFileSelectionDecision | IContextSufficiencyDecision;
+  /** Duration of this iteration in ms */
+  duration: number;
+}
+
+/**
+ * Result of iterative context building
+ */
+export interface IIterativeContextResult extends IContextResult {
+  /** Number of iterations performed */
+  iterationCount: number;
+  /** Details of each iteration */
+  iterations: IIterationState[];
+  /** Total API calls made */
+  apiCallCount: number;
+  /** Total duration in ms */
+  totalDuration: number;
+}
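Taken together, the new optional fields let a project shape the whole pipeline from configuration. A sketch of a populated config follows; Partial is used because fields of IContextConfig outside this hunk are not visible here, and the values are illustrative rather than defaults:

import type { IContextConfig } from './ts/context/types.js';

const contextConfig: Partial<IContextConfig> = {
  cache: { enabled: true, ttl: 3600, maxSize: 50 },
  analyzer: { useAIRefinement: false },
  prioritization: { dependencyWeight: 0.3, relevanceWeight: 0.4, efficiencyWeight: 0.2, recencyWeight: 0.1 },
  tiers: {
    essential: { minScore: 0.8, trimLevel: 'none' },
    important: { minScore: 0.5, trimLevel: 'light' },
    optional: { minScore: 0.2, trimLevel: 'aggressive' },
  },
  iterative: { maxIterations: 5, firstPassFileLimit: 10, subsequentPassFileLimit: 5, temperature: 0.3 },
};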