Compare commits

...

25 Commits

SHA1 Message Date
3485392979 1.11.2 (some checks failed) 2025-12-15 15:22:45 +00:00
89adae2cff update 2025-12-15 15:22:35 +00:00
3451ab7456 update 2025-12-15 15:14:16 +00:00
bcded1eafa update 2025-12-15 14:34:02 +00:00
9cae46e2fe update 2025-12-15 14:33:58 +00:00
65c1df30da 1.11.1 (some checks failed) 2025-12-15 12:02:16 +00:00
e8f2add812 fix(dependencies): update 2025-12-15 12:02:13 +00:00
8fcc304ee3 v1.11.0 (some checks failed) 2025-12-15 11:36:03 +00:00
69802b46b6 feat(commit): Integrate DualAgentOrchestrator for commit message generation and improve diff/context handling 2025-12-15 11:36:03 +00:00
e500455557 1.10.2 (some checks failed) 2025-12-13 22:50:26 +00:00
4029691ccd 1.10.1 (some checks failed) 2025-12-13 11:42:43 +00:00
3b1c84d7e8 fix(npmextra): update to new format 2025-12-13 11:42:39 +00:00
f8d0895aab v1.10.0 (some checks failed) 2025-12-02 12:17:10 +00:00
d7ec2220a1 feat(diff-processor): Improve diff sampling and file prioritization: increase inclusion thresholds, expand sampled context, and boost priority for interface/type and entry-point files 2025-12-02 12:17:10 +00:00
c24ce31b1f 1.9.2 (some checks failed) 2025-11-04 03:43:27 +00:00
fec2017cc6 fix(deps): Update dependencies and devDependencies to newer versions (bump multiple packages) 2025-11-04 03:43:27 +00:00
88fac91c79 1.9.1 (some checks failed) 2025-11-04 02:28:55 +00:00
ce4da89da9 fix(iterative-context-builder): Rely on DiffProcessor for git diff pre-processing; remove raw char truncation, raise diff token safety, and improve logging 2025-11-04 02:28:55 +00:00
6524adea18 1.9.0 (some checks failed) 2025-11-04 02:19:57 +00:00
4bf0c02618 feat(context): Add intelligent DiffProcessor to summarize and prioritize git diffs and integrate it into the commit context pipeline 2025-11-04 02:19:57 +00:00
f84a65217d 1.8.3 (some checks failed) 2025-11-04 01:37:15 +00:00
3f22fc91ae fix(context): Prevent enormous git diffs and OOM during context building by adding exclusion patterns, truncation, and diagnostic logging 2025-11-04 01:37:15 +00:00
11e65b92ec 1.8.2 (some checks failed) 2025-11-03 17:53:03 +00:00
0a3080518f 1.8.1 (some checks failed) 2025-11-03 17:50:09 +00:00
d0a4ddbb4b fix(git diff): improve git diff 2025-11-03 17:49:35 +00:00
33 changed files with 3930 additions and 7735 deletions

changelog.md

@@ -1,5 +1,59 @@
# Changelog
## 2025-12-15 - 1.11.0 - feat(commit)
Integrate DualAgentOrchestrator for commit message generation and improve diff/context handling
- Add @push.rocks/smartagent dependency and export it from plugins
- Use DualAgentOrchestrator to generate and guardian-validate commit messages
- Use DualAgentOrchestrator for changelog generation with guardian validation
- Switch commit flow to TaskContextFactory and DiffProcessor for token-efficient context
- Expose getOpenaiToken() and wire orchestrator with the project OpenAI token
- Enhance iterative context builder and context components to better manage token budgets and sampling
- Update npmextra.json with release config for @git.zone/cli and reference local smartagent package in package.json
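
For orientation, a minimal sketch of the flow this entry describes. The TaskContextFactory calls mirror the readme example further down in this diff; the DualAgentOrchestrator constructor options, method name, and result shape are assumptions for illustration, not the documented @push.rocks/smartagent API.

```typescript
import { TaskContextFactory } from '@git.zone/tsdoc';
import { DualAgentOrchestrator } from '@push.rocks/smartagent';

// Sketch only: build a token-efficient commit context, then have the
// orchestrator draft a commit message and guardian-validate it.
async function draftCommitMessage(openaiToken: string) {
  const factory = new TaskContextFactory('./');
  await factory.initialize();
  const commitContext = await factory.createContextForCommit(); // as in the readme example

  // Assumed option and method names; consult @push.rocks/smartagent for the real API.
  const orchestrator = new DualAgentOrchestrator({ openaiToken });
  return orchestrator.run(
    // Result shape of commitContext is assumed to stringify to the assembled context.
    `Propose a conventional commit message for these changes:\n${String(commitContext)}`,
  );
}
```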
## 2025-12-02 - 1.10.0 - feat(diff-processor)
Improve diff sampling and file prioritization: increase inclusion thresholds, expand sampled context, and boost priority for interface/type and entry-point files
- Raise small/medium file thresholds used by DiffProcessor (smallFileLines 50 -> 300, mediumFileLines 200 -> 800) so more source files are included fully or summarized rather than treated as large metadata-only files
- Increase sample window for medium files (sampleHeadLines/sampleTailLines 20 -> 75) to provide more context when summarizing diffs
- Boost importance scoring for interfaces/type files and entry points (adds +20 for interfaces/.types and +15 for index/mod entry files) to prioritize critical API surface in diff processing
- Keep other prioritization rules intact (source/test/config/docs/build heuristics), and align the aidoc commit DiffProcessor usage with the new defaults
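
Restated as code, the bump reads roughly as below. The +20 and +15 bonuses are the values named in this entry; the function itself, the base weights, and the path patterns are illustrative assumptions.

```typescript
// Illustrative only: restates the priority bumps described above.
function scoreFileForDiff(filePath: string): number {
  let score = 0;
  if (/(^|\/)(src|ts)\//.test(filePath)) score += 30;  // source heuristic (assumed weight)
  if (/\/test\//.test(filePath)) score -= 10;          // tests deprioritized (assumed weight)
  // Values from this changelog entry:
  if (/interfaces?|\.types\.ts$|\/types\.ts$/.test(filePath)) score += 20; // interface/type files
  if (/(^|\/)(index|mod)\.(ts|js)$/.test(filePath)) score += 15;           // entry-point files
  return score;
}
```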
## 2025-11-04 - 1.9.2 - fix(deps)
Update dependencies and devDependencies to newer versions (bump multiple packages)
- Bumped devDependencies: @git.zone/tsbuild 2.6.8 -> 2.7.1, @git.zone/tsrun 1.2.46 -> 1.6.2, @git.zone/tstest 2.3.6 -> 2.7.0
- Bumped runtime dependencies: @push.rocks/smartai 0.5.11 -> 0.8.0, @push.rocks/smartcli 4.0.11 -> 4.0.19, @push.rocks/smartgit 3.2.1 -> 3.3.1, @push.rocks/smartlog 3.1.9 -> 3.1.10, gpt-tokenizer 3.0.1 -> 3.2.0, typedoc 0.28.12 -> 0.28.14, typescript 5.9.2 -> 5.9.3
- No source code changes in this commit; dependency-only updates. Run the test suite and CI to verify compatibility.
## 2025-11-04 - 1.9.1 - fix(iterative-context-builder)
Rely on DiffProcessor for git diff pre-processing; remove raw char truncation, raise diff token safety, and improve logging
- Removed raw character-based truncation of additionalContext — diffs are expected to be pre-processed by DiffProcessor instead of blind substring truncation.
- Now validates pre-processed diff token count only and treats DiffProcessor as the primary sampler (DiffProcessor typically uses a ~100k token budget).
- Increased MAX_DIFF_TOKENS safety net to 200,000 to cover edge cases and avoid false positives; updated logs to reflect pre-processed diffs.
- Improved error messaging to indicate a likely DiffProcessor misconfiguration when pre-processed diffs exceed the safety limit.
- Updated informational logs to state that a pre-processed git diff was added to context.
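
A sketch of the safety net described here, counting tokens with the gpt-tokenizer package this project used at the time; the function name and error wording are illustrative.

```typescript
import { encode } from 'gpt-tokenizer';

// Diffs arrive pre-processed by DiffProcessor, so only the token count is
// validated. The 200k constant matches this entry; the rest is a sketch.
const MAX_DIFF_TOKENS = 200_000;

function assertDiffWithinBudget(preprocessedDiff: string): void {
  const tokenCount = encode(preprocessedDiff).length;
  if (tokenCount > MAX_DIFF_TOKENS) {
    throw new Error(
      `Pre-processed diff is ${tokenCount.toLocaleString()} tokens ` +
      `(limit ${MAX_DIFF_TOKENS.toLocaleString()}); DiffProcessor is likely misconfigured.`,
    );
  }
}
```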
## 2025-11-04 - 1.9.0 - feat(context)
Add intelligent DiffProcessor to summarize and prioritize git diffs and integrate it into the commit context pipeline
- Add DiffProcessor (ts/context/diff-processor.ts) to intelligently process git diffs: include small files fully, summarize medium files (head/tail sampling), and mark very large files as metadata-only to stay within token budgets.
- Integrate DiffProcessor into commit workflow (ts/aidocs_classes/commit.ts): preprocess raw diffs, emit processed diff statistics, and pass a token-efficient diff section into the TaskContextFactory for commit context generation.
- Export DiffProcessor and its types through the context index and types (ts/context/index.ts, ts/context/types.ts) so other context components can reuse it.
- Add comprehensive tests for the DiffProcessor behavior and integration (test/test.diffprocessor.node.ts) covering small/medium/large diffs, added/deleted files, prioritization, token budgets, and formatting for context.
- Minor adjustments across context/task factories and builders to accept and propagate processed diff strings rather than raw diffs, reducing risk of token overflows during iterative context building.
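
The three-bucket strategy can be sketched as follows. The option names match those exercised by the DiffProcessor tests later in this diff; the helper bodies are illustrative, not the shipped implementation.

```typescript
// Sketch of the small/medium/large bucketing described above.
interface IDiffProcessorOptions {
  smallFileLines: number;   // include fully at or below this many changed lines
  mediumFileLines: number;  // summarize via head/tail sampling up to this size
  sampleHeadLines: number;
  sampleTailLines: number;
}

function classifyDiff(
  diffLines: string[],
  opts: IDiffProcessorOptions,
): 'full' | 'summarized' | 'metadata-only' {
  if (diffLines.length <= opts.smallFileLines) return 'full';
  if (diffLines.length <= opts.mediumFileLines) return 'summarized';
  return 'metadata-only'; // too large: report file path and change counts only
}

function sampleMediumDiff(diffLines: string[], opts: IDiffProcessorOptions): string[] {
  const omitted = diffLines.length - opts.sampleHeadLines - opts.sampleTailLines;
  return [
    ...diffLines.slice(0, opts.sampleHeadLines),
    `... (${omitted} lines omitted) ...`,
    ...diffLines.slice(-opts.sampleTailLines),
  ];
}
```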
## 2025-11-04 - 1.8.3 - fix(context)
Prevent enormous git diffs and OOM during context building by adding exclusion patterns, truncation, and diagnostic logging
- Add comprehensive git diff exclusion globs (locks, build artifacts, maps, bundles, IDE folders, logs, caches) when collecting uncommitted diffs to avoid noisy/huge diffs
- Pass glob patterns directly to smartgit.getUncommittedDiff for efficient server-side matching
- Emit diagnostic statistics for diffs (files changed, total characters, estimated tokens, number of exclusion patterns) and warn on unusually large diffs
- Introduce pre-tokenization safety checks in iterative context builder: truncate raw diff text if it exceeds MAX_DIFF_CHARS and throw a clear error if token count still exceeds MAX_DIFF_TOKENS
- Format and log token counts using locale-aware formatting for clarity
- Improve robustness of commit context generation to reduce risk of OOM / model-limit overruns
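
Roughly, the collection step now looks like the sketch below. The glob list is a plausible subset of the patterns this entry describes; getUncommittedDiff is named here, but its receiver and exact signature are assumptions.

```typescript
// Illustrative exclusion patterns and diagnostics for the behavior above.
const DIFF_EXCLUDE_GLOBS = [
  '**/pnpm-lock.yaml', '**/package-lock.json', // lock files
  '**/dist/**', '**/build/**',                 // build artifacts
  '**/*.map', '**/*.bundle.js',                // maps and bundles
  '**/.idea/**', '**/.vscode/**',              // IDE folders
  '**/*.log', '**/.cache/**',                  // logs and caches
];

async function collectDiffs(repo: { getUncommittedDiff(globs: string[]): Promise<string[]> }) {
  const diffs = await repo.getUncommittedDiff(DIFF_EXCLUDE_GLOBS);
  const totalChars = diffs.reduce((sum, d) => sum + d.length, 0);
  const estimatedTokens = Math.ceil(totalChars / 4); // rough chars-per-token heuristic
  console.log(
    `diff stats: ${diffs.length} files, ${totalChars.toLocaleString()} chars, ` +
    `~${estimatedTokens.toLocaleString()} tokens, ${DIFF_EXCLUDE_GLOBS.length} exclusion patterns`,
  );
  if (estimatedTokens > 100_000) console.warn('unusually large diff detected');
  return diffs;
}
```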
## 2025-11-03 - 1.8.0 - feat(context)
Wire OpenAI provider through task context factory and add git-diff support to iterative context builder

npmextra.json

@@ -31,5 +31,14 @@
},
"tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@git.zone/cli": {
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
}
}

package.json

@@ -1,6 +1,6 @@
{
"name": "@git.zone/tsdoc",
"version": "1.8.0",
"version": "1.11.2",
"private": false,
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
"type": "module",
@@ -19,30 +19,31 @@
"buildDocs": "tsdoc"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.6.8",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^2.3.6",
"@types/node": "^22.15.17"
"@git.zone/tsbuild": "^4.0.2",
"@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.3",
"@types/node": "^25.0.2"
},
"dependencies": {
"@git.zone/tspublish": "^1.10.3",
"@push.rocks/early": "^4.0.3",
"@push.rocks/early": "^4.0.4",
"@push.rocks/npmextra": "^5.3.3",
"@push.rocks/qenv": "^6.1.3",
"@push.rocks/smartai": "^0.5.11",
"@push.rocks/smartcli": "^4.0.11",
"@push.rocks/smartagent": "file:../../push.rocks/smartagent",
"@push.rocks/smartai": "^0.8.0",
"@push.rocks/smartcli": "^4.0.19",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.2.7",
"@push.rocks/smartgit": "^3.2.1",
"@push.rocks/smartinteract": "^2.0.15",
"@push.rocks/smartlog": "^3.1.9",
"@push.rocks/smartfile": "^13.1.2",
"@push.rocks/smartfs": "^1.2.0",
"@push.rocks/smartgit": "^3.3.1",
"@push.rocks/smartinteract": "^2.0.16",
"@push.rocks/smartlog": "^3.1.10",
"@push.rocks/smartlog-destination-local": "^9.0.2",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartshell": "^3.3.0",
"@push.rocks/smarttime": "^4.0.6",
"gpt-tokenizer": "^3.0.1",
"typedoc": "^0.28.12",
"typescript": "^5.9.2"
"@push.rocks/smarttime": "^4.1.1",
"typedoc": "^0.28.15",
"typescript": "^5.9.3"
},
"files": [
"ts/**/*",

pnpm-lock.yaml (generated, 5589 lines changed)
File diff suppressed because it is too large.


@@ -1,5 +0,0 @@
onlyBuiltDependencies:
- esbuild
- mongodb-memory-server
- puppeteer
- sharp

readme.md (265 lines changed)

@@ -1,15 +1,18 @@
# @git.zone/tsdoc 🚀
**AI-Powered Documentation for TypeScript Projects**
> Stop writing documentation. Let AI understand your code and do it for you.
AI-Powered Documentation for TypeScript Projects
## Issue Reporting and Security
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
## What is tsdoc?
`@git.zone/tsdoc` is a next-generation documentation tool that combines traditional TypeDoc generation with cutting-edge AI to create comprehensive, intelligent documentation for your TypeScript projects. It reads your code, understands it, and writes documentation that actually makes sense.
`@git.zone/tsdoc` is a next-generation documentation CLI tool that combines traditional TypeDoc generation with cutting-edge AI to create comprehensive, intelligent documentation for your TypeScript projects. It reads your code, understands it, and writes documentation that actually makes sense.
### ✨ Key Features
- **🤖 AI-Enhanced Documentation** - Leverages GPT-5 and other models to generate contextual READMEs
- **🤖 AI-Enhanced Documentation** - Leverages AI to generate contextual READMEs
- **🧠 Smart Context Building** - Intelligent file prioritization with dependency analysis and caching
- **📚 TypeDoc Integration** - Classic API documentation generation when you need it
- **💬 Smart Commit Messages** - AI analyzes your changes and suggests meaningful commit messages
@@ -22,9 +25,6 @@
```bash
# Global installation (recommended)
npm install -g @git.zone/tsdoc
# Or with pnpm
pnpm add -g @git.zone/tsdoc
# Or use with npx
@@ -66,7 +66,6 @@ tsdoc commit
| `tsdoc typedoc` | Generate TypeDoc documentation |
| `tsdoc commit` | Generate smart commit message |
| `tsdoc tokens` | Analyze token usage for AI context |
| `tsdoc context` | Display context information |
### Token Analysis
@@ -79,91 +78,25 @@ tsdoc tokens
# Show detailed stats for all task types
tsdoc tokens --all
# Test with trimmed context
tsdoc tokens --trim
# Show detailed breakdown with file listing
tsdoc tokens --detailed --listFiles
```
## Programmatic Usage
### Command Options
### Generate Documentation Programmatically
#### tsdoc aidoc
- `--tokens` / `--showTokens` - Show token count before generating
- `--tokensOnly` - Only show token count, don't generate
```typescript
import { AiDoc } from '@git.zone/tsdoc';
#### tsdoc typedoc
- `--publicSubdir <dir>` - Output subdirectory within public folder
const generateDocs = async () => {
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-token' });
await aiDoc.start();
// Generate README
await aiDoc.buildReadme('./');
// Update package.json description
await aiDoc.buildDescription('./');
// Get smart commit message
const commit = await aiDoc.buildNextCommitObject('./');
console.log(commit.recommendedNextVersionMessage);
// Don't forget to stop when done
await aiDoc.stop();
};
```
### TypeDoc Generation
```typescript
import { TypeDoc } from '@git.zone/tsdoc';
const typeDoc = new TypeDoc(process.cwd());
await typeDoc.compile({ publicSubdir: 'docs' });
```
### Smart Context Management
Control how tsdoc processes your codebase with the new intelligent context system:
```typescript
import { EnhancedContext, ContextAnalyzer, LazyFileLoader, ContextCache } from '@git.zone/tsdoc';
const context = new EnhancedContext('./');
await context.initialize();
// Set token budget
context.setTokenBudget(100000);
// Choose context mode
context.setContextMode('trimmed'); // 'full' | 'trimmed' | 'summarized'
// Build optimized context with smart prioritization
const result = await context.buildContext('readme');
console.log(`Tokens used: ${result.tokenCount}`);
console.log(`Files included: ${result.includedFiles.length}`);
console.log(`Token savings: ${result.tokenSavings}`);
```
### Advanced: Using Individual Context Components
```typescript
import { LazyFileLoader, ContextAnalyzer, ContextCache } from '@git.zone/tsdoc';
// Lazy file loading - scan metadata without loading contents
const loader = new LazyFileLoader('./');
const metadata = await loader.scanFiles(['ts/**/*.ts']);
console.log(`Found ${metadata.length} files`);
// Analyze and prioritize files
const analyzer = new ContextAnalyzer('./');
const analysis = await analyzer.analyze(metadata, 'readme');
// Files are sorted by importance with dependency analysis
for (const file of analysis.files) {
console.log(`${file.path}: score ${file.importanceScore.toFixed(2)}, tier ${file.tier}`);
}
// Context caching for performance
const cache = new ContextCache('./', { enabled: true, ttl: 3600 });
await cache.init();
```
#### tsdoc tokens
- `--task <type>` - Specify task type: `readme`, `commit`, or `description`
- `--all` - Show stats for all task types
- `--detailed` - Show detailed token usage and costs
- `--listFiles` - List all files included in context
- `--model <name>` - Show usage for specific model (`gpt4`, `gpt35`)
## Configuration
@@ -171,7 +104,8 @@ Configure tsdoc via `npmextra.json`:
```json
{
"tsdoc": {
"@git.zone/tsdoc": {
"legal": "## License and Legal Information\n\n...",
"context": {
"maxTokens": 190000,
"defaultMode": "trimmed",
@@ -181,7 +115,6 @@ Configure tsdoc via `npmextra.json`:
"maxSize": 100
},
"analyzer": {
"enabled": true,
"useAIRefinement": false
},
"prioritization": {
@@ -234,11 +167,6 @@ Configure tsdoc via `npmextra.json`:
- **maxSize** - Maximum cache size in MB (default: 100)
- **directory** - Cache directory path (default: .nogit/context-cache)
#### Analyzer Configuration
- **enabled** - Enable smart file analysis (default: true)
- **useAIRefinement** - Use AI for additional context refinement (default: false)
- **aiModel** - Model for AI refinement (default: 'haiku')
## How It Works
### 🚀 Smart Context Building Pipeline
@@ -270,21 +198,14 @@ The smart context system delivers significant improvements:
| **Relevance** | Alphabetical sorting | Smart scoring | 🎯 90%+ relevant |
| **Cache Hits** | None | 70-80% | 🚀 Major speedup |
### Traditional Context Optimization
For projects where the analyzer is disabled, tsdoc still employs:
- **Intelligent Trimming** - Removes implementation details while preserving signatures
- **JSDoc Preservation** - Keeps documentation comments
- **Interface Prioritization** - Type definitions always included
- **Token Budgeting** - Ensures optimal use of AI context windows
## Environment Variables
| Variable | Description |
|----------|-------------|
| `OPENAI_TOKEN` | Your OpenAI API key for AI features (required) |
The token can also be provided interactively on first run - it will be persisted in `~/.npmextra/kv/@git.zone/tsdoc.json`.
## Use Cases
### 🚀 Continuous Integration
@@ -336,103 +257,6 @@ tsdoc commit > .git/COMMIT_EDITMSG
}
```
## Advanced Features
### Multi-Module Projects
tsdoc automatically detects and documents multi-module projects:
```typescript
const aiDoc = new AiDoc();
await aiDoc.start();
// Process main project
await aiDoc.buildReadme('./');
// Process submodules
for (const module of ['packages/core', 'packages/cli']) {
await aiDoc.buildReadme(module);
}
await aiDoc.stop();
```
### Custom Context Building
Fine-tune what gets sent to AI with task-specific contexts:
```typescript
import { TaskContextFactory } from '@git.zone/tsdoc';
const factory = new TaskContextFactory('./');
await factory.initialize();
// Get optimized context for specific tasks
const readmeContext = await factory.createContextForReadme();
const commitContext = await factory.createContextForCommit();
const descContext = await factory.createContextForDescription();
```
### Dependency Graph Analysis
Understand your codebase structure:
```typescript
import { ContextAnalyzer } from '@git.zone/tsdoc';
const analyzer = new ContextAnalyzer('./');
const analysis = await analyzer.analyze(metadata, 'readme');
// Explore dependency graph
for (const [path, deps] of analysis.dependencyGraph) {
console.log(`${path}:`);
console.log(` Imports: ${deps.imports.length}`);
console.log(` Imported by: ${deps.importedBy.length}`);
console.log(` Centrality: ${deps.centrality.toFixed(3)}`);
}
```
## Performance & Optimization
### ⚡ Performance Features
- **Lazy Loading** - Files scanned for metadata before content loading
- **Parallel Processing** - Multiple files loaded simultaneously
- **Smart Caching** - Results cached with mtime-based invalidation
- **Incremental Updates** - Only reprocess changed files
- **Streaming** - Minimal memory footprint
### 💰 Cost Optimization
The smart context system significantly reduces AI API costs:
```typescript
// Check token usage before and after optimization
import { EnhancedContext } from '@git.zone/tsdoc';
const context = new EnhancedContext('./');
await context.initialize();
// Build with analyzer enabled
const result = await context.buildContext('readme');
console.log(`Tokens: ${result.tokenCount}`);
console.log(`Savings: ${result.tokenSavings} (${(result.tokenSavings/result.tokenCount*100).toFixed(1)}%)`);
```
### 📊 Token Analysis
Monitor and optimize your token usage:
```bash
# Analyze current token usage
tsdoc tokens
# Compare modes
tsdoc tokens --mode full # No optimization
tsdoc tokens --mode trimmed # Standard optimization
tsdoc tokens --analyze # With smart prioritization
```
## Requirements
- **Node.js** >= 18.0.0
@@ -446,21 +270,15 @@ tsdoc tokens --analyze # With smart prioritization
If you hit token limits, try:
```bash
# Enable smart analyzer (default)
tsdoc aidoc
# Use aggressive trimming
tsdoc aidoc --trim
# Check token usage details
tsdoc tokens --all --analyze
tsdoc tokens --all --detailed
```
Or configure stricter limits:
Or configure stricter limits in `npmextra.json`:
```json
{
"tsdoc": {
"@git.zone/tsdoc": {
"context": {
"maxTokens": 100000,
"tiers": {
@@ -484,19 +302,16 @@ tsdoc aidoc
### Slow Performance
Enable caching and adjust settings:
Enable caching and adjust settings in `npmextra.json`:
```json
{
"tsdoc": {
"@git.zone/tsdoc": {
"context": {
"cache": {
"enabled": true,
"ttl": 7200,
"maxSize": 200
},
"analyzer": {
"enabled": true
}
}
}
@@ -525,9 +340,6 @@ Regenerate documentation with every change. Smart dependency analysis ensures no
### 🎨 Beautiful Output
Clean, professional documentation every time. AI understands your code's purpose and explains it clearly.
### 🛠️ Developer-Friendly
Built by developers, for developers. Sensible defaults, powerful configuration, and extensive programmatic API.
### 💰 Cost-Effective
Smart context optimization reduces AI API costs by 40-60% without sacrificing quality.
@@ -545,6 +357,7 @@ Smart context optimization reduces AI API costs by 40-60% without sacrificing qu
│ ├── ContextCache # Performance caching
│ ├── ContextAnalyzer # Intelligent file analysis
│ ├── ContextTrimmer # Adaptive code trimming
│ ├── DiffProcessor # Git diff optimization
│ ├── ConfigManager # Configuration management
│ └── TaskContextFactory # Task-specific contexts
└── CLI # Command-line interface
@@ -567,30 +380,28 @@ ContextTrimmer (tier-based)
Token Budget (enforcement)
AI Model (GPT-5)
AI Model
Generated Documentation
```
## Contributing
We appreciate your interest! However, we are not accepting external contributions at this time. If you find bugs or have feature requests, please open an issue.
## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
### Trademarks
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
### Company Information
Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
Registered at District Court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
For any legal inquiries or further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.


@@ -33,7 +33,10 @@ tap.test('should build commit object', async () => {
expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
expect(commitObject).toHaveProperty('recommendedNextVersionScope');
expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
});
})
tap.test('should stop AIdocs', async () => {
await aidocs.stop();
});
tap.start();


@@ -1,465 +0,0 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { ContextAnalyzer } from '../ts/context/context-analyzer.js';
import type { IFileMetadata } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
tap.test('ContextAnalyzer should create instance with default weights', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});
tap.test('ContextAnalyzer should create instance with custom weights', async () => {
const analyzer = new ContextAnalyzer(
testProjectRoot,
{
dependencyWeight: 0.5,
relevanceWeight: 0.3,
efficiencyWeight: 0.1,
recencyWeight: 0.1
}
);
expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});
tap.test('ContextAnalyzer.analyze should return analysis result with files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.taskType).toEqual('readme');
expect(result.files.length).toEqual(2);
expect(result.totalFiles).toEqual(2);
expect(result.analysisDuration).toBeGreaterThan(0);
expect(result.dependencyGraph).toBeDefined();
});
tap.test('ContextAnalyzer.analyze should assign importance scores to files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.files[0].importanceScore).toBeGreaterThanOrEqual(0);
expect(result.files[0].importanceScore).toBeLessThanOrEqual(1);
});
tap.test('ContextAnalyzer.analyze should sort files by importance score', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
relativePath: 'test/test.basic.node.ts',
size: 2000,
mtime: Date.now(),
estimatedTokens: 500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Files should be sorted by importance (highest first)
for (let i = 0; i < result.files.length - 1; i++) {
expect(result.files[i].importanceScore).toBeGreaterThanOrEqual(
result.files[i + 1].importanceScore
);
}
});
tap.test('ContextAnalyzer.analyze should assign tiers based on scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
const file = result.files[0];
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});
tap.test('ContextAnalyzer should prioritize index.ts files for README task', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'ts/some-helper.ts'),
relativePath: 'ts/some-helper.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// index.ts should have higher relevance score
const indexFile = result.files.find(f => f.path.includes('index.ts'));
const helperFile = result.files.find(f => f.path.includes('some-helper.ts'));
if (indexFile && helperFile) {
expect(indexFile.relevanceScore).toBeGreaterThan(helperFile.relevanceScore);
}
});
tap.test('ContextAnalyzer should deprioritize test files for README task', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
relativePath: 'test/test.basic.node.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Source file should have higher relevance than test file
const sourceFile = result.files.find(f => f.path.includes('ts/context/types.ts'));
const testFile = result.files.find(f => f.path.includes('test/test.basic.node.ts'));
if (sourceFile && testFile) {
expect(sourceFile.relevanceScore).toBeGreaterThan(testFile.relevanceScore);
}
});
tap.test('ContextAnalyzer should prioritize changed files for commit task', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const changedFile = path.join(testProjectRoot, 'ts/context/types.ts');
const unchangedFile = path.join(testProjectRoot, 'ts/index.ts');
const metadata: IFileMetadata[] = [
{
path: changedFile,
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
},
{
path: unchangedFile,
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'commit', [changedFile]);
const changed = result.files.find(f => f.path === changedFile);
const unchanged = result.files.find(f => f.path === unchangedFile);
if (changed && unchanged) {
// Changed file should have recency score of 1.0
expect(changed.recencyScore).toEqual(1.0);
// Unchanged file should have recency score of 0
expect(unchanged.recencyScore).toEqual(0);
}
});
tap.test('ContextAnalyzer should calculate efficiency scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000, // Optimal size
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/very-large-file.ts'),
relativePath: 'ts/very-large-file.ts',
size: 50000, // Too large
mtime: Date.now(),
estimatedTokens: 12500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Optimal size file should have better efficiency score
const optimalFile = result.files.find(f => f.path.includes('types.ts'));
const largeFile = result.files.find(f => f.path.includes('very-large-file.ts'));
if (optimalFile && largeFile) {
expect(optimalFile.efficiencyScore).toBeGreaterThan(largeFile.efficiencyScore);
}
});
tap.test('ContextAnalyzer should build dependency graph', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
},
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.dependencyGraph.size).toBeGreaterThan(0);
// Check that each file has dependency info
for (const meta of metadata) {
const deps = result.dependencyGraph.get(meta.path);
expect(deps).toBeDefined();
expect(deps!.path).toEqual(meta.path);
expect(deps!.imports).toBeDefined();
expect(deps!.importedBy).toBeDefined();
expect(deps!.centrality).toBeGreaterThanOrEqual(0);
}
});
tap.test('ContextAnalyzer should calculate centrality scores', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
relativePath: 'ts/context/enhanced-context.ts',
size: 10000,
mtime: Date.now(),
estimatedTokens: 2500
}
];
const result = await analyzer.analyze(metadata, 'readme');
// All centrality scores should be between 0 and 1
for (const [, deps] of result.dependencyGraph) {
expect(deps.centrality).toBeGreaterThanOrEqual(0);
expect(deps.centrality).toBeLessThanOrEqual(1);
}
});
tap.test('ContextAnalyzer should assign higher centrality to highly imported files', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
// types.ts is likely imported by many files
const typesPath = path.join(testProjectRoot, 'ts/context/types.ts');
// A test file is likely imported by fewer files
const testPath = path.join(testProjectRoot, 'test/test.basic.node.ts');
const metadata: IFileMetadata[] = [
{
path: typesPath,
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
},
{
path: testPath,
relativePath: 'test/test.basic.node.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
const typesDeps = result.dependencyGraph.get(typesPath);
const testDeps = result.dependencyGraph.get(testPath);
if (typesDeps && testDeps) {
// types.ts should generally have higher centrality due to being imported more
expect(typesDeps.centrality).toBeGreaterThanOrEqual(0);
expect(testDeps.centrality).toBeGreaterThanOrEqual(0);
}
});
tap.test('ContextAnalyzer should provide reason for scoring', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/index.ts'),
relativePath: 'ts/index.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
expect(result.files[0].reason).toBeDefined();
expect(result.files[0].reason!.length).toBeGreaterThan(0);
});
tap.test('ContextAnalyzer should handle empty metadata array', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const result = await analyzer.analyze([], 'readme');
expect(result.files.length).toEqual(0);
expect(result.totalFiles).toEqual(0);
expect(result.dependencyGraph.size).toEqual(0);
});
tap.test('ContextAnalyzer should respect custom tier configuration', async () => {
const analyzer = new ContextAnalyzer(
testProjectRoot,
{},
{
essential: { minScore: 0.9, trimLevel: 'none' },
important: { minScore: 0.7, trimLevel: 'light' },
optional: { minScore: 0.5, trimLevel: 'aggressive' }
}
);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}
];
const result = await analyzer.analyze(metadata, 'readme');
// Should use custom tier thresholds
const file = result.files[0];
expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});
tap.test('ContextAnalyzer should calculate combined importance score from all factors', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot, {
dependencyWeight: 0.25,
relevanceWeight: 0.25,
efficiencyWeight: 0.25,
recencyWeight: 0.25
});
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'ts/context/types.ts'),
relativePath: 'ts/context/types.ts',
size: 5000,
mtime: Date.now(),
estimatedTokens: 1250
}
];
const result = await analyzer.analyze(metadata, 'readme');
const file = result.files[0];
// Importance score should be weighted sum of all factors
// With equal weights (0.25 each), importance should be average of all scores
const expectedImportance =
(file.relevanceScore * 0.25) +
(file.centralityScore * 0.25) +
(file.efficiencyScore * 0.25) +
(file.recencyScore * 0.25);
expect(file.importanceScore).toBeCloseTo(expectedImportance, 2);
});
tap.test('ContextAnalyzer should complete analysis within reasonable time', async () => {
const analyzer = new ContextAnalyzer(testProjectRoot);
const metadata: IFileMetadata[] = Array.from({ length: 10 }, (_, i) => ({
path: path.join(testProjectRoot, `ts/file${i}.ts`),
relativePath: `ts/file${i}.ts`,
size: 3000,
mtime: Date.now(),
estimatedTokens: 750
}));
const startTime = Date.now();
const result = await analyzer.analyze(metadata, 'readme');
const endTime = Date.now();
const duration = endTime - startTime;
// Analysis duration should be recorded (can be 0 for fast operations)
expect(result.analysisDuration).toBeGreaterThanOrEqual(0);
expect(duration).toBeLessThan(10000); // Should complete within 10 seconds
});
export default tap.start();


@@ -1,465 +0,0 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import * as fs from 'fs';
import { ContextCache } from '../ts/context/context-cache.js';
import type { ICacheEntry } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
const testCacheDir = path.join(testProjectRoot, '.nogit', 'test-cache');
// Helper to clean up test cache directory
async function cleanupTestCache() {
try {
await fs.promises.rm(testCacheDir, { recursive: true, force: true });
} catch (error) {
// Ignore if directory doesn't exist
}
}
tap.test('ContextCache should create instance with default config', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
expect(cache).toBeInstanceOf(ContextCache);
await cleanupTestCache();
});
tap.test('ContextCache.init should create cache directory', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
// Check that cache directory was created
const exists = await fs.promises.access(testCacheDir).then(() => true).catch(() => false);
expect(exists).toEqual(true);
await cleanupTestCache();
});
tap.test('ContextCache.set should store cache entry', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
// Get actual file mtime for validation to work
const stats = await fs.promises.stat(testPath);
const fileMtime = Math.floor(stats.mtimeMs);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: fileMtime,
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get(testPath);
expect(retrieved).toBeDefined();
expect(retrieved!.contents).toEqual('test content');
expect(retrieved!.tokenCount).toEqual(100);
await cleanupTestCache();
});
tap.test('ContextCache.get should return null for non-existent entry', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const retrieved = await cache.get('/non/existent/path.ts');
expect(retrieved).toBeNull();
await cleanupTestCache();
});
tap.test('ContextCache.get should invalidate expired entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true,
ttl: 1 // 1 second TTL
});
await cache.init();
const testPath = path.join(testProjectRoot, 'test-file.ts');
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 2000 // Cached 2 seconds ago (expired)
};
await cache.set(entry);
// Wait a bit to ensure expiration logic runs
await new Promise(resolve => setTimeout(resolve, 100));
const retrieved = await cache.get(testPath);
expect(retrieved).toBeNull(); // Should be expired
await cleanupTestCache();
});
tap.test('ContextCache.get should invalidate entries when file mtime changes', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const oldMtime = Math.floor(stats.mtimeMs);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: oldMtime - 1000, // Old mtime (file has changed)
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get(testPath);
expect(retrieved).toBeNull(); // Should be invalidated due to mtime mismatch
await cleanupTestCache();
});
tap.test('ContextCache.has should check if file is cached and valid', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const entry: ICacheEntry = {
path: testPath,
contents: 'test content',
tokenCount: 100,
mtime: Math.floor(stats.mtimeMs),
cachedAt: Date.now()
};
await cache.set(entry);
const hasIt = await cache.has(testPath);
expect(hasIt).toEqual(true);
const doesNotHaveIt = await cache.has('/non/existent/path.ts');
expect(doesNotHaveIt).toEqual(false);
await cleanupTestCache();
});
tap.test('ContextCache.setMany should store multiple entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: 'content 1',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
},
{
path: '/test/file2.ts',
contents: 'content 2',
tokenCount: 200,
mtime: Date.now(),
cachedAt: Date.now()
}
];
await cache.setMany(entries);
const stats = cache.getStats();
expect(stats.entries).toBeGreaterThanOrEqual(2);
await cleanupTestCache();
});
tap.test('ContextCache.getStats should return cache statistics', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content with some length',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const stats = cache.getStats();
expect(stats.entries).toEqual(1);
expect(stats.totalSize).toBeGreaterThan(0);
expect(stats.oldestEntry).toBeDefined();
expect(stats.newestEntry).toBeDefined();
await cleanupTestCache();
});
tap.test('ContextCache.clear should clear all entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
expect(cache.getStats().entries).toEqual(1);
await cache.clear();
expect(cache.getStats().entries).toEqual(0);
await cleanupTestCache();
});
tap.test('ContextCache.clearPaths should clear specific entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: 'content 1',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
},
{
path: '/test/file2.ts',
contents: 'content 2',
tokenCount: 200,
mtime: Date.now(),
cachedAt: Date.now()
}
];
await cache.setMany(entries);
expect(cache.getStats().entries).toEqual(2);
await cache.clearPaths(['/test/file1.ts']);
expect(cache.getStats().entries).toEqual(1);
await cleanupTestCache();
});
tap.test('ContextCache should enforce max size by evicting oldest entries', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true,
maxSize: 0.001 // Very small: 0.001 MB = 1KB
});
await cache.init();
// Add entries that exceed the max size
const largeContent = 'x'.repeat(500); // 500 bytes
const entries: ICacheEntry[] = [
{
path: '/test/file1.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 3000 // Oldest
},
{
path: '/test/file2.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 2000
},
{
path: '/test/file3.ts',
contents: largeContent,
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now() - 1000 // Newest
}
];
await cache.setMany(entries);
const stats = cache.getStats();
// Should have evicted oldest entries to stay under size limit
expect(stats.totalSize).toBeLessThanOrEqual(1024); // 1KB
await cleanupTestCache();
});
tap.test('ContextCache should not cache when disabled', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: false
});
await cache.init();
const entry: ICacheEntry = {
path: '/test/file.ts',
contents: 'test content',
tokenCount: 100,
mtime: Date.now(),
cachedAt: Date.now()
};
await cache.set(entry);
const retrieved = await cache.get('/test/file.ts');
expect(retrieved).toBeNull();
await cleanupTestCache();
});
tap.test('ContextCache should persist to disk and reload', async () => {
await cleanupTestCache();
// Create first cache instance and add entry
const cache1 = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache1.init();
// Use a real file that exists so validation passes
const testPath = path.join(testProjectRoot, 'package.json');
const stats = await fs.promises.stat(testPath);
const fileMtime = Math.floor(stats.mtimeMs);
const entry: ICacheEntry = {
path: testPath,
contents: 'persistent content',
tokenCount: 150,
mtime: fileMtime,
cachedAt: Date.now()
};
await cache1.set(entry);
// Wait for persist
await new Promise(resolve => setTimeout(resolve, 500));
// Create second cache instance (should reload from disk)
const cache2 = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache2.init();
const cacheStats = cache2.getStats();
expect(cacheStats.entries).toBeGreaterThan(0);
await cleanupTestCache();
});
tap.test('ContextCache should handle invalid cache index gracefully', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
// Create cache dir manually
await fs.promises.mkdir(testCacheDir, { recursive: true });
// Write invalid JSON to cache index
const cacheIndexPath = path.join(testCacheDir, 'index.json');
await fs.promises.writeFile(cacheIndexPath, 'invalid json {', 'utf-8');
// Should not throw, should just start with empty cache
await cache.init();
const stats = cache.getStats();
expect(stats.entries).toEqual(0);
await cleanupTestCache();
});
tap.test('ContextCache should return proper stats for empty cache', async () => {
await cleanupTestCache();
const cache = new ContextCache(testProjectRoot, {
directory: testCacheDir,
enabled: true
});
await cache.init();
const stats = cache.getStats();
expect(stats.entries).toEqual(0);
expect(stats.totalSize).toEqual(0);
expect(stats.oldestEntry).toBeNull();
expect(stats.newestEntry).toBeNull();
await cleanupTestCache();
});
export default tap.start();

test/test.diffprocessor.node.ts (new file)

@@ -0,0 +1,304 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import { DiffProcessor } from '../ts/classes.diffprocessor.js';
// Sample diff strings for testing
const createSmallDiff = (filepath: string, addedLines = 5, removedLines = 3): string => {
const lines: string[] = [];
lines.push(`--- a/${filepath}`);
lines.push(`+++ b/${filepath}`);
lines.push(`@@ -1,10 +1,12 @@`);
for (let i = 0; i < removedLines; i++) {
lines.push(`-removed line ${i + 1}`);
}
for (let i = 0; i < addedLines; i++) {
lines.push(`+added line ${i + 1}`);
}
lines.push(' unchanged line');
return lines.join('\n');
};
const createMediumDiff = (filepath: string): string => {
const lines: string[] = [];
lines.push(`--- a/${filepath}`);
lines.push(`+++ b/${filepath}`);
lines.push(`@@ -1,100 +1,150 @@`);
// 150 lines of changes
for (let i = 0; i < 75; i++) {
lines.push(`+added line ${i + 1}`);
}
for (let i = 0; i < 75; i++) {
lines.push(`-removed line ${i + 1}`);
}
return lines.join('\n');
};
const createLargeDiff = (filepath: string): string => {
const lines: string[] = [];
lines.push(`--- a/${filepath}`);
lines.push(`+++ b/${filepath}`);
lines.push(`@@ -1,1000 +1,1500 @@`);
// 2500 lines of changes
for (let i = 0; i < 1250; i++) {
lines.push(`+added line ${i + 1}`);
}
for (let i = 0; i < 1250; i++) {
lines.push(`-removed line ${i + 1}`);
}
return lines.join('\n');
};
const createDeletedFileDiff = (filepath: string): string => {
return `--- a/${filepath}
+++ /dev/null
@@ -1,5 +0,0 @@
-deleted line 1
-deleted line 2
-deleted line 3
-deleted line 4
-deleted line 5`;
};
const createAddedFileDiff = (filepath: string): string => {
return `--- /dev/null
+++ b/${filepath}
@@ -0,0 +1,5 @@
+added line 1
+added line 2
+added line 3
+added line 4
+added line 5`;
};
tap.test('DiffProcessor should parse small diff correctly', async () => {
const processor = new DiffProcessor();
const smallDiff = createSmallDiff('src/test.ts', 5, 3);
const result = processor.processDiffs([smallDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(1);
expect(result.summarizedDiffs.length).toEqual(0);
expect(result.metadataOnly.length).toEqual(0);
expect(result.totalTokens).toBeGreaterThan(0);
});
tap.test('DiffProcessor should summarize medium diff', async () => {
const processor = new DiffProcessor();
const mediumDiff = createMediumDiff('src/medium-file.ts');
const result = processor.processDiffs([mediumDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(0);
expect(result.summarizedDiffs.length).toEqual(1);
expect(result.metadataOnly.length).toEqual(0);
// Verify the summarized diff contains the sample
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('SUMMARIZED DIFFS');
expect(formatted).toInclude('lines omitted');
});
tap.test('DiffProcessor should handle large diff as metadata only', async () => {
const processor = new DiffProcessor();
const largeDiff = createLargeDiff('dist/bundle.js');
const result = processor.processDiffs([largeDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(0);
expect(result.summarizedDiffs.length).toEqual(0);
expect(result.metadataOnly.length).toEqual(1);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('METADATA ONLY');
expect(formatted).toInclude('dist/bundle.js');
});
tap.test('DiffProcessor should prioritize source files over build artifacts', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('dist/bundle.js'),
createSmallDiff('src/important.ts'),
createSmallDiff('build/output.js'),
createSmallDiff('src/core.ts'),
];
const result = processor.processDiffs(diffs);
expect(result.totalFiles).toEqual(4);
// Source files should be included fully first
const formatted = processor.formatForContext(result);
const srcImportantIndex = formatted.indexOf('src/important.ts');
const srcCoreIndex = formatted.indexOf('src/core.ts');
const distBundleIndex = formatted.indexOf('dist/bundle.js');
const buildOutputIndex = formatted.indexOf('build/output.js');
// Source files should appear before build artifacts
expect(srcImportantIndex).toBeLessThan(distBundleIndex);
expect(srcCoreIndex).toBeLessThan(buildOutputIndex);
});
tap.test('DiffProcessor should respect token budget', async () => {
const processor = new DiffProcessor({
maxDiffTokens: 500, // Very small budget to force metadata-only
});
// Create multiple large diffs that will exceed budget
const diffs = [
createLargeDiff('src/file1.ts'),
createLargeDiff('src/file2.ts'),
createLargeDiff('src/file3.ts'),
createLargeDiff('src/file4.ts'),
];
const result = processor.processDiffs(diffs);
expect(result.totalTokens).toBeLessThanOrEqual(500);
// With such a small budget and large files, most should be metadata only
expect(result.metadataOnly.length).toBeGreaterThanOrEqual(2);
});
tap.test('DiffProcessor should handle deleted files', async () => {
const processor = new DiffProcessor();
const deletedDiff = createDeletedFileDiff('src/old-file.ts');
const result = processor.processDiffs([deletedDiff]);
expect(result.totalFiles).toEqual(1);
// Small deleted file should be included fully
expect(result.fullDiffs.length).toEqual(1);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('src/old-file.ts');
// Verify the file appears in the output
expect(formatted).toInclude('FULL DIFFS');
});
tap.test('DiffProcessor should handle added files', async () => {
const processor = new DiffProcessor();
const addedDiff = createAddedFileDiff('src/new-file.ts');
const result = processor.processDiffs([addedDiff]);
expect(result.totalFiles).toEqual(1);
// Small added file should be included fully
expect(result.fullDiffs.length).toEqual(1);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('src/new-file.ts');
// Verify the file appears in the output
expect(formatted).toInclude('FULL DIFFS');
});
tap.test('DiffProcessor should handle mixed file sizes', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('src/small.ts'),
createMediumDiff('src/medium.ts'),
createLargeDiff('dist/large.js'),
];
const result = processor.processDiffs(diffs);
expect(result.totalFiles).toEqual(3);
expect(result.fullDiffs.length).toEqual(1); // small file
expect(result.summarizedDiffs.length).toEqual(1); // medium file
expect(result.metadataOnly.length).toEqual(1); // large file
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('FULL DIFFS (1 files)');
expect(formatted).toInclude('SUMMARIZED DIFFS (1 files)');
expect(formatted).toInclude('METADATA ONLY (1 files)');
});
tap.test('DiffProcessor should handle empty diff array', async () => {
const processor = new DiffProcessor();
const result = processor.processDiffs([]);
expect(result.totalFiles).toEqual(0);
expect(result.fullDiffs.length).toEqual(0);
expect(result.summarizedDiffs.length).toEqual(0);
expect(result.metadataOnly.length).toEqual(0);
expect(result.totalTokens).toEqual(0);
});
tap.test('DiffProcessor should generate comprehensive summary', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('src/file1.ts'),
createSmallDiff('src/file2.ts'),
createMediumDiff('src/file3.ts'),
createLargeDiff('dist/bundle.js'),
];
const result = processor.processDiffs(diffs);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('GIT DIFF SUMMARY');
expect(formatted).toInclude('Files changed: 4 total');
expect(formatted).toInclude('included in full');
expect(formatted).toInclude('summarized');
expect(formatted).toInclude('metadata only');
expect(formatted).toInclude('Estimated tokens:');
expect(formatted).toInclude('END OF GIT DIFF');
});
tap.test('DiffProcessor should handle custom options', async () => {
const processor = new DiffProcessor({
maxDiffTokens: 50000,
smallFileLines: 30,
mediumFileLines: 150,
sampleHeadLines: 10,
sampleTailLines: 10,
});
const mediumDiff = createMediumDiff('src/file.ts'); // 150 lines
const result = processor.processDiffs([mediumDiff]);
// With custom settings, this should be summarized (exactly at the mediumFileLines threshold)
expect(result.summarizedDiffs.length).toEqual(1);
});
tap.test('DiffProcessor should prioritize test files appropriately', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('src/core.ts'),
createSmallDiff('test/core.test.ts'),
createSmallDiff('config.json'),
];
const result = processor.processDiffs(diffs);
const formatted = processor.formatForContext(result);
// Source files should come before test files
const srcIndex = formatted.indexOf('src/core.ts');
const testIndex = formatted.indexOf('test/core.test.ts');
expect(srcIndex).toBeLessThan(testIndex);
});
tap.test('DiffProcessor should handle files with no changes gracefully', async () => {
const processor = new DiffProcessor();
const emptyDiff = `--- a/src/file.ts
+++ b/src/file.ts
@@ -1,1 +1,1 @@`;
const result = processor.processDiffs([emptyDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(1); // Still included as a small file
});
export default tap.start();
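The createSmallDiff, createMediumDiff, and createLargeDiff helpers are defined earlier in this test file. For orientation, a fixture of that kind could look like the sketch below; the helper name, line counts, and hunk header here are assumptions, not the original implementation.

// Hypothetical fixture sketch: fabricates a unified diff with N added lines.
function createDiffFixture(filepath: string, addedLines: number): string {
  const body = Array.from({ length: addedLines }, (_, i) => `+const line${i} = ${i};`);
  return [
    `--- a/${filepath}`,
    `+++ b/${filepath}`,
    `@@ -0,0 +1,${addedLines} @@`,
    ...body,
  ].join('\n');
}
// e.g. createDiffFixture('src/small.ts', 10) stays under the default 50-line "small" threshold.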

View File

@@ -1,147 +0,0 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { IterativeContextBuilder } from '../ts/context/iterative-context-builder.js';
import type { IIterativeConfig, TaskType } from '../ts/context/types.js';
import * as qenv from '@push.rocks/qenv';
// Test project directory
const testProjectRoot = path.join(process.cwd());
// Helper to check if OPENAI_TOKEN is available
async function hasOpenAIToken(): Promise<boolean> {
try {
const qenvInstance = new qenv.Qenv();
const token = await qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN');
return !!token;
} catch (error) {
return false;
}
}
tap.test('IterativeContextBuilder should create instance with default config', async () => {
const builder = new IterativeContextBuilder(testProjectRoot);
expect(builder).toBeInstanceOf(IterativeContextBuilder);
});
tap.test('IterativeContextBuilder should create instance with custom config', async () => {
const customConfig: Partial<IIterativeConfig> = {
maxIterations: 3,
firstPassFileLimit: 5,
subsequentPassFileLimit: 3,
temperature: 0.5,
model: 'gpt-4',
};
const builder = new IterativeContextBuilder(testProjectRoot, customConfig);
expect(builder).toBeInstanceOf(IterativeContextBuilder);
});
tap.test('IterativeContextBuilder should initialize successfully', async () => {
if (!(await hasOpenAIToken())) {
console.log('⚠️ Skipping initialization test - OPENAI_TOKEN not available');
return;
}
const builder = new IterativeContextBuilder(testProjectRoot);
await builder.initialize();
// If we get here without error, initialization succeeded
expect(true).toEqual(true);
});
tap.test('IterativeContextBuilder should build context iteratively for readme task', async () => {
if (!(await hasOpenAIToken())) {
console.log('⚠️ Skipping iterative build test - OPENAI_TOKEN not available');
return;
}
const builder = new IterativeContextBuilder(testProjectRoot, {
maxIterations: 2, // Limit iterations for testing
firstPassFileLimit: 3,
subsequentPassFileLimit: 2,
});
await builder.initialize();
const result = await builder.buildContextIteratively('readme');
// Verify result structure
expect(result).toBeTypeOf('object');
expect(result.context).toBeTypeOf('string');
expect(result.context.length).toBeGreaterThan(0);
expect(result.tokenCount).toBeTypeOf('number');
expect(result.tokenCount).toBeGreaterThan(0);
expect(result.includedFiles).toBeInstanceOf(Array);
expect(result.includedFiles.length).toBeGreaterThan(0);
expect(result.iterationCount).toBeTypeOf('number');
expect(result.iterationCount).toBeGreaterThan(0);
expect(result.iterationCount).toBeLessThanOrEqual(2);
expect(result.iterations).toBeInstanceOf(Array);
expect(result.iterations.length).toEqual(result.iterationCount);
expect(result.apiCallCount).toBeTypeOf('number');
expect(result.apiCallCount).toBeGreaterThan(0);
expect(result.totalDuration).toBeTypeOf('number');
expect(result.totalDuration).toBeGreaterThan(0);
// Verify iteration structure
for (const iteration of result.iterations) {
expect(iteration.iteration).toBeTypeOf('number');
expect(iteration.filesLoaded).toBeInstanceOf(Array);
expect(iteration.tokensUsed).toBeTypeOf('number');
expect(iteration.totalTokensUsed).toBeTypeOf('number');
expect(iteration.decision).toBeTypeOf('object');
expect(iteration.duration).toBeTypeOf('number');
}
console.log(`✅ Iterative context build completed:`);
console.log(` Iterations: ${result.iterationCount}`);
console.log(` Files: ${result.includedFiles.length}`);
console.log(` Tokens: ${result.tokenCount}`);
console.log(` API calls: ${result.apiCallCount}`);
console.log(` Duration: ${(result.totalDuration / 1000).toFixed(2)}s`);
});
tap.test('IterativeContextBuilder should respect token budget', async () => {
if (!(await hasOpenAIToken())) {
console.log('⚠️ Skipping token budget test - OPENAI_TOKEN not available');
return;
}
const builder = new IterativeContextBuilder(testProjectRoot, {
maxIterations: 5,
});
await builder.initialize();
const result = await builder.buildContextIteratively('description');
// Token count should not exceed budget significantly (allow 5% margin for safety)
const configManager = (await import('../ts/context/config-manager.js')).ConfigManager.getInstance();
const maxTokens = configManager.getMaxTokens();
expect(result.tokenCount).toBeLessThanOrEqual(maxTokens * 1.05);
console.log(`✅ Token budget respected: ${result.tokenCount}/${maxTokens}`);
});
tap.test('IterativeContextBuilder should work with different task types', async () => {
if (!(await hasOpenAIToken())) {
console.log('⚠️ Skipping task types test - OPENAI_TOKEN not available');
return;
}
const taskTypes: TaskType[] = ['readme', 'description', 'commit'];
for (const taskType of taskTypes) {
const builder = new IterativeContextBuilder(testProjectRoot, {
maxIterations: 2,
firstPassFileLimit: 2,
});
await builder.initialize();
const result = await builder.buildContextIteratively(taskType);
expect(result.includedFiles.length).toBeGreaterThan(0);
console.log(`${taskType}: ${result.includedFiles.length} files, ${result.tokenCount} tokens`);
}
});
export default tap.start();

View File

@@ -1,243 +0,0 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { LazyFileLoader } from '../ts/context/lazy-file-loader.js';
import type { IFileMetadata } from '../ts/context/types.js';
const testProjectRoot = process.cwd();
tap.test('LazyFileLoader should create instance with project root', async () => {
const loader = new LazyFileLoader(testProjectRoot);
expect(loader).toBeInstanceOf(LazyFileLoader);
});
tap.test('LazyFileLoader.getMetadata should return file metadata without loading contents', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata = await loader.getMetadata(packageJsonPath);
expect(metadata.path).toEqual(packageJsonPath);
expect(metadata.relativePath).toEqual('package.json');
expect(metadata.size).toBeGreaterThan(0);
expect(metadata.mtime).toBeGreaterThan(0);
expect(metadata.estimatedTokens).toBeGreaterThan(0);
// Rough estimate: size / 4 (with reasonable tolerance)
expect(metadata.estimatedTokens).toBeGreaterThan(metadata.size / 5);
expect(metadata.estimatedTokens).toBeLessThan(metadata.size / 3);
});
tap.test('LazyFileLoader.getMetadata should cache metadata for same file', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata1 = await loader.getMetadata(packageJsonPath);
const metadata2 = await loader.getMetadata(packageJsonPath);
// Should return identical metadata from cache
expect(metadata1.mtime).toEqual(metadata2.mtime);
expect(metadata1.size).toEqual(metadata2.size);
expect(metadata1.estimatedTokens).toEqual(metadata2.estimatedTokens);
});
tap.test('LazyFileLoader.scanFiles should scan TypeScript files', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles(['ts/context/types.ts']);
expect(metadata.length).toBeGreaterThan(0);
const typesFile = metadata.find(m => m.relativePath.includes('types.ts'));
expect(typesFile).toBeDefined();
expect(typesFile!.size).toBeGreaterThan(0);
expect(typesFile!.estimatedTokens).toBeGreaterThan(0);
});
tap.test('LazyFileLoader.scanFiles should handle multiple globs', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles([
'package.json',
'readme.md'
]);
expect(metadata.length).toBeGreaterThanOrEqual(2);
const hasPackageJson = metadata.some(m => m.relativePath === 'package.json');
const hasReadme = metadata.some(m => m.relativePath.toLowerCase() === 'readme.md');
expect(hasPackageJson).toEqual(true);
expect(hasReadme).toEqual(true);
});
tap.test('LazyFileLoader.loadFile should load file with actual token count', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
expect(fileInfo.path).toEqual(packageJsonPath);
expect(fileInfo.contents).toBeDefined();
expect(fileInfo.contents.length).toBeGreaterThan(0);
expect(fileInfo.tokenCount).toBeGreaterThan(0);
expect(fileInfo.relativePath).toEqual('package.json');
});
tap.test('LazyFileLoader.loadFiles should load multiple files in parallel', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'package.json'),
relativePath: 'package.json',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
},
{
path: path.join(testProjectRoot, 'readme.md'),
relativePath: 'readme.md',
size: 200,
mtime: Date.now(),
estimatedTokens: 50
}
];
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const startTime = Date.now();
const files = await loader.loadFiles(metadata, tokenizer);
const endTime = Date.now();
expect(files.length).toEqual(2);
expect(files[0].contents).toBeDefined();
expect(files[1].contents).toBeDefined();
// Should be fast (parallel loading)
expect(endTime - startTime).toBeLessThan(5000); // 5 seconds max
});
tap.test('LazyFileLoader.updateImportanceScores should update cached metadata', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
// Get initial metadata
await loader.getMetadata(packageJsonPath);
// Update importance scores
const scores = new Map<string, number>();
scores.set(packageJsonPath, 0.95);
loader.updateImportanceScores(scores);
// Check cached metadata has updated score
const cached = loader.getCachedMetadata();
const packageJsonMeta = cached.find(m => m.path === packageJsonPath);
expect(packageJsonMeta).toBeDefined();
expect(packageJsonMeta!.importanceScore).toEqual(0.95);
});
tap.test('LazyFileLoader.getTotalEstimatedTokens should sum all cached metadata tokens', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan some files
await loader.scanFiles(['package.json', 'readme.md']);
const totalTokens = loader.getTotalEstimatedTokens();
expect(totalTokens).toBeGreaterThan(0);
});
tap.test('LazyFileLoader.clearCache should clear metadata cache', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan files to populate cache
await loader.scanFiles(['package.json']);
expect(loader.getCachedMetadata().length).toBeGreaterThan(0);
// Clear cache
loader.clearCache();
expect(loader.getCachedMetadata().length).toEqual(0);
});
tap.test('LazyFileLoader.getCachedMetadata should return all cached entries', async () => {
const loader = new LazyFileLoader(testProjectRoot);
// Scan files
await loader.scanFiles(['package.json', 'readme.md']);
const cached = loader.getCachedMetadata();
expect(cached.length).toBeGreaterThanOrEqual(2);
expect(cached.every(m => m.path && m.size && m.estimatedTokens)).toEqual(true);
});
tap.test('LazyFileLoader should handle non-existent files gracefully', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const nonExistentPath = path.join(testProjectRoot, 'this-file-does-not-exist.ts');
try {
await loader.getMetadata(nonExistentPath);
expect(false).toEqual(true); // Should not reach here
} catch (error) {
expect(error).toBeDefined();
}
});
tap.test('LazyFileLoader.loadFiles should filter out failed file loads', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata: IFileMetadata[] = [
{
path: path.join(testProjectRoot, 'package.json'),
relativePath: 'package.json',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
},
{
path: path.join(testProjectRoot, 'non-existent-file.txt'),
relativePath: 'non-existent-file.txt',
size: 100,
mtime: Date.now(),
estimatedTokens: 25
}
];
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const files = await loader.loadFiles(metadata, tokenizer);
// Should only include the successfully loaded file
expect(files.length).toEqual(1);
expect(files[0].relativePath).toEqual('package.json');
});
tap.test('LazyFileLoader should handle glob patterns for TypeScript source files', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const metadata = await loader.scanFiles(['ts/context/*.ts']);
expect(metadata.length).toBeGreaterThan(0);
// Should find multiple context files
const hasEnhancedContext = metadata.some(m => m.relativePath.includes('enhanced-context.ts'));
const hasTypes = metadata.some(m => m.relativePath.includes('types.ts'));
expect(hasEnhancedContext).toEqual(true);
expect(hasTypes).toEqual(true);
});
tap.test('LazyFileLoader should estimate tokens reasonably accurately', async () => {
const loader = new LazyFileLoader(testProjectRoot);
const packageJsonPath = path.join(testProjectRoot, 'package.json');
const metadata = await loader.getMetadata(packageJsonPath);
const tokenizer = (content: string) => Math.ceil(content.length / 4);
const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);
// Estimated tokens should be close to actual (within reasonable range)
const difference = Math.abs(metadata.estimatedTokens - fileInfo.tokenCount);
const percentDiff = (difference / fileInfo.tokenCount) * 100;
// Should be within 20% accuracy (since it's just an estimate)
expect(percentDiff).toBeLessThan(20);
});
export default tap.start();
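Taken together, these tests imply a scan → score → load pipeline. A minimal sketch of that flow, assuming the same chars/4 tokenizer used above; the glob patterns and the scoring rule are illustrative only.

// Hypothetical end-to-end flow combining the methods exercised above:
const flowLoader = new LazyFileLoader(process.cwd());
const scanned = await flowLoader.scanFiles(['package.json', 'ts/**/*.ts']);
const scores = new Map(
  scanned.map(m => [m.path, m.relativePath.startsWith('ts/') ? 0.9 : 0.5] as [string, number]),
);
flowLoader.updateImportanceScores(scores);
const loaded = await flowLoader.loadFiles(scanned, (content) => Math.ceil(content.length / 4));
console.log(`Loaded ${loaded.length} files, ~${flowLoader.getTotalEstimatedTokens()} estimated tokens`);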

View File

@@ -1,8 +0,0 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as tsdoc from '../ts/index.js';
tap.test('first test', async () => {
console.log('test');
});
tap.start();

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@git.zone/tsdoc',
version: '1.8.0',
version: '1.11.0',
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
}

View File

@@ -1,6 +1,8 @@
import * as plugins from '../plugins.js';
import { AiDoc } from '../classes.aidoc.js';
import { ProjectContext } from './projectcontext.js';
import { DiffProcessor } from '../classes.diffprocessor.js';
import { logger } from '../logging.js';
export interface INextCommitObject {
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
@@ -27,41 +29,126 @@ export class Commit {
smartgitInstance,
this.projectDir
);
const diffStringArray = await gitRepo.getUncommittedDiff([
// Define comprehensive exclusion patterns
// smartgit@3.3.0+ supports glob patterns natively
const excludePatterns = [
// Lock files
'pnpm-lock.yaml',
'package-lock.json',
]);
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(
this.projectDir,
this.aiDocsRef.openaiInstance
);
await taskContextFactory.initialize();
// Generate context specifically for commit task
const contextResult = await taskContextFactory.createContextForCommit(
diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'
);
// Get the optimized context string
let contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// Check for token overflow against model limits
const MODEL_TOKEN_LIMIT = 200000; // o4-mini
if (contextResult.tokenCount > MODEL_TOKEN_LIMIT * 0.9) {
console.log(`⚠️ Warning: Context size (${contextResult.tokenCount} tokens) is close to or exceeds model limit (${MODEL_TOKEN_LIMIT} tokens).`);
console.log(`The model may not be able to process all information effectively.`);
'npm-shrinkwrap.json',
'yarn.lock',
'deno.lock',
'bun.lockb',
// Build artifacts (main culprit for large diffs!)
'dist/**',
'dist_*/**', // dist_ts, dist_web, etc.
'build/**',
'.next/**',
'out/**',
'public/dist/**',
// Compiled/bundled files
'**/*.js.map',
'**/*.d.ts.map',
'**/*.min.js',
'**/*.bundle.js',
'**/*.chunk.js',
// IDE/Editor directories
'.claude/**',
'.cursor/**',
'.vscode/**',
'.idea/**',
'**/*.swp',
'**/*.swo',
// Logs and caches
'.nogit/**',
'**/*.log',
'.cache/**',
'.rpt2_cache/**',
'coverage/**',
'.nyc_output/**',
];
// Pass glob patterns directly to smartgit - it handles matching internally
const diffStringArray = await gitRepo.getUncommittedDiff(excludePatterns);
// Process diffs intelligently using DiffProcessor
let processedDiffString: string;
if (diffStringArray.length > 0) {
// Diagnostic logging for raw diff statistics
const totalChars = diffStringArray.join('\n\n').length;
const estimatedTokens = Math.ceil(totalChars / 4);
console.log(`📊 Raw git diff statistics:`);
console.log(` Files changed: ${diffStringArray.length}`);
console.log(` Total characters: ${totalChars.toLocaleString()}`);
console.log(` Estimated tokens: ${estimatedTokens.toLocaleString()}`);
console.log(` Exclusion patterns: ${excludePatterns.length}`);
// Use DiffProcessor to intelligently handle large diffs
const diffProcessor = new DiffProcessor({
maxDiffTokens: 100000, // Reserve 100k tokens for diffs
smallFileLines: 300, // Most source files are under 300 lines
mediumFileLines: 800, // Only very large files get head/tail treatment
sampleHeadLines: 75, // When sampling, show more context
sampleTailLines: 75, // When sampling, show more context
});
const processedDiff = diffProcessor.processDiffs(diffStringArray);
processedDiffString = diffProcessor.formatForContext(processedDiff);
console.log(`📝 Processed diff statistics:`);
console.log(` Full diffs: ${processedDiff.fullDiffs.length} files`);
console.log(` Summarized: ${processedDiff.summarizedDiffs.length} files`);
console.log(` Metadata only: ${processedDiff.metadataOnly.length} files`);
console.log(` Final tokens: ${processedDiff.totalTokens.toLocaleString()}`);
if (estimatedTokens > 50000) {
console.log(`✅ DiffProcessor reduced token usage: ${estimatedTokens.toLocaleString()} → ${processedDiff.totalTokens.toLocaleString()}`);
}
} else {
processedDiffString = 'No changes.';
}
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
// Use DualAgentOrchestrator for commit message generation
// Note: No filesystem tool needed - the diff already contains all change information
const commitOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
logPrefix: '[Commit]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate commit messages for semantic versioning compliance.
APPROVE if:
- Version level (fix/feat/BREAKING CHANGE) matches the scope of changes in the diff
- Commit message is clear, professional, and follows conventional commit conventions
- No personal information, licensing details, or AI mentions (Claude/Codex) included
- JSON structure is valid with all required fields
- Scope accurately reflects the changed modules/files
REJECT with specific feedback if:
- Version level doesn't match the scope of changes (e.g., "feat" for a typo fix should be "fix")
- Message is vague, unprofessional, or contains sensitive information
- JSON is malformed or missing required fields
`,
});
await commitOrchestrator.start();
const commitTaskPrompt = `
You create a commit message for a git commit.
The commit message should be based on the files in the project.
You should not include any licensing information.
You should not include any personal information.
Project directory: ${this.projectDir}
Analyze the git diff below to understand what changed and generate a commit message.
You should not include any licensing information or personal information.
Never mention CLAUDE code, or codex.
Important: Answer only in valid JSON.
@@ -70,41 +157,70 @@ Your answer should be parseable with JSON.parse() without modifying anything.
Here is the structure of the JSON you should return:
interface {
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level
recommendedNextVersionScope: string; // scope name like "core", "cli", or specific class names
recommendedNextVersionMessage: string; // the commit message (don't include fix/feat prefix)
recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
recommendedNextVersion: string; // the recommended next version of the project, x.x.x
recommendedNextVersion: string; // the recommended next version x.x.x
}
For the recommendedNextVersionDetails, please only add a detail entries to the array if it has an obvious value to the reader.
For recommendedNextVersionDetails, only add entries that have obvious value to the reader.
You are being given the files of the project. You should use them to create the commit message.
Also you are given a diff.
Never mention CLAUDE code, or codex.
`,
messageHistory: [],
userMessage: contextString,
});
Here is the git diff showing what changed:
${processedDiffString}
Generate the commit message based on these changes.
`;
const commitResult = await commitOrchestrator.run(commitTaskPrompt);
await commitOrchestrator.stop();
if (!commitResult.success) {
throw new Error(`Commit message generation failed: ${commitResult.status}`);
}
// console.log(result.message);
const resultObject: INextCommitObject = JSON.parse(
result.message.replace('```json', '').replace('```', '')
commitResult.result.replace('```json', '').replace('```', '')
);
const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
let previousChangelog: plugins.smartfile.SmartFile;
if (await plugins.smartfile.fs.fileExists(previousChangelogPath)) {
previousChangelog = await plugins.smartfile.SmartFile.fromFilePath(previousChangelogPath);
if (await plugins.fsInstance.file(previousChangelogPath).exists()) {
previousChangelog = await plugins.smartfileFactory.fromFilePath(previousChangelogPath);
}
if (!previousChangelog) {
// Let's build the changelog based on the commit messages
const commitMessages = await gitRepo.getAllCommitMessages();
console.log(JSON.stringify(commitMessages, null, 2));
let result2 = await this.aiDocsRef.openaiInstance.chat({
messageHistory: [],
systemMessage: `
// Use DualAgentOrchestrator for changelog generation with Guardian validation
const changelogOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
logPrefix: '[Changelog]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate changelog generation.
APPROVE if:
- Changelog follows proper markdown format with ## headers for each version
- Entries are chronologically ordered (newest first)
- Version ranges for trivial commits are properly summarized
- No duplicate or empty entries
- Format matches: ## yyyy-mm-dd - x.x.x - scope
REJECT with feedback if:
- Markdown formatting is incorrect
- Entries are not meaningful or helpful
- Dates or versions are malformed
`,
});
await changelogOrchestrator.start();
const changelogTaskPrompt = `
You are building a changelog.md file for the project.
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.
@@ -118,17 +234,23 @@ A changelog entry should look like this:
You are given:
* the commit messages of the project
Only return the changelog file, so it can be written directly to changelog.md`,
userMessage: `
Only return the changelog file content, so it can be written directly to changelog.md.
Here are the commit messages:
${JSON.stringify(commitMessages, null, 2)}
`,
});
`;
previousChangelog = await plugins.smartfile.SmartFile.fromString(
const changelogResult = await changelogOrchestrator.run(changelogTaskPrompt);
await changelogOrchestrator.stop();
if (!changelogResult.success) {
throw new Error(`Changelog generation failed: ${changelogResult.status}`);
}
previousChangelog = plugins.smartfileFactory.fromString(
previousChangelogPath,
result2.message.replaceAll('```markdown', '').replaceAll('```', ''),
changelogResult.result.replaceAll('```markdown', '').replaceAll('```', ''),
'utf8'
);
}
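For illustration, a successfully parsed result from the commit orchestrator could look like the object below; every value is hypothetical and depends entirely on the diff under analysis, and the field list is inferred from the JSON structure in the prompt above.

// Hypothetical parsed INextCommitObject (invented values, for illustration only):
const exampleCommit: INextCommitObject = {
  recommendedNextVersionLevel: 'feat',
  recommendedNextVersionScope: 'diff-processor',
  recommendedNextVersionMessage: 'add intelligent diff sampling for large changesets',
  recommendedNextVersionDetails: [
    'summarize medium files with head/tail sampling',
    'report build artifacts as metadata only',
  ],
  recommendedNextVersion: '1.11.0',
};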

View File

@@ -1,6 +1,7 @@
import type { AiDoc } from '../classes.aidoc.js';
import * as plugins from '../plugins.js';
import { ProjectContext } from './projectcontext.js';
import { logger } from '../logging.js';
interface IDescriptionInterface {
description: string;
@@ -18,60 +19,97 @@ export class Description {
}
public async build() {
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(
this.projectDir,
this.aiDocsRef.openaiInstance
);
await taskContextFactory.initialize();
// Generate context specifically for description task
const contextResult = await taskContextFactory.createContextForDescription();
const contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// Use DualAgentOrchestrator with filesystem tool for agent-driven exploration
const descriptionOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
maxIterations: 15,
maxResultChars: 10000, // Limit tool output to prevent token explosion
maxHistoryMessages: 15, // Limit history window
logPrefix: '[Description]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate description generation tool calls and outputs.
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create a json adhering the following interface:
{
description: string; // a sensible short, one sentence description of the project
keywords: string[]; // an array of tags that describe the project
}
APPROVE tool calls for:
- Reading package.json, npmextra.json, or source files in the ts/ directory
- Listing directory contents to understand project structure
- Using tree to see project structure
The description should be based on what you understand from the project's files.
The keywords should be based on use cases you see from the files.
Don't be cheap about the way you think.
REJECT tool calls for:
- Reading files outside the project directory
- Writing, deleting, or modifying any files
- Any destructive operations
Important: Answer only in valid JSON.
You answer should be parseable with JSON.parse() without modifying anything.
For final output, APPROVE if:
- JSON is valid and parseable
- Description is a clear, concise one-sentence summary
- Keywords are relevant to the project's use cases
- Both description and keywords fields are present
Don't wrap the JSON in three ticks json!!!
`,
messageHistory: [],
userMessage: contextString,
REJECT final output if:
- JSON is malformed or wrapped in markdown code blocks
- Description is too long or vague
- Keywords are irrelevant or generic
`,
});
console.log(result.message);
// Register scoped filesystem tool for agent exploration
descriptionOrchestrator.registerScopedFilesystemTool(this.projectDir);
await descriptionOrchestrator.start();
const descriptionTaskPrompt = `
You create a project description and keywords for an npm package.
PROJECT DIRECTORY: ${this.projectDir}
Use the filesystem tool to explore the project and understand what it does:
1. First, use tree to see the project structure
2. Read package.json to understand the package name and current description
3. Read npmextra.json if it exists for additional metadata
4. Read key source files in ts/ directory to understand the implementation
Then generate a description and keywords based on your exploration.
Your FINAL response must be valid JSON adhering to this interface:
{
description: string; // a sensible short, one sentence description of the project
keywords: string[]; // an array of tags that describe the project based on use cases
}
Important: Answer only in valid JSON.
Your answer should be parseable with JSON.parse() without modifying anything.
Don't wrap the JSON in \`\`\`json\`\`\` - just return the raw JSON object.
`;
const descriptionResult = await descriptionOrchestrator.run(descriptionTaskPrompt);
await descriptionOrchestrator.stop();
if (!descriptionResult.success) {
throw new Error(`Description generation failed: ${descriptionResult.status}`);
}
console.log(descriptionResult.result);
const resultObject: IDescriptionInterface = JSON.parse(
result.message.replace('```json', '').replace('```', ''),
descriptionResult.result.replace('```json', '').replace('```', ''),
);
// Create a standard ProjectContext instance for file operations
// Use ProjectContext to get file handles for writing
const projectContext = new ProjectContext(this.projectDir);
const files = await projectContext.gatherFiles();
// Update npmextra.json
const npmextraJson = files.smartfilesNpmextraJSON;
const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());
npmextraJsonContent.gitzone.module.description = resultObject.description;
npmextraJsonContent.gitzone.module.keywords = resultObject.keywords;
npmextraJsonContent['@git.zone/cli'].module.description = resultObject.description;
npmextraJsonContent['@git.zone/cli'].module.keywords = resultObject.keywords;
npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
await npmextraJson.write();
// do the same with packageJson
// Update package.json
const packageJson = files.smartfilePackageJSON;
const packageJsonContent = JSON.parse(packageJson.contents.toString());
packageJsonContent.description = resultObject.description;
@@ -82,6 +120,6 @@ Don't wrap the JSON in three ticks json!!!
console.log(`\n======================\n`);
console.log(JSON.stringify(resultObject, null, 2));
console.log(`\n======================\n`);
return result.message;
return descriptionResult.result;
}
}

View File

@@ -13,31 +13,29 @@ export class ProjectContext {
}
public async gatherFiles() {
const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilePackageJSON = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'package.json'),
this.projectDir,
);
const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilesReadme = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'readme.md'),
this.projectDir,
);
const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilesReadmeHints = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'readme.hints.md'),
this.projectDir,
);
const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilesNpmextraJSON = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json'),
this.projectDir,
);
const smartfilesMod = await plugins.smartfile.fs.fileTreeToObject(
const smartfilesMod = await plugins.smartfileFactory.virtualDirectoryFromPath(
this.projectDir,
'ts*/**/*.ts',
);
const smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
).then(vd => vd.filter(f => f.relative.startsWith('ts') && f.relative.endsWith('.ts')).listFiles());
const smartfilesTest = await plugins.smartfileFactory.virtualDirectoryFromPath(
this.projectDir,
'test/**/*.ts',
);
).then(vd => vd.filter(f => f.relative.startsWith('test/') && f.relative.endsWith('.ts')).listFiles());
return {
smartfilePackageJSON,
smartfilesReadme,
@@ -66,21 +64,14 @@ ${smartfile.contents.toString()}
}
/**
* Calculate the token count for a string using the GPT tokenizer
* @param text The text to count tokens for
* @param model The model to use for token counting (default: gpt-3.5-turbo)
* @returns The number of tokens in the text
* Estimate token count for a string
* Uses a rough estimate of 4 characters per token
* @param text The text to estimate tokens for
* @returns Estimated number of tokens
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
try {
// Use the gpt-tokenizer library to count tokens
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
console.error('Error counting tokens:', error);
// Provide a rough estimate (4 chars per token) if tokenization fails
return Math.ceil(text.length / 4);
}
public countTokens(text: string): number {
// Rough estimate: ~4 characters per token for English text
return Math.ceil(text.length / 4);
}
private async buildContext(dirArg: string) {

View File

@@ -17,75 +17,111 @@ export class Readme {
public async build() {
let finalReadmeString = ``;
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(
this.projectDir,
this.aiDocsRef.openaiInstance
);
await taskContextFactory.initialize();
// Generate context specifically for readme task
const contextResult = await taskContextFactory.createContextForReadme();
const contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// lets first check legal before introducung any cost
// First check legal info before introducing any cost
const projectContext = new ProjectContext(this.projectDir);
const npmExtraJson = JSON.parse(
(await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
);
const legalInfo = npmExtraJson?.tsdoc?.legal;
const legalInfo = npmExtraJson?.['@git.zone/tsdoc']?.legal;
if (!legalInfo) {
const error = new Error(`No legal information found in npmextra.json`);
console.log(error);
}
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.
// Use DualAgentOrchestrator with filesystem tool for agent-driven exploration
const readmeOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
maxIterations: 25,
maxResultChars: 15000, // Limit tool output to prevent token explosion
maxHistoryMessages: 20, // Limit history window
logPrefix: '[README]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate README generation tool calls and outputs.
The Readme should follow the following template:
APPROVE tool calls for:
- Reading any files within the project directory (package.json, ts/*.ts, readme.md, etc.)
- Using tree to see project structure
- Using glob to find source files
- Listing directory contents
REJECT tool calls for:
- Reading files outside the project directory
- Writing, deleting, or modifying any files
- Any destructive operations
For final README output, APPROVE if:
- README follows proper markdown format
- Contains Install and Usage sections
- Code examples are correct TypeScript/ESM syntax
- Documentation is comprehensive and helpful
REJECT final output if:
- README is incomplete or poorly formatted
- Contains licensing information (added separately)
- Uses CommonJS syntax instead of ESM
- Contains "in conclusion" or similar filler
`,
});
// Register scoped filesystem tool for agent exploration
readmeOrchestrator.registerScopedFilesystemTool(this.projectDir);
await readmeOrchestrator.start();
const readmeTaskPrompt = `
You create markdown READMEs for npm projects. You only output the markdown readme.
PROJECT DIRECTORY: ${this.projectDir}
Use the filesystem tool to explore the project and understand what it does:
1. First, use tree to see the project structure (maxDepth: 3)
2. Read package.json to understand the package name, description, and dependencies
3. Read the existing readme.md if it exists (use it as a base, improve and expand)
4. Read readme.hints.md if it exists (contains hints for documentation)
5. Read key source files in ts/ directory to understand the API and implementation
6. Focus on exported classes, interfaces, and functions
Then generate a comprehensive README following this template:
# Project Name
[
The name is the module name of package.json
The description is in the description field of package.json
]
[The name from package.json and description]
## Install
[
Write a short text on how to install the project
]
[Short text on how to install the project]
## Usage
[
[
Give code examples here.
Construct sensible scenarios for the user.
Make sure to show a complete set of features of the module.
Don't omit use cases.
It does not matter how much time you need.
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
Super important: Check for completenes.
Don't include any licensing information. This will be added in a later step.
Avoid "in conclusions".
Good to know:
* npmextra.json contains overall module information.
* readme.hints.md provides valuable hints about module ideas.
Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, expand and improve.
Check for completeness.
Don't include any licensing information. This will be added later.
Avoid "in conclusion" statements.
]
`,
messageHistory: [],
userMessage: contextString,
});
`;
finalReadmeString += result.message + '\n' + legalInfo;
const readmeResult = await readmeOrchestrator.run(readmeTaskPrompt);
await readmeOrchestrator.stop();
if (!readmeResult.success) {
throw new Error(`README generation failed: ${readmeResult.status}`);
}
// Clean up markdown formatting if wrapped in code blocks
let resultMessage = readmeResult.result
.replace(/^```markdown\n?/i, '')
.replace(/\n?```$/i, '');
finalReadmeString += resultMessage + '\n' + legalInfo;
console.log(`\n======================\n`);
console.log(result.message);
console.log(resultMessage);
console.log(`\n======================\n`);
const readme = (await projectContext.gatherFiles()).smartfilesReadme;
@@ -96,60 +132,99 @@ The Readme should follow the following template:
const tsPublishInstance = new plugins.tspublish.TsPublish();
const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);
for (const subModule of Object.keys(subModules)) {
logger.log('info', `Building readme for ${subModule}`);
const subModuleContextString = await projectContext.update();
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.
IMPORTANT: YOU ARE NOW CREATING THE README FOR THE FOLLOWING SUB MODULE: ${subModule} !!!!!!!!!!!
The Sub Module will be published with the following data:
${JSON.stringify(plugins.smartfile.fs.toStringSync(plugins.path.join(paths.cwd, subModule, 'tspublish.json')), null, 2)}
const subModulePath = plugins.path.join(paths.cwd, subModule);
const tspublishData = await plugins.fsInstance
.file(plugins.path.join(subModulePath, 'tspublish.json'))
.encoding('utf8')
.read();
The Readme should follow the following template:
# Project Name
[
The name is the module name of package.json
The description is in the description field of package.json
]
## Install
[
Write a short text on how to install the project
]
## Usage
[
Give code examples here.
Construct sensible scenarios for the user.
Make sure to show a complete set of features of the module.
Don't omit use cases.
It does not matter how much time you need.
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
Super important: Check for completenes.
Don't include any licensing information. This will be added in a later step.
Avoid "in conclusions".
Good to know:
* npmextra.json contains overall module information.
* readme.hints.md provides valuable hints about module ideas.
* Your output lands directly in the readme.md file.
* Don't use \`\`\` at the beginning or the end. It'll cause problems. Only use it for codeblocks. You are directly writing markdown. No need to introduce it weirdly.
]
`,
messageHistory: [],
userMessage: subModuleContextString,
// Create a new orchestrator with filesystem tool for each submodule
const subModuleOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
maxIterations: 20,
maxResultChars: 12000,
maxHistoryMessages: 15,
logPrefix: `[README:${subModule}]`,
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate README generation for submodules.
APPROVE tool calls for:
- Reading any files within the submodule directory
- Using tree to see structure
- Using glob to find source files
REJECT tool calls for:
- Reading files outside the submodule directory
- Writing, deleting, or modifying any files
- Any destructive operations
APPROVE final README if comprehensive, well-formatted markdown with ESM TypeScript examples.
REJECT incomplete READMEs or those with licensing info.
`,
});
const subModuleReadmeString = result.message + '\n' + legalInfo;
await plugins.smartfile.memory.toFs(subModuleReadmeString, plugins.path.join(paths.cwd, subModule, 'readme.md'));
logger.log('success', `Built readme for ${subModule}`);
// Register scoped filesystem tool for the submodule directory
subModuleOrchestrator.registerScopedFilesystemTool(subModulePath);
await subModuleOrchestrator.start();
const subModulePrompt = `
You create markdown READMEs for npm projects. You only output the markdown readme.
SUB MODULE: ${subModule}
SUB MODULE DIRECTORY: ${subModulePath}
IMPORTANT: YOU ARE CREATING THE README FOR THIS SUB MODULE: ${subModule}
The Sub Module will be published with:
${JSON.stringify(tspublishData, null, 2)}
Use the filesystem tool to explore the submodule:
1. Use tree to see the submodule structure
2. Read package.json to understand the submodule
3. Read source files in ts/ directory to understand the implementation
Generate a README following the template:
# Project Name
[name and description from package.json]
## Install
[installation instructions]
## Usage
[
Code examples with complete features.
ESM TypeScript syntax only.
Write at least 4000 words.
No licensing information.
No "in conclusion".
]
Don't use \`\`\` at the beginning or end. Only for code blocks.
`;
const subModuleResult = await subModuleOrchestrator.run(subModulePrompt);
await subModuleOrchestrator.stop();
if (subModuleResult.success) {
const subModuleReadmeString = subModuleResult.result
.replace(/^```markdown\n?/i, '')
.replace(/\n?```$/i, '') + '\n' + legalInfo;
await plugins.fsInstance
.file(plugins.path.join(subModulePath, 'readme.md'))
.encoding('utf8')
.write(subModuleReadmeString);
logger.log('success', `Built readme for ${subModule}`);
} else {
logger.log('error', `Failed to build readme for ${subModule}: ${subModuleResult.status}`);
}
}
return result.message;
return resultMessage;
}
}

View File

@@ -8,7 +8,7 @@ export class AiDoc {
public npmextraKV: plugins.npmextra.KeyValueStore;
public qenvInstance: plugins.qenv.Qenv;
public aidocInteract: plugins.smartinteract.SmartInteract;
public openaiInstance: plugins.smartai.OpenAiProvider;
public smartAiInstance: plugins.smartai.SmartAi;
argvArg: any;
@@ -36,9 +36,25 @@ export class AiDoc {
this.aidocInteract = new plugins.smartinteract.SmartInteract();
this.qenvInstance = new plugins.qenv.Qenv();
if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
// Migrate old KV store path to new path if needed
const homeDir = plugins.smartpath.get.home();
const oldKvPath = plugins.path.join(homeDir, '.npmextra/kv/tsdoc.json');
const newKvDir = plugins.path.join(homeDir, '.npmextra/kv/@git.zone');
const newKvPath = plugins.path.join(newKvDir, 'tsdoc.json');
if (
await plugins.fsInstance.file(oldKvPath).exists() &&
!(await plugins.fsInstance.file(newKvPath).exists())
) {
console.log('Migrating tsdoc KeyValueStore to @git.zone/tsdoc...');
await plugins.fsInstance.directory(newKvDir).recursive().create();
await plugins.fsInstance.file(oldKvPath).copy(newKvPath);
await plugins.fsInstance.file(oldKvPath).delete();
console.log('Migration complete: tsdoc.json -> @git.zone/tsdoc.json');
}
this.npmextraKV = new plugins.npmextra.KeyValueStore({
typeArg: 'userHomeDir',
identityArg: 'tsdoc',
identityArg: '@git.zone/tsdoc',
mandatoryKeys: ['OPENAI_TOKEN'],
});
@@ -64,19 +80,35 @@ export class AiDoc {
await this.npmextraKV.writeKey('OPENAI_TOKEN', this.openaiToken);
}
}
if (!this.openaiToken) {
if (!this.openaiToken && this.npmextraKV) {
this.openaiToken = await this.npmextraKV.readKey('OPENAI_TOKEN');
}
// Let's assume we have an OPENAI_TOKEN now
this.openaiInstance = new plugins.smartai.OpenAiProvider({
this.smartAiInstance = new plugins.smartai.SmartAi({
openaiToken: this.openaiToken,
});
await this.openaiInstance.start();
await this.smartAiInstance.start();
}
public async stop() {
await this.openaiInstance.stop();
if (this.smartAiInstance) {
await this.smartAiInstance.stop();
}
// No explicit cleanup needed for npmextraKV or aidocInteract;
// they don't keep the event loop alive
}
/**
* Get the OpenAI provider for direct chat calls
* This is a convenience getter to access the provider from SmartAi
*/
public get openaiProvider(): plugins.smartai.OpenAiProvider {
return this.smartAiInstance.openaiProvider;
}
public getOpenaiToken(): string {
return this.openaiToken;
}
public async buildReadme(projectDirArg: string) {
@@ -122,13 +154,12 @@ export class AiDoc {
}
/**
* Count tokens in a text string using GPT tokenizer
* @param text The text to count tokens for
* @param model The model to use for tokenization (default: gpt-3.5-turbo)
* @returns The number of tokens in the text
* Estimate token count in a text string
* @param text The text to estimate tokens for
* @returns Estimated number of tokens
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
public countTokens(text: string): number {
const projectContextInstance = new aiDocsClasses.ProjectContext('');
return projectContextInstance.countTokens(text, model);
return projectContextInstance.countTokens(text);
}
}

ts/classes.diffprocessor.ts Normal file
View File

@@ -0,0 +1,353 @@
/**
* Intelligent git diff processor that handles large diffs by sampling and prioritization
* instead of blind truncation.
*/
export interface IDiffFileInfo {
filepath: string;
status: 'added' | 'modified' | 'deleted';
linesAdded: number;
linesRemoved: number;
totalLines: number;
estimatedTokens: number;
diffContent: string;
}
export interface IProcessedDiff {
summary: string; // Human-readable overview
fullDiffs: string[]; // Small files included fully
summarizedDiffs: string[]; // Medium files with head/tail
metadataOnly: string[]; // Large files, just stats
totalFiles: number;
totalTokens: number;
}
export interface IDiffProcessorOptions {
maxDiffTokens?: number; // Maximum tokens for entire diff section (default: 100000)
smallFileLines?: number; // Files <= this are included fully (default: 50)
mediumFileLines?: number; // Files <= this are summarized (default: 200)
sampleHeadLines?: number; // Lines to show at start of medium files (default: 20)
sampleTailLines?: number; // Lines to show at end of medium files (default: 20)
}
export class DiffProcessor {
private options: Required<IDiffProcessorOptions>;
constructor(options: IDiffProcessorOptions = {}) {
this.options = {
maxDiffTokens: options.maxDiffTokens ?? 100000,
smallFileLines: options.smallFileLines ?? 50,
mediumFileLines: options.mediumFileLines ?? 200,
sampleHeadLines: options.sampleHeadLines ?? 20,
sampleTailLines: options.sampleTailLines ?? 20,
};
}
/**
* Process an array of git diffs into a structured, token-efficient format
*/
public processDiffs(diffStringArray: string[]): IProcessedDiff {
// Parse all diffs into file info objects
const fileInfos: IDiffFileInfo[] = diffStringArray
.map(diffString => this.parseDiffFile(diffString))
.filter(info => info !== null) as IDiffFileInfo[];
// Prioritize files (source files first, build artifacts last)
const prioritized = this.prioritizeFiles(fileInfos);
const result: IProcessedDiff = {
summary: '',
fullDiffs: [],
summarizedDiffs: [],
metadataOnly: [],
totalFiles: prioritized.length,
totalTokens: 0,
};
let tokensUsed = 0;
const tokenBudget = this.options.maxDiffTokens;
// Categorize and include files based on size and token budget
for (const fileInfo of prioritized) {
const remainingBudget = tokenBudget - tokensUsed;
if (remainingBudget <= 0) {
// Budget exhausted - rest are metadata only
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
continue;
}
if (fileInfo.totalLines <= this.options.smallFileLines) {
// Small file - include fully if budget allows
if (fileInfo.estimatedTokens <= remainingBudget) {
const statusPrefix = this.getFileStatusPrefix(fileInfo);
result.fullDiffs.push(`${statusPrefix}${fileInfo.diffContent}`);
tokensUsed += fileInfo.estimatedTokens;
} else {
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
}
} else if (fileInfo.totalLines <= this.options.mediumFileLines) {
// Medium file - try to include summary with head/tail
const summary = this.extractDiffSample(
fileInfo,
this.options.sampleHeadLines,
this.options.sampleTailLines
);
const summaryTokens = Math.ceil(summary.length / 4); // Rough estimate
if (summaryTokens <= remainingBudget) {
result.summarizedDiffs.push(summary);
tokensUsed += summaryTokens;
} else {
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
}
} else {
// Large file - metadata only
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
}
}
result.totalTokens = tokensUsed;
result.summary = this.generateSummary(result);
return result;
}
/**
* Format the processed diff for inclusion in context
*/
public formatForContext(processed: IProcessedDiff): string {
const sections: string[] = [];
// Summary section
sections.push('====== GIT DIFF SUMMARY ======');
sections.push(processed.summary);
sections.push('');
// Full diffs section
if (processed.fullDiffs.length > 0) {
sections.push(`====== FULL DIFFS (${processed.fullDiffs.length} files) ======`);
sections.push(processed.fullDiffs.join('\n\n'));
sections.push('');
}
// Summarized diffs section
if (processed.summarizedDiffs.length > 0) {
sections.push(`====== SUMMARIZED DIFFS (${processed.summarizedDiffs.length} files) ======`);
sections.push(processed.summarizedDiffs.join('\n\n'));
sections.push('');
}
// Metadata only section
if (processed.metadataOnly.length > 0) {
sections.push(`====== METADATA ONLY (${processed.metadataOnly.length} files) ======`);
sections.push(processed.metadataOnly.join('\n'));
sections.push('');
}
sections.push('====== END OF GIT DIFF ======');
return sections.join('\n');
}
/**
* Parse a single git diff string into file information
*/
private parseDiffFile(diffString: string): IDiffFileInfo | null {
if (!diffString || diffString.trim().length === 0) {
return null;
}
const lines = diffString.split('\n');
let filepath = '';
let status: 'added' | 'modified' | 'deleted' = 'modified';
let linesAdded = 0;
let linesRemoved = 0;
// Parse diff header to extract filepath and status
for (const line of lines) {
if (line.startsWith('--- a/')) {
filepath = line.substring(6);
} else if (line.startsWith('+++ b/')) {
const newPath = line.substring(6);
if (newPath === '/dev/null') {
status = 'deleted';
} else if (filepath === '/dev/null') {
status = 'added';
filepath = newPath;
} else {
filepath = newPath;
}
} else if (line.startsWith('+') && !line.startsWith('+++')) {
linesAdded++;
} else if (line.startsWith('-') && !line.startsWith('---')) {
linesRemoved++;
}
}
const totalLines = linesAdded + linesRemoved;
const estimatedTokens = Math.ceil(diffString.length / 4);
return {
filepath,
status,
linesAdded,
linesRemoved,
totalLines,
estimatedTokens,
diffContent: diffString,
};
}
/**
* Prioritize files by importance (source files before build artifacts)
*/
private prioritizeFiles(files: IDiffFileInfo[]): IDiffFileInfo[] {
return files.sort((a, b) => {
const scoreA = this.getFileImportanceScore(a.filepath);
const scoreB = this.getFileImportanceScore(b.filepath);
return scoreB - scoreA; // Higher score first
});
}
/**
* Calculate importance score for a file path
*/
private getFileImportanceScore(filepath: string): number {
// Source files - highest priority
if (filepath.match(/^(src|lib|app|components|pages|api)\//)) {
return 100;
}
// Test files - high priority
if (filepath.match(/\.(test|spec)\.(ts|js|tsx|jsx)$/) || filepath.startsWith('test/')) {
return 80;
}
// Configuration files - medium-high priority
if (filepath.match(/\.(json|yaml|yml|toml|config\.(ts|js))$/)) {
return 60;
}
// Documentation - medium priority
if (filepath.match(/\.(md|txt|rst)$/)) {
return 40;
}
// Build artifacts - low priority
if (filepath.match(/^(dist|build|out|\.next|public\/dist)\//)) {
return 10;
}
// Start with default priority
let score = 50;
// Boost interface/type files - they're usually small but critical
if (filepath.includes('interfaces/') || filepath.includes('.types.')) {
score += 20;
}
// Boost entry points
if (filepath.endsWith('index.ts') || filepath.endsWith('mod.ts')) {
score += 15;
}
return score;
}
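// Worked examples (a sketch): 'src/index.ts' matches the source prefix and
// scores 100; 'dist/bundle.js' is a build artifact and scores 10;
// 'ts/interfaces/data.types.ts' matches no prefix rule, so it starts at 50
// and gains +20 as a type/interface file (70); 'ts/index.ts' starts at 50
// and gains +15 as an entry point (65).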
/**
* Extract head and tail lines from a diff, omitting the middle
*/
private extractDiffSample(fileInfo: IDiffFileInfo, headLines: number, tailLines: number): string {
const lines = fileInfo.diffContent.split('\n');
const totalLines = lines.length;
if (totalLines <= headLines + tailLines) {
// File is small enough to include fully
return fileInfo.diffContent;
}
// Extract file metadata from diff header
const headerLines: string[] = [];
let bodyStartIndex = 0;
for (let i = 0; i < lines.length; i++) {
if (lines[i].startsWith('@@')) {
headerLines.push(...lines.slice(0, i + 1));
bodyStartIndex = i + 1;
break;
}
}
const bodyLines = lines.slice(bodyStartIndex);
const head = bodyLines.slice(0, headLines);
const tail = bodyLines.slice(-tailLines);
const omittedLines = bodyLines.length - headLines - tailLines;
const statusEmoji = fileInfo.status === 'added' ? '➕' :
fileInfo.status === 'deleted' ? '➖' : '📝';
const parts: string[] = [];
parts.push(`${statusEmoji} FILE: ${fileInfo.filepath}`);
parts.push(`CHANGES: +${fileInfo.linesAdded} lines, -${fileInfo.linesRemoved} lines (${fileInfo.totalLines} total)`);
parts.push('');
parts.push(...headerLines);
parts.push(...head);
parts.push('');
parts.push(`[... ${omittedLines} lines omitted - use Read tool to see full file ...]`);
parts.push('');
parts.push(...tail);
return parts.join('\n');
}
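// The sampled output keeps the diff header plus the first and last body
// lines, for example (a sketch with sampleHeadLines = sampleTailLines = 3):
//   📝 FILE: ts/example.ts
//   CHANGES: +120 lines, -30 lines (150 total)
//   <header lines up to and including the first @@ hunk>
//   <first 3 body lines>
//   [... N lines omitted - use Read tool to see full file ...]
//   <last 3 body lines>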
/**
* Get file status prefix with emoji
*/
private getFileStatusPrefix(fileInfo: IDiffFileInfo): string {
const statusEmoji = fileInfo.status === 'added' ? '➕' :
fileInfo.status === 'deleted' ? '➖' : '📝';
return `${statusEmoji} `;
}
/**
* Extract filepath from diff content
*/
private extractFilepathFromDiff(diffContent: string): string {
const lines = diffContent.split('\n');
for (const line of lines) {
if (line.startsWith('+++ b/')) {
return line.substring(6);
}
}
return 'unknown';
}
/**
* Format file info as metadata only
*/
private formatMetadataOnly(fileInfo: IDiffFileInfo): string {
const statusEmoji = fileInfo.status === 'added' ? '➕' :
fileInfo.status === 'deleted' ? '➖' : '📝';
return `${statusEmoji} ${fileInfo.filepath} (+${fileInfo.linesAdded}, -${fileInfo.linesRemoved})`;
}
/**
* Generate human-readable summary of processed diff
*/
private generateSummary(result: IProcessedDiff): string {
const parts: string[] = [];
parts.push(`Files changed: ${result.totalFiles} total`);
parts.push(`- ${result.fullDiffs.length} included in full`);
parts.push(`- ${result.summarizedDiffs.length} summarized (head/tail shown)`);
parts.push(`- ${result.metadataOnly.length} metadata only`);
parts.push(`Estimated tokens: ~${result.totalTokens.toLocaleString()}`);
if (result.metadataOnly.length > 0) {
parts.push('');
parts.push('NOTE: Some files excluded to stay within token budget.');
parts.push('Use Read tool with specific file paths to see full content.');
}
return parts.join('\n');
}
}
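A minimal usage sketch of the processor (the entry-method name processDiff and the one-diff-string-per-file signature are assumptions; the options such as mediumFileLines carry defaults):

const processor = new DiffProcessor();
const processed = processor.processDiff(diffStrings); // hypothetical entry point, one string per file
console.log(processed.summary); // human-readable overview of the full/summarized/metadata buckets
const promptSection = processor.formatForContext(processed); // sectioned text for the LLM prompt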


@@ -33,19 +33,19 @@ export class TypeDoc {
include: [],
};
let startDirectory = '';
if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts'))) {
if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts')).exists()) {
data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
startDirectory = 'ts';
}
if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts_web'))) {
if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts_web')).exists()) {
data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
if (!startDirectory) {
startDirectory = 'ts_web';
}
}
await plugins.smartfile.memory.toFs(JSON.stringify(data), paths.tsconfigFile);
await plugins.fsInstance.file(paths.tsconfigFile).encoding('utf8').write(JSON.stringify(data));
let targetDir = paths.publicDir;
if (options?.publicSubdir) {
targetDir = plugins.path.join(targetDir, options.publicSubdir);
@@ -53,6 +53,6 @@ export class TypeDoc {
await this.smartshellInstance.exec(
`typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
);
plugins.smartfile.fs.remove(paths.tsconfigFile);
await plugins.fsInstance.file(paths.tsconfigFile).delete();
}
}
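The hunks above capture the smartfile-to-fsInstance migration pattern applied throughout these commits; a sketch using only the calls shown (note the new fluent API is async, so every call gains an await):

const dir = plugins.path.join(paths.cwd, './ts');
if (await plugins.fsInstance.directory(dir).exists()) { /* was: plugins.smartfile.fs.isDirectory(dir) */ }
await plugins.fsInstance.file(paths.tsconfigFile).encoding('utf8').write(content); // was: plugins.smartfile.memory.toFs(content, ...)
await plugins.fsInstance.file(paths.tsconfigFile).delete(); // was: plugins.smartfile.fs.remove(...)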

ts/cli.ts

@@ -4,7 +4,6 @@ import { logger } from './logging.js';
import { TypeDoc } from './classes.typedoc.js';
import { AiDoc } from './classes.aidoc.js';
import * as context from './context/index.js';
export const run = async () => {
const tsdocCli = new plugins.smartcli.Smartcli();
@@ -32,17 +31,6 @@ export const run = async () => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
// Get context token count if requested
if (argvArg.tokens || argvArg.showTokens) {
logger.log('info', `Calculating context token count...`);
const tokenCount = await aidocInstance.getProjectContextTokenCount(paths.cwd);
logger.log('ok', `Total context token count: ${tokenCount}`);
if (argvArg.tokensOnly) {
return; // Exit early if we only want token count
}
}
logger.log('info', `Generating new readme...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildReadme(paths.cwd);
@@ -51,108 +39,40 @@ export const run = async () => {
await aidocInstance.buildDescription(paths.cwd);
});
tsdocCli.addCommand('tokens').subscribe(async (argvArg) => {
tsdocCli.addCommand('readme').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
logger.log('info', `Calculating context token count...`);
logger.log('info', `Generating new readme...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildReadme(paths.cwd);
});
// Get task type if specified
let taskType: context.TaskType | undefined = undefined;
if (argvArg.task) {
if (['readme', 'commit', 'description'].includes(argvArg.task)) {
taskType = argvArg.task as context.TaskType;
} else {
logger.log('warn', `Unknown task type: ${argvArg.task}. Using default (readme).`);
taskType = 'readme';
}
} else {
// Default to readme if no task specified
taskType = 'readme';
}
tsdocCli.addCommand('description').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
// Use iterative context building
const taskFactory = new context.TaskContextFactory(paths.cwd);
await taskFactory.initialize();
logger.log('info', `Generating new description and keywords...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildDescription(paths.cwd);
});
let contextResult: context.IIterativeContextResult;
tsdocCli.addCommand('commit').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
if (argvArg.all) {
// Show stats for all task types
const stats = await taskFactory.getTokenStats();
logger.log('info', `Generating commit message...`);
logger.log('info', `This may take some time...`);
const commitObject = await aidocInstance.buildNextCommitObject(paths.cwd);
logger.log('ok', 'Token statistics by task:');
for (const [task, data] of Object.entries(stats)) {
logger.log('info', `\n${task.toUpperCase()}:`);
logger.log('info', ` Tokens: ${data.tokenCount}`);
logger.log('info', ` Token savings: ${data.savings}`);
logger.log('info', ` Files: ${data.includedFiles} included, ${data.trimmedFiles} trimmed, ${data.excludedFiles} excluded`);
// Calculate percentage of model context
const o4MiniPercentage = (data.tokenCount / 200000 * 100).toFixed(2);
logger.log('info', ` Context usage: ${o4MiniPercentage}% of o4-mini (200K tokens)`);
}
return;
}
// Get context for specific task
contextResult = await taskFactory.createContextForTask(taskType);
// Display results
logger.log('ok', `Total context token count: ${contextResult.tokenCount}`);
logger.log('info', `Files included: ${contextResult.includedFiles.length}`);
logger.log('info', `Files trimmed: ${contextResult.trimmedFiles.length}`);
logger.log('info', `Files excluded: ${contextResult.excludedFiles.length}`);
logger.log('info', `Token savings: ${contextResult.tokenSavings}`);
if (argvArg.detailed) {
// Show more detailed info about the context and token usage
const o4MiniPercentage = (contextResult.tokenCount / 200000 * 100).toFixed(2);
logger.log('info', `Token usage: ${o4MiniPercentage}% of o4-mini 200K token context window`);
if (argvArg.model) {
// Show percentages for different models
if (argvArg.model === 'gpt4') {
const gpt4Percentage = (contextResult.tokenCount / 8192 * 100).toFixed(2);
logger.log('info', `Token usage (GPT-4): ${gpt4Percentage}% of 8192 token context window`);
} else if (argvArg.model === 'gpt35') {
const gpt35Percentage = (contextResult.tokenCount / 4096 * 100).toFixed(2);
logger.log('info', `Token usage (GPT-3.5): ${gpt35Percentage}% of 4096 token context window`);
}
}
// Estimate cost (approximate values)
const o4MiniInputCost = 0.00005; // per 1K tokens for o4-mini
const estimatedCost = (contextResult.tokenCount / 1000 * o4MiniInputCost).toFixed(6);
logger.log('info', `Estimated input cost: $${estimatedCost} (o4-mini)`);
if (argvArg.listFiles) {
// List files included in context
logger.log('info', '\nIncluded files:');
contextResult.includedFiles.forEach(file => {
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
});
logger.log('info', '\nTrimmed files:');
contextResult.trimmedFiles.forEach(file => {
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
});
if (contextResult.excludedFiles.length > 0) {
logger.log('info', '\nExcluded files:');
contextResult.excludedFiles.forEach(file => {
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
});
}
}
}
logger.log('ok', `Commit message generated:`);
console.log(JSON.stringify(commitObject, null, 2));
});
tsdocCli.addCommand('test').subscribe((argvArg) => {
tsdocCli.triggerCommand('typedoc', argvArg);
process.on('exit', async () => {
await plugins.smartfile.fs.remove(paths.publicDir);
await plugins.fsInstance.directory(paths.publicDir).recursive().delete();
});
});


@@ -1,369 +0,0 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type {
IContextConfig,
ITrimConfig,
ITaskConfig,
TaskType,
ContextMode,
ICacheConfig,
IAnalyzerConfig,
IPrioritizationWeights,
ITierConfig,
IIterativeConfig
} from './types.js';
/**
* Manages configuration for context building
*/
export class ConfigManager {
private static instance: ConfigManager;
private config: IContextConfig;
private projectDir: string = '';
private configCache: { mtime: number; config: IContextConfig } | null = null;
/**
* Get the singleton instance of ConfigManager
*/
public static getInstance(): ConfigManager {
if (!ConfigManager.instance) {
ConfigManager.instance = new ConfigManager();
}
return ConfigManager.instance;
}
/**
* Private constructor for singleton pattern
*/
private constructor() {
this.config = this.getDefaultConfig();
}
/**
* Initialize the config manager with a project directory
* @param projectDir The project directory
*/
public async initialize(projectDir: string): Promise<void> {
this.projectDir = projectDir;
await this.loadConfig();
}
/**
* Get the default configuration
*/
private getDefaultConfig(): IContextConfig {
return {
maxTokens: 190000, // Default for o4-mini with some buffer
defaultMode: 'trimmed',
taskSpecificSettings: {
readme: {
mode: 'trimmed',
includePaths: ['ts/', 'src/'],
excludePaths: ['test/', 'node_modules/']
},
commit: {
mode: 'trimmed',
focusOnChangedFiles: true
},
description: {
mode: 'trimmed',
includePackageInfo: true
}
},
trimming: {
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true
},
cache: {
enabled: true,
ttl: 3600, // 1 hour
maxSize: 100, // 100MB
directory: undefined // Will be set to .nogit/context-cache by ContextCache
},
analyzer: {
useAIRefinement: false, // Disabled by default for now
aiModel: 'haiku'
},
prioritization: {
dependencyWeight: 0.3,
relevanceWeight: 0.4,
efficiencyWeight: 0.2,
recencyWeight: 0.1
},
tiers: {
essential: { minScore: 0.8, trimLevel: 'none' },
important: { minScore: 0.5, trimLevel: 'light' },
optional: { minScore: 0.2, trimLevel: 'aggressive' }
},
iterative: {
maxIterations: 5,
firstPassFileLimit: 10,
subsequentPassFileLimit: 5,
temperature: 0.3,
model: 'gpt-4-turbo-preview'
}
};
}
/**
* Load configuration from npmextra.json
*/
private async loadConfig(): Promise<void> {
try {
if (!this.projectDir) {
return;
}
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
// Check if file exists
const fileExists = await plugins.smartfile.fs.fileExists(npmextraJsonPath);
if (!fileExists) {
return;
}
// Check cache
const stats = await fs.promises.stat(npmextraJsonPath);
const currentMtime = Math.floor(stats.mtimeMs);
if (this.configCache && this.configCache.mtime === currentMtime) {
// Use cached config
this.config = this.configCache.config;
return;
}
// Read the npmextra.json file
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());
// Check for tsdoc context configuration
if (npmextraContent?.tsdoc?.context) {
// Merge with default config
this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
}
// Cache the config
this.configCache = {
mtime: currentMtime,
config: { ...this.config }
};
} catch (error) {
console.error('Error loading context configuration:', error);
}
}
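// Example npmextra.json override picked up above (a sketch; any subset of
// IContextConfig may appear under tsdoc.context and is merged over the
// defaults):
//   {
//     "tsdoc": {
//       "context": {
//         "maxTokens": 150000,
//         "defaultMode": "full",
//         "trimming": { "removeComments": false }
//       }
//     }
//   }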
/**
* Merge configurations, with userConfig taking precedence
* @param defaultConfig The default configuration
* @param userConfig The user configuration
*/
private mergeConfigs(defaultConfig: IContextConfig, userConfig: Partial<IContextConfig>): IContextConfig {
const result: IContextConfig = { ...defaultConfig };
// Merge top-level properties
if (userConfig.maxTokens !== undefined) result.maxTokens = userConfig.maxTokens;
if (userConfig.defaultMode !== undefined) result.defaultMode = userConfig.defaultMode;
// Merge task-specific settings
if (userConfig.taskSpecificSettings) {
result.taskSpecificSettings = result.taskSpecificSettings || {};
// For each task type, merge settings
(['readme', 'commit', 'description'] as TaskType[]).forEach(taskType => {
if (userConfig.taskSpecificSettings?.[taskType]) {
result.taskSpecificSettings![taskType] = {
...result.taskSpecificSettings![taskType],
...userConfig.taskSpecificSettings[taskType]
};
}
});
}
// Merge trimming configuration
if (userConfig.trimming) {
result.trimming = {
...result.trimming,
...userConfig.trimming
};
}
// Merge cache configuration
if (userConfig.cache) {
result.cache = {
...result.cache,
...userConfig.cache
};
}
// Merge analyzer configuration
if (userConfig.analyzer) {
result.analyzer = {
...result.analyzer,
...userConfig.analyzer
};
}
// Merge prioritization weights
if (userConfig.prioritization) {
result.prioritization = {
...result.prioritization,
...userConfig.prioritization
};
}
// Merge tier configuration
if (userConfig.tiers) {
result.tiers = {
...result.tiers,
...userConfig.tiers
};
}
// Merge iterative configuration
if (userConfig.iterative) {
result.iterative = {
...result.iterative,
...userConfig.iterative
};
}
return result;
}
/**
* Get the complete configuration
*/
public getConfig(): IContextConfig {
return this.config;
}
/**
* Get the trimming configuration
*/
public getTrimConfig(): ITrimConfig {
return this.config.trimming || {};
}
/**
* Get configuration for a specific task
* @param taskType The type of task
*/
public getTaskConfig(taskType: TaskType): ITaskConfig {
// Get task-specific config or empty object
const taskConfig = this.config.taskSpecificSettings?.[taskType] || {};
// If mode is not specified, use default mode
if (!taskConfig.mode) {
taskConfig.mode = this.config.defaultMode;
}
return taskConfig;
}
/**
* Get the maximum tokens allowed for context
*/
public getMaxTokens(): number {
return this.config.maxTokens || 190000;
}
/**
* Update the configuration
* @param config The new configuration
*/
public async updateConfig(config: Partial<IContextConfig>): Promise<void> {
// Merge with existing config
this.config = this.mergeConfigs(this.config, config);
// Invalidate cache
this.configCache = null;
try {
if (!this.projectDir) {
return;
}
// Read the existing npmextra.json file
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
let npmextraContent = {};
if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
}
// Update the tsdoc context configuration
const typedContent = npmextraContent as any;
if (!typedContent.tsdoc) typedContent.tsdoc = {};
typedContent.tsdoc.context = this.config;
// Write back to npmextra.json
const updatedContent = JSON.stringify(npmextraContent, null, 2);
await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
} catch (error) {
console.error('Error updating context configuration:', error);
}
}
/**
* Get cache configuration
*/
public getCacheConfig(): ICacheConfig {
return this.config.cache || { enabled: true, ttl: 3600, maxSize: 100 };
}
/**
* Get analyzer configuration
*/
public getAnalyzerConfig(): IAnalyzerConfig {
return this.config.analyzer || { useAIRefinement: false, aiModel: 'haiku' };
}
/**
* Get prioritization weights
*/
public getPrioritizationWeights(): IPrioritizationWeights {
return this.config.prioritization || {
dependencyWeight: 0.3,
relevanceWeight: 0.4,
efficiencyWeight: 0.2,
recencyWeight: 0.1
};
}
/**
* Get tier configuration
*/
public getTierConfig(): ITierConfig {
return this.config.tiers || {
essential: { minScore: 0.8, trimLevel: 'none' },
important: { minScore: 0.5, trimLevel: 'light' },
optional: { minScore: 0.2, trimLevel: 'aggressive' }
};
}
/**
* Get iterative configuration
*/
public getIterativeConfig(): IIterativeConfig {
return this.config.iterative || {
maxIterations: 5,
firstPassFileLimit: 10,
subsequentPassFileLimit: 5,
temperature: 0.3,
model: 'gpt-4-turbo-preview'
};
}
/**
* Clear the config cache (force reload on next access)
*/
public clearCache(): void {
this.configCache = null;
}
}
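A minimal usage sketch of the singleton (removed in this compare range) as the rest of this code drove it:

const configManager = ConfigManager.getInstance();
await configManager.initialize(process.cwd());
const maxTokens = configManager.getMaxTokens(); // 190000 unless overridden in npmextra.json
const commitConfig = configManager.getTaskConfig('commit'); // mode falls back to defaultMode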


@@ -1,391 +0,0 @@
import * as plugins from '../plugins.js';
import type {
IFileMetadata,
IFileDependencies,
IFileAnalysis,
IAnalysisResult,
TaskType,
IPrioritizationWeights,
ITierConfig,
} from './types.js';
/**
* ContextAnalyzer provides intelligent file selection and prioritization
* based on dependency analysis, task relevance, and configurable weights
*/
export class ContextAnalyzer {
private projectRoot: string;
private weights: Required<IPrioritizationWeights>;
private tiers: Required<ITierConfig>;
/**
* Creates a new ContextAnalyzer
* @param projectRoot - Root directory of the project
* @param weights - Prioritization weights
* @param tiers - Tier configuration
*/
constructor(
projectRoot: string,
weights: Partial<IPrioritizationWeights> = {},
tiers: Partial<ITierConfig> = {}
) {
this.projectRoot = projectRoot;
// Default weights
this.weights = {
dependencyWeight: weights.dependencyWeight ?? 0.3,
relevanceWeight: weights.relevanceWeight ?? 0.4,
efficiencyWeight: weights.efficiencyWeight ?? 0.2,
recencyWeight: weights.recencyWeight ?? 0.1,
};
// Default tiers
this.tiers = {
essential: tiers.essential ?? { minScore: 0.8, trimLevel: 'none' },
important: tiers.important ?? { minScore: 0.5, trimLevel: 'light' },
optional: tiers.optional ?? { minScore: 0.2, trimLevel: 'aggressive' },
};
}
/**
* Analyzes files for a specific task type
* @param metadata - Array of file metadata to analyze
* @param taskType - Type of task being performed
* @param changedFiles - Optional list of recently changed files (for commits)
* @returns Analysis result with scored files
*/
public async analyze(
metadata: IFileMetadata[],
taskType: TaskType,
changedFiles: string[] = []
): Promise<IAnalysisResult> {
const startTime = Date.now();
// Build dependency graph
const dependencyGraph = await this.buildDependencyGraph(metadata);
// Calculate centrality scores
this.calculateCentrality(dependencyGraph);
// Analyze each file
const files: IFileAnalysis[] = [];
for (const meta of metadata) {
const analysis = await this.analyzeFile(
meta,
taskType,
dependencyGraph,
changedFiles
);
files.push(analysis);
}
// Sort by importance score (highest first)
files.sort((a, b) => b.importanceScore - a.importanceScore);
const analysisDuration = Date.now() - startTime;
return {
taskType,
files,
dependencyGraph,
totalFiles: metadata.length,
analysisDuration,
};
}
/**
* Builds a dependency graph from file metadata
* @param metadata - Array of file metadata
* @returns Dependency graph as a map
*/
private async buildDependencyGraph(
metadata: IFileMetadata[]
): Promise<Map<string, IFileDependencies>> {
const graph = new Map<string, IFileDependencies>();
// Initialize graph entries
for (const meta of metadata) {
graph.set(meta.path, {
path: meta.path,
imports: [],
importedBy: [],
centrality: 0,
});
}
// Parse imports from each file
for (const meta of metadata) {
try {
const contents = await plugins.smartfile.fs.toStringSync(meta.path);
const imports = this.extractImports(contents, meta.path);
const deps = graph.get(meta.path)!;
deps.imports = imports;
// Update importedBy for imported files
for (const importPath of imports) {
const importedDeps = graph.get(importPath);
if (importedDeps) {
importedDeps.importedBy.push(meta.path);
}
}
} catch (error) {
console.warn(`Failed to parse imports from ${meta.path}:`, error.message);
}
}
return graph;
}
/**
* Extracts import statements from file contents
* @param contents - File contents
* @param filePath - Path of the file being analyzed
* @returns Array of absolute paths to imported files
*/
private extractImports(contents: string, filePath: string): string[] {
const imports: string[] = [];
const fileDir = plugins.path.dirname(filePath);
// Match various import patterns
const importRegex = /(?:import|export).*?from\s+['"](.+?)['"]/g;
let match;
while ((match = importRegex.exec(contents)) !== null) {
const importPath = match[1];
// Skip external modules
if (!importPath.startsWith('.')) {
continue;
}
// Resolve relative import to absolute path
let resolvedPath = plugins.path.resolve(fileDir, importPath);
// Handle various file extensions
const extensions = ['.ts', '.js', '.tsx', '.jsx', '/index.ts', '/index.js'];
let found = false;
for (const ext of extensions) {
const testPath = resolvedPath.endsWith(ext) ? resolvedPath : resolvedPath + ext;
try {
// Use synchronous file check to avoid async in this context
const fs = require('fs');
const exists = fs.existsSync(testPath);
if (exists) {
imports.push(testPath);
found = true;
break;
}
} catch (error) {
// Continue trying other extensions
}
}
if (!found && !resolvedPath.includes('.')) {
// Try with .ts extension as default
imports.push(resolvedPath + '.ts');
}
}
return imports;
}
/**
* Calculates centrality scores for all nodes in the dependency graph
* Uses a simplified PageRank-like algorithm
* @param graph - Dependency graph
*/
private calculateCentrality(graph: Map<string, IFileDependencies>): void {
const damping = 0.85;
const iterations = 10;
const nodeCount = graph.size;
// Initialize scores
const scores = new Map<string, number>();
for (const path of graph.keys()) {
scores.set(path, 1.0 / nodeCount);
}
// Iterative calculation
for (let i = 0; i < iterations; i++) {
const newScores = new Map<string, number>();
for (const [path, deps] of graph.entries()) {
let score = (1 - damping) / nodeCount;
// Add contributions from nodes that import this file
for (const importerPath of deps.importedBy) {
const importerDeps = graph.get(importerPath);
if (importerDeps) {
const importerScore = scores.get(importerPath) ?? 0;
const outgoingCount = importerDeps.imports.length || 1;
score += damping * (importerScore / outgoingCount);
}
}
newScores.set(path, score);
}
// Update scores
for (const [path, score] of newScores) {
scores.set(path, score);
}
}
// Normalize scores to 0-1 range
const maxScore = Math.max(...scores.values());
if (maxScore > 0) {
for (const deps of graph.values()) {
const score = scores.get(deps.path) ?? 0;
deps.centrality = score / maxScore;
}
}
}
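// In effect each iteration applies the PageRank update
//   score(f) = (1 - d) / N + d * sum over importers i of score(i) / outDegree(i)
// with d = 0.85 and N = graph.size, and the final pass rescales so the most
// central file scores 1.0. A file imported by many well-connected modules
// (e.g. a shared plugins.ts) therefore outranks a leaf module.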
/**
* Analyzes a single file
* @param meta - File metadata
* @param taskType - Task being performed
* @param graph - Dependency graph
* @param changedFiles - Recently changed files
* @returns File analysis
*/
private async analyzeFile(
meta: IFileMetadata,
taskType: TaskType,
graph: Map<string, IFileDependencies>,
changedFiles: string[]
): Promise<IFileAnalysis> {
const deps = graph.get(meta.path);
const centralityScore = deps?.centrality ?? 0;
// Calculate task-specific relevance
const relevanceScore = this.calculateRelevance(meta, taskType);
// Calculate efficiency (information per token)
const efficiencyScore = this.calculateEfficiency(meta);
// Calculate recency (for commit tasks)
const recencyScore = this.calculateRecency(meta, changedFiles);
// Calculate combined importance score
const importanceScore =
relevanceScore * this.weights.relevanceWeight +
centralityScore * this.weights.dependencyWeight +
efficiencyScore * this.weights.efficiencyWeight +
recencyScore * this.weights.recencyWeight;
// Assign tier
const tier = this.assignTier(importanceScore);
return {
path: meta.path,
relevanceScore,
centralityScore,
efficiencyScore,
recencyScore,
importanceScore,
tier,
reason: this.generateReason(meta, taskType, importanceScore, tier),
};
}
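// Worked example with the default weights (relevance 0.4, dependency 0.3,
// efficiency 0.2, recency 0.1): a changed file scoring relevance 0.8,
// centrality 0.5, efficiency 0.6, recency 1.0 yields
//   0.8*0.4 + 0.5*0.3 + 0.6*0.2 + 1.0*0.1 = 0.32 + 0.15 + 0.12 + 0.10 = 0.69,
// which falls in the 'important' tier (0.5 <= score < 0.8 by default).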
/**
* Calculates task-specific relevance score
*/
private calculateRelevance(meta: IFileMetadata, taskType: TaskType): number {
const relativePath = meta.relativePath.toLowerCase();
let score = 0.5; // Base score
// README generation - prioritize public APIs and main exports
if (taskType === 'readme') {
if (relativePath.includes('index.ts')) score += 0.3;
if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.2; // Root level exports
if (relativePath.includes('test/')) score -= 0.3;
if (relativePath.includes('classes/')) score += 0.1;
if (relativePath.includes('interfaces/')) score += 0.1;
}
// Commit messages - prioritize changed files and their dependencies
if (taskType === 'commit') {
if (relativePath.includes('test/')) score -= 0.2;
// Recency will handle changed files
}
// Description generation - prioritize main exports and core interfaces
if (taskType === 'description') {
if (relativePath.includes('index.ts')) score += 0.4;
if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.3;
if (relativePath.includes('test/')) score -= 0.4;
if (relativePath.includes('interfaces/')) score += 0.2;
}
return Math.max(0, Math.min(1, score));
}
/**
* Calculates efficiency score (information density)
*/
private calculateEfficiency(meta: IFileMetadata): number {
// Prefer files that are not too large (good signal-to-noise ratio)
const optimalSize = 5000; // ~1250 tokens
const distance = Math.abs(meta.estimatedTokens - optimalSize);
const normalized = Math.max(0, 1 - distance / optimalSize);
return normalized;
}
/**
* Calculates recency score for changed files
*/
private calculateRecency(meta: IFileMetadata, changedFiles: string[]): number {
if (changedFiles.length === 0) {
return 0;
}
// Check if this file was changed
const isChanged = changedFiles.some((changed) => changed === meta.path);
return isChanged ? 1.0 : 0.0;
}
/**
* Assigns a tier based on importance score
*/
private assignTier(score: number): 'essential' | 'important' | 'optional' | 'excluded' {
if (score >= this.tiers.essential.minScore) return 'essential';
if (score >= this.tiers.important.minScore) return 'important';
if (score >= this.tiers.optional.minScore) return 'optional';
return 'excluded';
}
/**
* Generates a human-readable reason for the score
*/
private generateReason(
meta: IFileMetadata,
taskType: TaskType,
score: number,
tier: string
): string {
const reasons: string[] = [];
if (meta.relativePath.includes('index.ts')) {
reasons.push('main export file');
}
if (meta.relativePath.includes('test/')) {
reasons.push('test file (lower priority)');
}
if (taskType === 'readme' && meta.relativePath.match(/^ts\/[^\/]+\.ts$/)) {
reasons.push('root-level module');
}
reasons.push(`score: ${score.toFixed(2)}`);
reasons.push(`tier: ${tier}`);
return reasons.join(', ');
}
}
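A usage sketch (metadata would come from LazyFileLoader.scanFiles, as in EnhancedContext below):

const analyzer = new ContextAnalyzer(process.cwd()); // default weights and tiers
const analysis = await analyzer.analyze(metadata, 'readme');
for (const file of analysis.files.slice(0, 5)) {
  console.log(`${file.path}: ${file.reason}`); // e.g. "main export file, score: 0.82, tier: essential"
}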


@@ -1,286 +0,0 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { ICacheEntry, ICacheConfig } from './types.js';
import { logger } from '../logging.js';
/**
* ContextCache provides persistent caching of file contents and token counts
* with automatic invalidation on file changes
*/
export class ContextCache {
private cacheDir: string;
private cache: Map<string, ICacheEntry> = new Map();
private config: Required<ICacheConfig>;
private cacheIndexPath: string;
/**
* Creates a new ContextCache
* @param projectRoot - Root directory of the project
* @param config - Cache configuration
*/
constructor(projectRoot: string, config: Partial<ICacheConfig> = {}) {
this.config = {
enabled: config.enabled ?? true,
ttl: config.ttl ?? 3600, // 1 hour default
maxSize: config.maxSize ?? 100, // 100MB default
directory: config.directory ?? plugins.path.join(projectRoot, '.nogit', 'context-cache'),
};
this.cacheDir = this.config.directory;
this.cacheIndexPath = plugins.path.join(this.cacheDir, 'index.json');
}
/**
* Initializes the cache by loading from disk
*/
public async init(): Promise<void> {
if (!this.config.enabled) {
return;
}
// Ensure cache directory exists
await plugins.smartfile.fs.ensureDir(this.cacheDir);
// Load cache index if it exists
try {
const indexExists = await plugins.smartfile.fs.fileExists(this.cacheIndexPath);
if (indexExists) {
const indexContent = await plugins.smartfile.fs.toStringSync(this.cacheIndexPath);
const indexData = JSON.parse(indexContent) as ICacheEntry[];
if (Array.isArray(indexData)) {
for (const entry of indexData) {
this.cache.set(entry.path, entry);
}
}
}
} catch (error) {
console.warn('Failed to load cache index:', error.message);
// Start with empty cache if loading fails
}
// Clean up expired and invalid entries
await this.cleanup();
}
/**
* Gets a cached entry if it's still valid
* @param filePath - Absolute path to the file
* @returns Cache entry if valid, null otherwise
*/
public async get(filePath: string): Promise<ICacheEntry | null> {
if (!this.config.enabled) {
return null;
}
const entry = this.cache.get(filePath);
if (!entry) {
return null;
}
// Check if entry is expired
const now = Date.now();
if (now - entry.cachedAt > this.config.ttl * 1000) {
this.cache.delete(filePath);
return null;
}
// Check if file has been modified
try {
const stats = await fs.promises.stat(filePath);
const currentMtime = Math.floor(stats.mtimeMs);
if (currentMtime !== entry.mtime) {
// File has changed, invalidate cache
this.cache.delete(filePath);
return null;
}
return entry;
} catch (error) {
// File doesn't exist anymore
this.cache.delete(filePath);
return null;
}
}
/**
* Stores a cache entry
* @param entry - Cache entry to store
*/
public async set(entry: ICacheEntry): Promise<void> {
if (!this.config.enabled) {
return;
}
this.cache.set(entry.path, entry);
// Check cache size and evict old entries if needed
await this.enforceMaxSize();
// Persist to disk (async, don't await)
this.persist().catch((error) => {
console.warn('Failed to persist cache:', error.message);
});
}
/**
* Stores multiple cache entries
* @param entries - Array of cache entries
*/
public async setMany(entries: ICacheEntry[]): Promise<void> {
if (!this.config.enabled) {
return;
}
for (const entry of entries) {
this.cache.set(entry.path, entry);
}
await this.enforceMaxSize();
await this.persist();
}
/**
* Checks if a file is cached and valid
* @param filePath - Absolute path to the file
* @returns True if cached and valid
*/
public async has(filePath: string): Promise<boolean> {
const entry = await this.get(filePath);
return entry !== null;
}
/**
* Gets cache statistics
*/
public getStats(): {
entries: number;
totalSize: number;
oldestEntry: number | null;
newestEntry: number | null;
} {
let totalSize = 0;
let oldestEntry: number | null = null;
let newestEntry: number | null = null;
for (const entry of this.cache.values()) {
totalSize += entry.contents.length;
if (oldestEntry === null || entry.cachedAt < oldestEntry) {
oldestEntry = entry.cachedAt;
}
if (newestEntry === null || entry.cachedAt > newestEntry) {
newestEntry = entry.cachedAt;
}
}
return {
entries: this.cache.size,
totalSize,
oldestEntry,
newestEntry,
};
}
/**
* Clears all cache entries
*/
public async clear(): Promise<void> {
this.cache.clear();
await this.persist();
}
/**
* Clears specific cache entries
* @param filePaths - Array of file paths to clear
*/
public async clearPaths(filePaths: string[]): Promise<void> {
for (const path of filePaths) {
this.cache.delete(path);
}
await this.persist();
}
/**
* Cleans up expired and invalid cache entries
*/
private async cleanup(): Promise<void> {
const now = Date.now();
const toDelete: string[] = [];
for (const [path, entry] of this.cache.entries()) {
// Check expiration
if (now - entry.cachedAt > this.config.ttl * 1000) {
toDelete.push(path);
continue;
}
// Check if file still exists and hasn't changed
try {
const stats = await fs.promises.stat(path);
const currentMtime = Math.floor(stats.mtimeMs);
if (currentMtime !== entry.mtime) {
toDelete.push(path);
}
} catch (error) {
// File doesn't exist
toDelete.push(path);
}
}
for (const path of toDelete) {
this.cache.delete(path);
}
if (toDelete.length > 0) {
await this.persist();
}
}
/**
* Enforces maximum cache size by evicting oldest entries
*/
private async enforceMaxSize(): Promise<void> {
const stats = this.getStats();
const maxSizeBytes = this.config.maxSize * 1024 * 1024; // Convert MB to bytes
if (stats.totalSize <= maxSizeBytes) {
return;
}
// Sort entries by age (oldest first)
const entries = Array.from(this.cache.entries()).sort(
(a, b) => a[1].cachedAt - b[1].cachedAt
);
// Remove oldest entries until we're under the limit
let currentSize = stats.totalSize;
for (const [path, entry] of entries) {
if (currentSize <= maxSizeBytes) {
break;
}
currentSize -= entry.contents.length;
this.cache.delete(path);
}
}
/**
* Persists cache index to disk
*/
private async persist(): Promise<void> {
if (!this.config.enabled) {
return;
}
try {
const entries = Array.from(this.cache.values());
const content = JSON.stringify(entries, null, 2);
await plugins.smartfile.memory.toFs(content, this.cacheIndexPath);
} catch (error) {
console.warn('Failed to persist cache index:', error.message);
}
}
}
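A minimal usage sketch (the path and the rough token estimate are illustrative):

const cache = new ContextCache(process.cwd(), { ttl: 600 });
await cache.init();
const filePath = '/abs/path/ts/index.ts'; // hypothetical
if (!(await cache.has(filePath))) {
  const contents = await fs.promises.readFile(filePath, 'utf8');
  const stats = await fs.promises.stat(filePath);
  await cache.set({
    path: filePath,
    contents,
    tokenCount: Math.ceil(contents.length / 4), // rough estimate, as elsewhere in this codebase
    mtime: Math.floor(stats.mtimeMs), // must match the file's mtime for get() to treat the entry as valid
    cachedAt: Date.now(),
  });
}
const entry = await cache.get(filePath); // null once expired or after the file changes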


@@ -1,310 +0,0 @@
import * as plugins from '../plugins.js';
import type { ITrimConfig, ContextMode } from './types.js';
/**
* Class responsible for trimming file contents to reduce token usage
* while preserving important information for context
*/
export class ContextTrimmer {
private config: ITrimConfig;
/**
* Create a new ContextTrimmer with the given configuration
* @param config The trimming configuration
*/
constructor(config?: ITrimConfig) {
this.config = {
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true,
...config
};
}
/**
* Trim a file's contents based on the configuration
* @param filePath The path to the file
* @param content The file's contents
* @param mode The context mode to use
* @returns The trimmed file contents
*/
public trimFile(filePath: string, content: string, mode: ContextMode = 'trimmed'): string {
// If mode is 'full', return the original content
if (mode === 'full') {
return content;
}
// Process based on file type
if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
return this.trimTypeScriptFile(content);
} else if (filePath.endsWith('.md')) {
return this.trimMarkdownFile(content);
} else if (filePath.endsWith('.json')) {
return this.trimJsonFile(content);
}
// Default to returning the original content for unknown file types
return content;
}
/**
* Trim a TypeScript file to reduce token usage
* @param content The TypeScript file contents
* @returns The trimmed file contents
*/
private trimTypeScriptFile(content: string): string {
let result = content;
// Step 1: Preserve JSDoc comments if configured
const jsDocComments: string[] = [];
if (this.config.preserveJSDoc) {
const jsDocRegex = /\/\*\*[\s\S]*?\*\//g;
const matches = result.match(jsDocRegex) || [];
jsDocComments.push(...matches);
}
// Step 2: Remove comments if configured
if (this.config.removeComments) {
// Remove single-line comments
result = result.replace(/\/\/.*$/gm, '');
// Remove multi-line comments (except JSDoc if preserveJSDoc is true)
if (!this.config.preserveJSDoc) {
result = result.replace(/\/\*[\s\S]*?\*\//g, '');
} else {
// Only remove non-JSDoc comments
result = result.replace(/\/\*(?!\*)[\s\S]*?\*\//g, '');
}
}
// Step 3: Remove function implementations if configured
if (this.config.removeImplementations) {
// Match function and method bodies
result = result.replace(
/(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, funcType, body, end) => {
// Keep function signature and opening brace, replace body with comment
return `${start} /* implementation removed */ ${end}`;
}
);
// Match arrow function bodies
result = result.replace(
/(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
(match, start, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
// Match method declarations
result = result.replace(
/(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
(match, start, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
// Match class methods
result = result.replace(
/(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, modifier, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
} else if (this.config.maxFunctionLines && this.config.maxFunctionLines > 0) {
// If not removing implementations completely, limit the number of lines
// Match function and method bodies
result = result.replace(
/(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, funcType, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match arrow function bodies
result = result.replace(
/(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
(match, start, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match method declarations
result = result.replace(
/(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
(match, start, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match class methods
result = result.replace(
/(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, modifier, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
}
// Step 4: Remove blank lines if configured
if (this.config.removeBlankLines) {
result = result.replace(/^\s*[\r\n]/gm, '');
}
// Step 5: Restore preserved JSDoc comments
if (this.config.preserveJSDoc && jsDocComments.length > 0) {
// This is a placeholder; we already preserved JSDoc comments in the regex steps
}
return result;
}
/**
* Limit a function body to a maximum number of lines
* @param start The function signature and opening brace
* @param body The function body
* @param end The closing brace
* @returns The limited function body
*/
private limitFunctionBody(start: string, body: string, end: string): string {
const lines = body.split('\n');
if (lines.length > this.config.maxFunctionLines!) {
const limitedBody = lines.slice(0, this.config.maxFunctionLines!).join('\n');
return `${start}${limitedBody}\n // ... (${lines.length - this.config.maxFunctionLines!} lines trimmed)\n${end}`;
}
return `${start}${body}${end}`;
}
/**
* Trim a Markdown file to reduce token usage
* @param content The Markdown file contents
* @returns The trimmed file contents
*/
private trimMarkdownFile(content: string): string {
// For markdown files, we generally want to keep most content
// but we can remove lengthy code blocks if needed
return content;
}
/**
* Trim a JSON file to reduce token usage
* @param content The JSON file contents
* @returns The trimmed file contents
*/
private trimJsonFile(content: string): string {
try {
// Parse the JSON
const json = JSON.parse(content);
// For package.json, keep only essential information
if ('name' in json && 'version' in json && 'dependencies' in json) {
const essentialKeys = [
'name', 'version', 'description', 'author', 'license',
'main', 'types', 'exports', 'type'
];
const trimmedJson: any = {};
essentialKeys.forEach(key => {
if (key in json) {
trimmedJson[key] = json[key];
}
});
// Add dependency information without versions
if ('dependencies' in json) {
trimmedJson.dependencies = Object.keys(json.dependencies).reduce((acc, dep) => {
acc[dep] = '*'; // Replace version with wildcard
return acc;
}, {} as Record<string, string>);
}
// Return the trimmed JSON
return JSON.stringify(trimmedJson, null, 2);
}
// For other JSON files, leave as is
return content;
} catch (error) {
// If there's an error parsing the JSON, return the original content
return content;
}
}
/**
* Update the trimmer configuration
* @param config The new configuration to apply
*/
public updateConfig(config: ITrimConfig): void {
this.config = {
...this.config,
...config
};
}
/**
* Trim a file based on its importance tier
* @param filePath The path to the file
* @param content The file's contents
* @param level The trimming level to apply ('none', 'light', 'aggressive')
* @returns The trimmed file contents
*/
public trimFileWithLevel(
filePath: string,
content: string,
level: 'none' | 'light' | 'aggressive'
): string {
// No trimming for essential files
if (level === 'none') {
return content;
}
// Create a temporary config based on level
const originalConfig = { ...this.config };
try {
if (level === 'light') {
// Light trimming: preserve signatures, remove only complex implementations
this.config = {
...this.config,
removeImplementations: false,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 10,
removeComments: false,
removeBlankLines: true
};
} else if (level === 'aggressive') {
// Aggressive trimming: remove all implementations, keep only signatures
this.config = {
...this.config,
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 3,
removeComments: true,
removeBlankLines: true
};
}
// Process based on file type
let result = content;
if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
result = this.trimTypeScriptFile(content);
} else if (filePath.endsWith('.md')) {
result = this.trimMarkdownFile(content);
} else if (filePath.endsWith('.json')) {
result = this.trimJsonFile(content);
}
return result;
} finally {
// Restore original config
this.config = originalConfig;
}
}
}
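A usage sketch of the two tier-driven levels consumed by EnhancedContext below (the path is illustrative, and Node's fs/promises is assumed for reading):

const trimmer = new ContextTrimmer();
const source = await fs.promises.readFile('ts/example.ts', 'utf8');
const light = trimmer.trimFileWithLevel('ts/example.ts', source, 'light'); // bodies capped at 10 lines
const aggressive = trimmer.trimFileWithLevel('ts/example.ts', source, 'aggressive'); // implementations stripped to signatures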


@@ -1,332 +0,0 @@
import * as plugins from '../plugins.js';
import type { ContextMode, IContextResult, IFileInfo, TaskType, IFileMetadata } from './types.js';
import { ContextTrimmer } from './context-trimmer.js';
import { ConfigManager } from './config-manager.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
/**
* Enhanced ProjectContext that supports context optimization strategies
*/
export class EnhancedContext {
private projectDir: string;
private trimmer: ContextTrimmer;
private configManager: ConfigManager;
private lazyLoader: LazyFileLoader;
private cache: ContextCache;
private analyzer: ContextAnalyzer;
private contextMode: ContextMode = 'trimmed';
private tokenBudget: number = 190000; // Default for o4-mini
private contextResult: IContextResult = {
context: '',
tokenCount: 0,
includedFiles: [],
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0
};
/**
* Create a new EnhancedContext
* @param projectDirArg The project directory
*/
constructor(projectDirArg: string) {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
this.lazyLoader = new LazyFileLoader(projectDirArg);
this.cache = new ContextCache(projectDirArg, this.configManager.getCacheConfig());
this.analyzer = new ContextAnalyzer(
projectDirArg,
this.configManager.getPrioritizationWeights(),
this.configManager.getTierConfig()
);
}
/**
* Initialize the context builder
*/
public async initialize(): Promise<void> {
await this.configManager.initialize(this.projectDir);
this.tokenBudget = this.configManager.getMaxTokens();
this.trimmer.updateConfig(this.configManager.getTrimConfig());
await this.cache.init();
}
/**
* Set the context mode
* @param mode The context mode to use
*/
public setContextMode(mode: ContextMode): void {
this.contextMode = mode;
}
/**
* Set the token budget
* @param maxTokens The maximum tokens to use
*/
public setTokenBudget(maxTokens: number): void {
this.tokenBudget = maxTokens;
}
/**
* Convert files to context with smart analysis and prioritization
* @param metadata - File metadata to analyze
* @param taskType - Task type for context-aware prioritization
* @param mode - Context mode to use
* @returns Context string
*/
public async convertFilesToContextWithAnalysis(
metadata: IFileMetadata[],
taskType: TaskType,
mode: ContextMode = this.contextMode
): Promise<string> {
// Reset context result
this.contextResult = {
context: '',
tokenCount: 0,
includedFiles: [],
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0
};
// Analyze files for smart prioritization
const analysis = await this.analyzer.analyze(metadata, taskType, []);
// Sort files by importance score (highest first)
const sortedAnalysis = [...analysis.files].sort(
(a, b) => b.importanceScore - a.importanceScore
);
// Filter out excluded tier
const relevantFiles = sortedAnalysis.filter(f => f.tier !== 'excluded');
let totalTokenCount = 0;
let totalOriginalTokens = 0;
const processedFiles: string[] = [];
// Load files with cache support
for (const fileAnalysis of relevantFiles) {
try {
// Check cache first
let contents: string;
let originalTokenCount: number;
const cached = await this.cache.get(fileAnalysis.path);
if (cached) {
contents = cached.contents;
originalTokenCount = cached.tokenCount;
} else {
// Load file
const fileData = await plugins.smartfile.fs.toStringSync(fileAnalysis.path);
contents = fileData;
originalTokenCount = this.countTokens(contents);
// Cache it
await this.cache.set({
path: fileAnalysis.path,
contents,
tokenCount: originalTokenCount,
mtime: Date.now(),
cachedAt: Date.now()
});
}
totalOriginalTokens += originalTokenCount;
// Apply tier-based trimming
let processedContent = contents;
let trimLevel: 'none' | 'light' | 'aggressive' = 'light';
if (fileAnalysis.tier === 'essential') {
trimLevel = 'none';
} else if (fileAnalysis.tier === 'important') {
trimLevel = 'light';
} else if (fileAnalysis.tier === 'optional') {
trimLevel = 'aggressive';
}
// Apply trimming based on mode and tier
if (mode !== 'full' && trimLevel !== 'none') {
const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
processedContent = this.trimmer.trimFileWithLevel(
relativePath,
contents,
trimLevel
);
}
// Calculate token count
const processedTokenCount = this.countTokens(processedContent);
// Check token budget
if (totalTokenCount + processedTokenCount > this.tokenBudget) {
// We don't have budget for this file
const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
this.contextResult.excludedFiles.push({
path: fileAnalysis.path,
contents,
relativePath,
tokenCount: originalTokenCount,
importanceScore: fileAnalysis.importanceScore
});
continue;
}
// Format the file for context
const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
const formattedContent = `
====== START OF FILE ${relativePath} ======
${processedContent}
====== END OF FILE ${relativePath} ======
`;
processedFiles.push(formattedContent);
totalTokenCount += processedTokenCount;
// Track file in appropriate list
const fileInfo: IFileInfo = {
path: fileAnalysis.path,
contents: processedContent,
relativePath,
tokenCount: processedTokenCount,
importanceScore: fileAnalysis.importanceScore
};
if (trimLevel === 'none' || processedContent === contents) {
this.contextResult.includedFiles.push(fileInfo);
} else {
this.contextResult.trimmedFiles.push(fileInfo);
this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
}
} catch (error) {
console.warn(`Failed to process file ${fileAnalysis.path}:`, error.message);
}
}
// Join all processed files
const context = processedFiles.join('\n');
// Update context result
this.contextResult.context = context;
this.contextResult.tokenCount = totalTokenCount;
return context;
}
/**
* Build context for the project using smart analysis
* @param taskType Task type for context-aware prioritization (defaults to 'description')
*/
public async buildContext(taskType?: TaskType): Promise<IContextResult> {
// Initialize if needed
if (this.tokenBudget === 0) {
await this.initialize();
}
// Smart context building always requires a task type for optimal prioritization
// Default to 'description' if not provided
const effectiveTaskType = taskType || 'description';
// Get task-specific configuration
const taskConfig = this.configManager.getTaskConfig(effectiveTaskType);
if (taskConfig.mode) {
this.setContextMode(taskConfig.mode);
}
// Build globs for scanning
const includeGlobs = taskConfig?.includePaths?.map(p => `${p}/**/*.ts`) || [
'ts/**/*.ts',
'ts*/**/*.ts'
];
// Add config files
const configGlobs = [
'package.json',
'readme.md',
'readme.hints.md',
'npmextra.json'
];
// Scan files for metadata (fast, doesn't load contents)
const metadata = await this.lazyLoader.scanFiles([...configGlobs, ...includeGlobs]);
// Use smart analyzer to build context with intelligent prioritization
await this.convertFilesToContextWithAnalysis(metadata, effectiveTaskType, this.contextMode);
return this.contextResult;
}
/**
* Update the context with git diff information for commit tasks
* @param gitDiff The git diff to include
*/
public updateWithGitDiff(gitDiff: string): IContextResult {
// If we don't have a context yet, return empty result
if (!this.contextResult.context) {
return this.contextResult;
}
// Add git diff to context
const diffSection = `
====== GIT DIFF ======
${gitDiff}
====== END GIT DIFF ======
`;
const diffTokenCount = this.countTokens(diffSection);
// Update context and token count
this.contextResult.context += diffSection;
this.contextResult.tokenCount += diffTokenCount;
return this.contextResult;
}
/**
* Count tokens in a string
* @param text The text to count tokens for
* @param model The model to use for token counting
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
try {
// Use the gpt-tokenizer library to count tokens
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
console.error('Error counting tokens:', error);
// Provide a rough estimate if tokenization fails
return Math.ceil(text.length / 4);
}
}
/**
* Get the context result
*/
public getContextResult(): IContextResult {
return this.contextResult;
}
/**
* Get the token count for the current context
*/
public getTokenCount(): number {
return this.contextResult.tokenCount;
}
/**
* Get both the context string and its token count
*/
public getContextWithTokenCount(): { context: string; tokenCount: number } {
return {
context: this.contextResult.context,
tokenCount: this.contextResult.tokenCount
};
}
}
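A minimal end-to-end sketch of this class as exported from the context index below:

const ctx = new EnhancedContext(process.cwd());
await ctx.initialize(); // loads npmextra.json settings and warms the cache
const result = await ctx.buildContext('readme');
console.log(`${result.tokenCount} tokens: ${result.includedFiles.length} full, ` +
  `${result.trimmedFiles.length} trimmed, ${result.excludedFiles.length} excluded ` +
  `(saved ${result.tokenSavings} tokens)`);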


@@ -1,62 +0,0 @@
import { EnhancedContext } from './enhanced-context.js';
import { TaskContextFactory } from './task-context-factory.js';
import { ConfigManager } from './config-manager.js';
import { ContextTrimmer } from './context-trimmer.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
import type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType,
ICacheConfig,
IAnalyzerConfig,
IPrioritizationWeights,
ITierConfig,
ITierSettings,
IFileMetadata,
ICacheEntry,
IFileDependencies,
IFileAnalysis,
IAnalysisResult,
IIterativeConfig,
IIterativeContextResult
} from './types.js';
export {
// Classes
EnhancedContext,
TaskContextFactory,
ConfigManager,
ContextTrimmer,
LazyFileLoader,
ContextCache,
ContextAnalyzer,
};
// Types
export type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType,
ICacheConfig,
IAnalyzerConfig,
IPrioritizationWeights,
ITierConfig,
ITierSettings,
IFileMetadata,
ICacheEntry,
IFileDependencies,
IFileAnalysis,
IAnalysisResult,
IIterativeConfig,
IIterativeContextResult
};


@@ -1,495 +0,0 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import { logger } from '../logging.js';
import type {
TaskType,
IFileMetadata,
IFileInfo,
IIterativeContextResult,
IIterationState,
IFileSelectionDecision,
IContextSufficiencyDecision,
IIterativeConfig,
} from './types.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
import { ConfigManager } from './config-manager.js';
/**
* Iterative context builder that uses AI to intelligently select files
* across multiple iterations until sufficient context is gathered
*/
export class IterativeContextBuilder {
private projectRoot: string;
private lazyLoader: LazyFileLoader;
private cache: ContextCache;
private analyzer: ContextAnalyzer;
private config: Required<IIterativeConfig>;
private tokenBudget: number = 190000;
private openaiInstance: plugins.smartai.OpenAiProvider;
private externalOpenaiInstance?: plugins.smartai.OpenAiProvider;
/**
* Creates a new IterativeContextBuilder
* @param projectRoot - Root directory of the project
* @param config - Iterative configuration
* @param openaiInstance - Optional pre-configured OpenAI provider instance
*/
constructor(
projectRoot: string,
config?: Partial<IIterativeConfig>,
openaiInstance?: plugins.smartai.OpenAiProvider
) {
this.projectRoot = projectRoot;
this.lazyLoader = new LazyFileLoader(projectRoot);
this.cache = new ContextCache(projectRoot);
this.analyzer = new ContextAnalyzer(projectRoot);
this.externalOpenaiInstance = openaiInstance;
// Default configuration
this.config = {
maxIterations: config?.maxIterations ?? 5,
firstPassFileLimit: config?.firstPassFileLimit ?? 10,
subsequentPassFileLimit: config?.subsequentPassFileLimit ?? 5,
temperature: config?.temperature ?? 0.3,
model: config?.model ?? 'gpt-4-turbo-preview',
};
}
/**
* Initialize the builder
*/
public async initialize(): Promise<void> {
await this.cache.init();
const configManager = ConfigManager.getInstance();
await configManager.initialize(this.projectRoot);
this.tokenBudget = configManager.getMaxTokens();
// Use external OpenAI instance if provided, otherwise create a new one
if (this.externalOpenaiInstance) {
this.openaiInstance = this.externalOpenaiInstance;
} else {
// Initialize OpenAI instance from environment
const qenvInstance = new plugins.qenv.Qenv();
const openaiToken = await qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN');
if (!openaiToken) {
throw new Error('OPENAI_TOKEN environment variable is required for iterative context building');
}
this.openaiInstance = new plugins.smartai.OpenAiProvider({
openaiToken,
});
await this.openaiInstance.start();
}
}
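// Typical invocation (a sketch; buildContextIteratively below drives the loop):
//   const builder = new IterativeContextBuilder(process.cwd(), { maxIterations: 3 });
//   await builder.initialize(); // needs OPENAI_TOKEN unless a provider instance is injected
//   const result = await builder.buildContextIteratively('commit', gitDiffString);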
/**
* Build context iteratively using AI decision making
* @param taskType - Type of task being performed
* @param additionalContext - Optional additional context (e.g., git diff for commit tasks)
* @returns Complete iterative context result
*/
public async buildContextIteratively(taskType: TaskType, additionalContext?: string): Promise<IIterativeContextResult> {
const startTime = Date.now();
logger.log('info', '🤖 Starting iterative context building...');
logger.log('info', ` Task: ${taskType}, Budget: ${this.tokenBudget} tokens, Max iterations: ${this.config.maxIterations}`);
// Phase 1: Scan project files for metadata
logger.log('info', '📋 Scanning project files...');
const metadata = await this.scanProjectFiles(taskType);
const totalEstimatedTokens = metadata.reduce((sum, m) => sum + m.estimatedTokens, 0);
logger.log('info', ` Found ${metadata.length} files (~${totalEstimatedTokens} estimated tokens)`);
// Phase 2: Analyze files for initial prioritization
logger.log('info', '🔍 Analyzing file dependencies and importance...');
const analysis = await this.analyzer.analyze(metadata, taskType, []);
logger.log('info', ` Analysis complete in ${analysis.analysisDuration}ms`);
// Track state across iterations
const iterations: IIterationState[] = [];
let totalTokensUsed = 0;
let apiCallCount = 0;
let loadedContent = '';
const includedFiles: IFileInfo[] = [];
// If additional context (e.g., git diff) is provided, prepend it
if (additionalContext) {
const diffSection = `
====== GIT DIFF ======
${additionalContext}
====== END OF GIT DIFF ======
`;
loadedContent = diffSection;
const diffTokens = this.countTokens(diffSection);
totalTokensUsed += diffTokens;
logger.log('info', `📝 Added git diff to context (${diffTokens} tokens)`);
}
// Phase 3: Iterative file selection and loading
for (let iteration = 1; iteration <= this.config.maxIterations; iteration++) {
const iterationStart = Date.now();
logger.log('info', `\n🤔 Iteration ${iteration}/${this.config.maxIterations}: Asking AI which files to examine...`);
const remainingBudget = this.tokenBudget - totalTokensUsed;
logger.log('info', ` Token budget remaining: ${remainingBudget}/${this.tokenBudget} (${Math.round((remainingBudget / this.tokenBudget) * 100)}%)`);
// Get AI decision on which files to load
const decision = await this.getFileSelectionDecision(
metadata,
analysis.files.slice(0, 30), // Top 30 files by importance
taskType,
iteration,
totalTokensUsed,
remainingBudget,
loadedContent
);
apiCallCount++;
logger.log('info', ` AI reasoning: ${decision.reasoning}`);
logger.log('info', ` AI requested ${decision.filesToLoad.length} files`);
// Load requested files
const iterationFiles: IFileInfo[] = [];
let iterationTokens = 0;
if (decision.filesToLoad.length > 0) {
logger.log('info', '📥 Loading requested files...');
for (const filePath of decision.filesToLoad) {
try {
const fileInfo = await this.loadFile(filePath);
if (totalTokensUsed + fileInfo.tokenCount! <= this.tokenBudget) {
const formattedFile = this.formatFileForContext(fileInfo);
loadedContent += formattedFile;
includedFiles.push(fileInfo);
iterationFiles.push(fileInfo);
iterationTokens += fileInfo.tokenCount!;
totalTokensUsed += fileInfo.tokenCount!;
logger.log('info', ` ✓ ${fileInfo.relativePath} (${fileInfo.tokenCount} tokens)`);
} else {
logger.log('warn', `${fileInfo.relativePath} - would exceed budget, skipping`);
}
} catch (error) {
logger.log('warn', ` ✗ Failed to load ${filePath}: ${error.message}`);
}
}
}
// Record iteration state
const iterationDuration = Date.now() - iterationStart;
iterations.push({
iteration,
filesLoaded: iterationFiles,
tokensUsed: iterationTokens,
totalTokensUsed,
decision,
duration: iterationDuration,
});
logger.log('info', ` Iteration ${iteration} complete: ${iterationFiles.length} files loaded, ${iterationTokens} tokens used`);
// Check if we should continue
if (totalTokensUsed >= this.tokenBudget * 0.95) {
logger.log('warn', '⚠️ Approaching token budget limit, stopping iterations');
break;
}
// Ask AI if context is sufficient
if (iteration < this.config.maxIterations) {
logger.log('info', '🤔 Asking AI if context is sufficient...');
const sufficiencyDecision = await this.evaluateContextSufficiency(
loadedContent,
taskType,
iteration,
totalTokensUsed,
remainingBudget - iterationTokens
);
apiCallCount++;
logger.log('info', ` AI decision: ${sufficiencyDecision.sufficient ? '✅ SUFFICIENT' : '⏭️ NEEDS MORE'}`);
logger.log('info', ` Reasoning: ${sufficiencyDecision.reasoning}`);
if (sufficiencyDecision.sufficient) {
logger.log('ok', '✅ Context building complete - AI determined context is sufficient');
break;
}
}
}
const totalDuration = Date.now() - startTime;
logger.log('ok', `\n✅ Iterative context building complete!`);
logger.log('info', ` Files included: ${includedFiles.length}`);
logger.log('info', ` Token usage: ${totalTokensUsed}/${this.tokenBudget} (${Math.round((totalTokensUsed / this.tokenBudget) * 100)}%)`);
logger.log('info', ` Iterations: ${iterations.length}, API calls: ${apiCallCount}`);
logger.log('info', ` Total duration: ${(totalDuration / 1000).toFixed(2)}s`);
return {
context: loadedContent,
tokenCount: totalTokensUsed,
includedFiles,
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0,
iterationCount: iterations.length,
iterations,
apiCallCount,
totalDuration,
};
}
/**
* Scan project files based on task type
*/
private async scanProjectFiles(taskType: TaskType): Promise<IFileMetadata[]> {
const configManager = ConfigManager.getInstance();
const taskConfig = configManager.getTaskConfig(taskType);
const includeGlobs = taskConfig?.includePaths?.map(p => `${p}/**/*.ts`) || [
'ts/**/*.ts',
'ts*/**/*.ts'
];
const configGlobs = [
'package.json',
'readme.md',
'readme.hints.md',
'npmextra.json'
];
return await this.lazyLoader.scanFiles([...configGlobs, ...includeGlobs]);
}
/**
* Get AI decision on which files to load
*/
private async getFileSelectionDecision(
allMetadata: IFileMetadata[],
analyzedFiles: any[],
taskType: TaskType,
iteration: number,
tokensUsed: number,
remainingBudget: number,
loadedContent: string
): Promise<IFileSelectionDecision> {
const isFirstIteration = iteration === 1;
const fileLimit = isFirstIteration
? this.config.firstPassFileLimit
: this.config.subsequentPassFileLimit;
const systemPrompt = this.buildFileSelectionPrompt(
allMetadata,
analyzedFiles,
taskType,
iteration,
tokensUsed,
remainingBudget,
loadedContent,
fileLimit
);
const response = await this.openaiInstance.chat({
systemMessage: `You are an AI assistant that helps select the most relevant files for code analysis.
You must respond ONLY with valid JSON that can be parsed with JSON.parse().
Do not wrap the JSON in markdown code blocks or add any other text.`,
userMessage: systemPrompt,
messageHistory: [],
});
// Parse JSON response, handling potential markdown formatting
const content = response.message.replace(/```(?:json)?/g, '').trim();
const parsed = JSON.parse(content);
return {
reasoning: parsed.reasoning || 'No reasoning provided',
filesToLoad: parsed.files_to_load || [],
estimatedTokensNeeded: parsed.estimated_tokens_needed,
};
}
/**
* Build prompt for file selection
*/
private buildFileSelectionPrompt(
metadata: IFileMetadata[],
analyzedFiles: any[],
taskType: TaskType,
iteration: number,
tokensUsed: number,
remainingBudget: number,
loadedContent: string,
fileLimit: number
): string {
const taskDescriptions = {
readme: 'generating a comprehensive README that explains the project\'s purpose, features, and API',
commit: 'analyzing code changes to generate an intelligent commit message',
description: 'generating a concise project description for package.json',
};
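// Recover relative paths of already-loaded files from their "====== START OF FILE" markers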
const alreadyLoadedFiles = loadedContent
? loadedContent.split('\n======').slice(1).map(section => {
const match = section.match(/START OF FILE (.+?) ======/);
return match ? match[1] : '';
}).filter(Boolean)
: [];
const availableFiles = metadata
.filter(m => !alreadyLoadedFiles.includes(m.relativePath))
.map(m => {
const analysis = analyzedFiles.find(a => a.path === m.path);
return `- ${m.relativePath} (${m.size} bytes, ~${m.estimatedTokens} tokens${analysis ? `, importance: ${analysis.importanceScore.toFixed(2)}` : ''})`;
})
.join('\n');
return `You are building context for ${taskDescriptions[taskType]} in a TypeScript project.
ITERATION: ${iteration}
TOKENS USED: ${tokensUsed}/${tokensUsed + remainingBudget} (${Math.round((tokensUsed / (tokensUsed + remainingBudget)) * 100)}%)
REMAINING BUDGET: ${remainingBudget} tokens
${alreadyLoadedFiles.length > 0 ? `FILES ALREADY LOADED:\n${alreadyLoadedFiles.map(f => `- ${f}`).join('\n')}\n\n` : ''}AVAILABLE FILES (not yet loaded):
${availableFiles}
Your task: Select up to ${fileLimit} files that will give you the MOST understanding for this ${taskType} task.
${iteration === 1 ? `This is the FIRST iteration. Focus on:
- Main entry points (index.ts, main exports)
- Core classes and interfaces
- Package configuration
` : `This is iteration ${iteration}. You've already seen some files. Now focus on:
- Files that complement what you've already loaded
- Dependencies of already-loaded files
- Missing pieces for complete understanding
`}
Consider:
1. File importance scores (if provided)
2. File paths (ts/index.ts is likely more important than ts/internal/utils.ts)
3. Token efficiency (prefer smaller files if they provide good information)
4. Remaining budget (${remainingBudget} tokens)
Respond in JSON format:
{
"reasoning": "Brief explanation of why you're selecting these files",
"files_to_load": ["path/to/file1.ts", "path/to/file2.ts"],
"estimated_tokens_needed": 15000
}`;
}
/**
* Evaluate if current context is sufficient
*/
private async evaluateContextSufficiency(
loadedContent: string,
taskType: TaskType,
iteration: number,
tokensUsed: number,
remainingBudget: number
): Promise<IContextSufficiencyDecision> {
const prompt = `You have been building context for a ${taskType} task across ${iteration} iterations.
CURRENT STATE:
- Tokens used: ${tokensUsed}
- Remaining budget: ${remainingBudget}
- Files loaded: ${loadedContent.split('====== START OF FILE').length - 1}
CONTEXT SO FAR:
${loadedContent.substring(0, 3000)}... (truncated for brevity)
Question: Do you have SUFFICIENT context to successfully complete the ${taskType} task?
Consider:
- For README: Do you understand the project's purpose, main features, API surface, and usage patterns?
- For commit: Do you understand what changed and why?
- For description: Do you understand the project's core value proposition?
Respond in JSON format:
{
"sufficient": true or false,
"reasoning": "Detailed explanation of your decision"
}`;
const response = await this.openaiInstance.chat({
systemMessage: `You are an AI assistant that evaluates whether gathered context is sufficient for a task.
You must respond ONLY with valid JSON that can be parsed with JSON.parse().
Do not wrap the JSON in markdown code blocks or add any other text.`,
userMessage: prompt,
messageHistory: [],
});
// Parse JSON response, handling potential markdown formatting
const content = response.message.replace(/```(?:json)?/g, '').trim();
const parsed = JSON.parse(content);
return {
sufficient: parsed.sufficient || false,
reasoning: parsed.reasoning || 'No reasoning provided',
};
}
/**
* Load a single file with caching
*/
private async loadFile(filePath: string): Promise<IFileInfo> {
// Try cache first
const cached = await this.cache.get(filePath);
if (cached) {
return {
path: filePath,
relativePath: plugins.path.relative(this.projectRoot, filePath),
contents: cached.contents,
tokenCount: cached.tokenCount,
};
}
// Load from disk
const contents = plugins.smartfile.fs.toStringSync(filePath);
const tokenCount = this.countTokens(contents);
const relativePath = plugins.path.relative(this.projectRoot, filePath);
// Cache it
const stats = await fs.promises.stat(filePath);
await this.cache.set({
path: filePath,
contents,
tokenCount,
mtime: Math.floor(stats.mtimeMs),
cachedAt: Date.now(),
});
return {
path: filePath,
relativePath,
contents,
tokenCount,
};
}
/**
* Format a file for inclusion in context
*/
private formatFileForContext(file: IFileInfo): string {
return `
====== START OF FILE ${file.relativePath} ======
${file.contents}
====== END OF FILE ${file.relativePath} ======
`;
}
/**
* Count tokens in text
*/
private countTokens(text: string): number {
try {
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
return Math.ceil(text.length / 4);
}
}
}
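
For orientation, a minimal usage sketch of this builder (the project path is a placeholder; an ESM context with top-level await and a populated OPENAI_TOKEN are assumed):

import { IterativeContextBuilder } from './iterative-context-builder.js';

// Build context for a README task with a tighter iteration cap.
const builder = new IterativeContextBuilder('/path/to/project', {
  maxIterations: 3,
  firstPassFileLimit: 8,
});
await builder.initialize(); // reads token budget from ConfigManager, starts the OpenAI provider
const result = await builder.buildContextIteratively('readme');
console.log(
  `Included ${result.includedFiles.length} files over ${result.iterationCount} iterations ` +
  `(${result.tokenCount} tokens, ${result.apiCallCount} API calls)`
);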


@@ -1,191 +0,0 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { IFileMetadata, IFileInfo } from './types.js';
/**
* LazyFileLoader handles efficient file loading by:
* - Scanning files for metadata without loading contents
* - Providing fast file size and token estimates
* - Loading contents only when requested
* - Parallel loading of selected files
*/
export class LazyFileLoader {
private projectRoot: string;
private metadataCache: Map<string, IFileMetadata> = new Map();
/**
* Creates a new LazyFileLoader
* @param projectRoot - Root directory of the project
*/
constructor(projectRoot: string) {
this.projectRoot = projectRoot;
}
/**
* Scans files in given globs and creates metadata without loading contents
* @param globs - File patterns to scan (e.g., ['ts/**\/*.ts', 'test/**\/*.ts'])
* @returns Array of file metadata
*/
public async scanFiles(globs: string[]): Promise<IFileMetadata[]> {
const metadata: IFileMetadata[] = [];
for (const globPattern of globs) {
try {
const smartFiles = await plugins.smartfile.fs.fileTreeToObject(this.projectRoot, globPattern);
const fileArray = Array.isArray(smartFiles) ? smartFiles : [smartFiles];
for (const smartFile of fileArray) {
try {
const meta = await this.getMetadata(smartFile.path);
metadata.push(meta);
} catch (error) {
// Skip files that can't be read
console.warn(`Failed to get metadata for ${smartFile.path}:`, error.message);
}
}
} catch (error) {
// Skip patterns that don't match any files
console.warn(`No files found for pattern ${globPattern}`);
}
}
return metadata;
}
/**
* Gets metadata for a single file without loading contents
* @param filePath - Absolute path to the file
* @returns File metadata
*/
public async getMetadata(filePath: string): Promise<IFileMetadata> {
// Check cache first
if (this.metadataCache.has(filePath)) {
const cached = this.metadataCache.get(filePath)!;
const currentStats = await fs.promises.stat(filePath);
// Return cached if file hasn't changed
if (cached.mtime === Math.floor(currentStats.mtimeMs)) {
return cached;
}
}
// Get file stats
const stats = await fs.promises.stat(filePath);
const relativePath = plugins.path.relative(this.projectRoot, filePath);
// Estimate tokens: rough estimate of ~4 characters per token
// This is faster than reading and tokenizing the entire file
const estimatedTokens = Math.ceil(stats.size / 4);
const metadata: IFileMetadata = {
path: filePath,
relativePath,
size: stats.size,
mtime: Math.floor(stats.mtimeMs),
estimatedTokens,
};
// Cache the metadata
this.metadataCache.set(filePath, metadata);
return metadata;
}
/**
* Loads file contents for selected files in parallel
* @param metadata - Array of file metadata to load
* @param tokenizer - Function to calculate accurate token count
* @returns Array of complete file info with contents
*/
public async loadFiles(
metadata: IFileMetadata[],
tokenizer: (content: string) => number
): Promise<IFileInfo[]> {
// Load files in parallel
const loadPromises = metadata.map(async (meta) => {
try {
const contents = plugins.smartfile.fs.toStringSync(meta.path);
const tokenCount = tokenizer(contents);
const fileInfo: IFileInfo = {
path: meta.path,
relativePath: meta.relativePath,
contents,
tokenCount,
importanceScore: meta.importanceScore,
};
return fileInfo;
} catch (error) {
console.warn(`Failed to load file ${meta.path}:`, error.message);
return null;
}
});
// Wait for all loads to complete and filter out failures
const results = await Promise.all(loadPromises);
return results.filter((r): r is IFileInfo => r !== null);
}
/**
* Loads a single file with contents
* @param filePath - Absolute path to the file
* @param tokenizer - Function to calculate accurate token count
* @returns Complete file info with contents
*/
public async loadFile(
filePath: string,
tokenizer: (content: string) => number
): Promise<IFileInfo> {
const meta = await this.getMetadata(filePath);
const contents = plugins.smartfile.fs.toStringSync(filePath);
const tokenCount = tokenizer(contents);
const relativePath = plugins.path.relative(this.projectRoot, filePath);
return {
path: filePath,
relativePath,
contents,
tokenCount,
importanceScore: meta.importanceScore,
};
}
/**
* Updates importance scores for metadata entries
* @param scores - Map of file paths to importance scores
*/
public updateImportanceScores(scores: Map<string, number>): void {
for (const [path, score] of scores) {
const meta = this.metadataCache.get(path);
if (meta) {
meta.importanceScore = score;
}
}
}
/**
* Clears the metadata cache
*/
public clearCache(): void {
this.metadataCache.clear();
}
/**
* Gets total estimated tokens for all cached metadata
*/
public getTotalEstimatedTokens(): number {
let total = 0;
for (const meta of this.metadataCache.values()) {
total += meta.estimatedTokens;
}
return total;
}
/**
* Gets cached metadata entries
*/
public getCachedMetadata(): IFileMetadata[] {
return Array.from(this.metadataCache.values());
}
}
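
A short sketch of the intended two-phase flow (the path and the take-first-five selection are illustrative; gpt-tokenizer supplies the accurate count, as elsewhere in this module):

import { LazyFileLoader } from './lazy-file-loader.js';
import * as gptTokenizer from 'gpt-tokenizer';

const loader = new LazyFileLoader('/path/to/project');
// Phase 1: metadata only -- no file contents are read yet
const metadata = await loader.scanFiles(['ts/**/*.ts', 'package.json']);
// Phase 2: load a chosen subset in parallel with exact token counts
const selection = metadata.slice(0, 5); // the selection heuristic is up to the caller
const files = await loader.loadFiles(selection, (text) => gptTokenizer.encode(text).length);
console.log(`Loaded ${files.length} files; ~${loader.getTotalEstimatedTokens()} tokens estimated across all scanned files`);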


@@ -1,120 +0,0 @@
import * as plugins from '../plugins.js';
import { IterativeContextBuilder } from './iterative-context-builder.js';
import { ConfigManager } from './config-manager.js';
import type { IIterativeContextResult, TaskType } from './types.js';
/**
* Factory class for creating task-specific context using iterative context building
*/
export class TaskContextFactory {
private projectDir: string;
private configManager: ConfigManager;
private openaiInstance?: plugins.smartai.OpenAiProvider;
/**
* Create a new TaskContextFactory
* @param projectDirArg The project directory
* @param openaiInstance Optional pre-configured OpenAI provider instance
*/
constructor(projectDirArg: string, openaiInstance?: plugins.smartai.OpenAiProvider) {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
this.openaiInstance = openaiInstance;
}
/**
* Initialize the factory
*/
public async initialize(): Promise<void> {
await this.configManager.initialize(this.projectDir);
}
/**
* Create context for README generation
*/
public async createContextForReadme(): Promise<IIterativeContextResult> {
const iterativeBuilder = new IterativeContextBuilder(
this.projectDir,
this.configManager.getIterativeConfig(),
this.openaiInstance
);
await iterativeBuilder.initialize();
return await iterativeBuilder.buildContextIteratively('readme');
}
/**
* Create context for description generation
*/
public async createContextForDescription(): Promise<IIterativeContextResult> {
const iterativeBuilder = new IterativeContextBuilder(
this.projectDir,
this.configManager.getIterativeConfig(),
this.openaiInstance
);
await iterativeBuilder.initialize();
return await iterativeBuilder.buildContextIteratively('description');
}
/**
* Create context for commit message generation
* @param gitDiff Optional git diff to include in the context
*/
public async createContextForCommit(gitDiff?: string): Promise<IIterativeContextResult> {
const iterativeBuilder = new IterativeContextBuilder(
this.projectDir,
this.configManager.getIterativeConfig(),
this.openaiInstance
);
await iterativeBuilder.initialize();
return await iterativeBuilder.buildContextIteratively('commit', gitDiff);
}
/**
* Create context for any task type
* @param taskType The task type to create context for
* @param additionalContent Optional additional content (forwarded as the git diff for commit tasks; unused for other task types)
*/
public async createContextForTask(
taskType: TaskType,
additionalContent?: string
): Promise<IIterativeContextResult> {
switch (taskType) {
case 'readme':
return this.createContextForReadme();
case 'description':
return this.createContextForDescription();
case 'commit':
return this.createContextForCommit(additionalContent);
default:
// Default to readme for unknown task types
return this.createContextForReadme();
}
}
/**
* Get token stats for all task types
*/
public async getTokenStats(): Promise<Record<TaskType, {
tokenCount: number;
savings: number;
includedFiles: number;
trimmedFiles: number;
excludedFiles: number;
}>> {
const taskTypes: TaskType[] = ['readme', 'description', 'commit'];
const stats: Record<TaskType, any> = {} as any;
for (const taskType of taskTypes) {
const result = await this.createContextForTask(taskType);
stats[taskType] = {
tokenCount: result.tokenCount,
savings: result.tokenSavings,
includedFiles: result.includedFiles.length,
trimmedFiles: result.trimmedFiles.length,
excludedFiles: result.excludedFiles.length
};
}
return stats;
}
}
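
A hedged usage sketch (the diff string is a placeholder; in the real pipeline it would presumably come from the git tooling upstream of this factory):

import { TaskContextFactory } from './task-context-factory.js';

const factory = new TaskContextFactory('/path/to/project');
await factory.initialize();
// Commit context threads the raw diff straight into the iterative builder
const placeholderDiff = 'diff --git a/ts/index.ts b/ts/index.ts\n...';
const commitContext = await factory.createContextForCommit(placeholderDiff);
console.log(`Commit context: ${commitContext.tokenCount} tokens, ${commitContext.apiCallCount} API calls`);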


@@ -1,321 +0,0 @@
/**
* Context processing mode to control how context is built
*/
export type ContextMode = 'full' | 'trimmed' | 'summarized';
/**
* Configuration for context trimming
*/
export interface ITrimConfig {
/** Whether to remove function implementations */
removeImplementations?: boolean;
/** Whether to preserve interface definitions */
preserveInterfaces?: boolean;
/** Whether to preserve type definitions */
preserveTypeDefs?: boolean;
/** Whether to preserve JSDoc comments */
preserveJSDoc?: boolean;
/** Maximum lines to keep for function bodies (if not removing completely) */
maxFunctionLines?: number;
/** Whether to remove normal comments (non-JSDoc) */
removeComments?: boolean;
/** Whether to remove blank lines */
removeBlankLines?: boolean;
}
/**
* Task types that require different context optimization
*/
export type TaskType = 'readme' | 'commit' | 'description';
/**
* Configuration for different tasks
*/
export interface ITaskConfig {
/** The context mode to use for this task */
mode?: ContextMode;
/** File paths to include for this task */
includePaths?: string[];
/** File paths to exclude for this task */
excludePaths?: string[];
/** For commit tasks, whether to focus on changed files */
focusOnChangedFiles?: boolean;
/** For description tasks, whether to include package info */
includePackageInfo?: boolean;
}
/**
* Complete context configuration
*/
export interface IContextConfig {
/** Maximum tokens to use for context */
maxTokens?: number;
/** Default context mode */
defaultMode?: ContextMode;
/** Task-specific settings */
taskSpecificSettings?: {
[key in TaskType]?: ITaskConfig;
};
/** Trimming configuration */
trimming?: ITrimConfig;
/** Cache configuration */
cache?: ICacheConfig;
/** Analyzer configuration */
analyzer?: IAnalyzerConfig;
/** Prioritization weights */
prioritization?: IPrioritizationWeights;
/** Tier configuration for adaptive trimming */
tiers?: ITierConfig;
/** Iterative context building configuration */
iterative?: IIterativeConfig;
}
/**
* Cache configuration
*/
export interface ICacheConfig {
/** Whether caching is enabled */
enabled?: boolean;
/** Time-to-live in seconds */
ttl?: number;
/** Maximum cache size in MB */
maxSize?: number;
/** Cache directory path */
directory?: string;
}
/**
* Analyzer configuration
* Note: Smart analysis is always enabled; this config only controls advanced options
*/
export interface IAnalyzerConfig {
/** Whether to use AI refinement for selection (advanced, disabled by default) */
useAIRefinement?: boolean;
/** AI model to use for refinement */
aiModel?: string;
}
/**
* Weights for file prioritization
*/
export interface IPrioritizationWeights {
/** Weight for dependency centrality */
dependencyWeight?: number;
/** Weight for task relevance */
relevanceWeight?: number;
/** Weight for token efficiency */
efficiencyWeight?: number;
/** Weight for file recency */
recencyWeight?: number;
}
/**
* Tier configuration for adaptive trimming
*/
export interface ITierConfig {
essential?: ITierSettings;
important?: ITierSettings;
optional?: ITierSettings;
}
/**
* Settings for a single tier
*/
export interface ITierSettings {
/** Minimum score to qualify for this tier */
minScore: number;
/** Trimming level to apply */
trimLevel: 'none' | 'light' | 'aggressive';
}
/**
* Basic file information interface
*/
export interface IFileInfo {
/** The file path */
path: string;
/** The file contents */
contents: string;
/** The file's relative path from the project root */
relativePath: string;
/** The estimated token count of the file */
tokenCount?: number;
/** The file's importance score (higher is more important) */
importanceScore?: number;
}
/**
* Result of context building
*/
export interface IContextResult {
/** The generated context string */
context: string;
/** The total token count of the context */
tokenCount: number;
/** Files included in the context */
includedFiles: IFileInfo[];
/** Files that were trimmed */
trimmedFiles: IFileInfo[];
/** Files that were excluded */
excludedFiles: IFileInfo[];
/** Token savings from trimming */
tokenSavings: number;
}
/**
* File metadata without contents (for lazy loading)
*/
export interface IFileMetadata {
/** The file path */
path: string;
/** The file's relative path from the project root */
relativePath: string;
/** File size in bytes */
size: number;
/** Last modified time (Unix timestamp) */
mtime: number;
/** Estimated token count (without loading full contents) */
estimatedTokens: number;
/** The file's importance score */
importanceScore?: number;
}
/**
* Cache entry for a file
*/
export interface ICacheEntry {
/** File path */
path: string;
/** File contents */
contents: string;
/** Token count */
tokenCount: number;
/** Last modified time when cached */
mtime: number;
/** When this cache entry was created */
cachedAt: number;
}
/**
* Dependency information for a file
*/
export interface IFileDependencies {
/** File path */
path: string;
/** Files this file imports */
imports: string[];
/** Files that import this file */
importedBy: string[];
/** Centrality score (0-1) - how central this file is in the dependency graph */
centrality: number;
}
/**
* Analysis result for a file
*/
export interface IFileAnalysis {
/** File path */
path: string;
/** Task relevance score (0-1) */
relevanceScore: number;
/** Dependency centrality score (0-1) */
centralityScore: number;
/** Token efficiency score (0-1) */
efficiencyScore: number;
/** Recency score (0-1) */
recencyScore: number;
/** Combined importance score (0-1) */
importanceScore: number;
/** Assigned tier */
tier: 'essential' | 'important' | 'optional' | 'excluded';
/** Reason for the score */
reason?: string;
}
/**
* Result of context analysis
*/
export interface IAnalysisResult {
/** Task type being analyzed */
taskType: TaskType;
/** Analyzed files with scores */
files: IFileAnalysis[];
/** Dependency graph */
dependencyGraph: Map<string, IFileDependencies>;
/** Total files analyzed */
totalFiles: number;
/** Analysis duration in ms */
analysisDuration: number;
}
/**
* Configuration for iterative context building
*/
export interface IIterativeConfig {
/** Maximum number of iterations allowed */
maxIterations?: number;
/** Maximum files to request in first iteration */
firstPassFileLimit?: number;
/** Maximum files to request in subsequent iterations */
subsequentPassFileLimit?: number;
/** Temperature for AI decision making (0-1) */
temperature?: number;
/** Model to use for iterative decisions */
model?: string;
}
/**
* AI decision for file selection
*/
export interface IFileSelectionDecision {
/** AI's reasoning for file selection */
reasoning: string;
/** File paths to load */
filesToLoad: string[];
/** Estimated tokens needed */
estimatedTokensNeeded?: number;
}
/**
* AI decision for context sufficiency
*/
export interface IContextSufficiencyDecision {
/** Whether context is sufficient */
sufficient: boolean;
/** AI's reasoning */
reasoning: string;
/** Additional files needed (if not sufficient) */
additionalFilesNeeded?: string[];
}
/**
* State for a single iteration
*/
export interface IIterationState {
/** Iteration number (1-based) */
iteration: number;
/** Files loaded in this iteration */
filesLoaded: IFileInfo[];
/** Tokens used in this iteration */
tokensUsed: number;
/** Total tokens used so far */
totalTokensUsed: number;
/** AI decision made in this iteration */
decision: IFileSelectionDecision | IContextSufficiencyDecision;
/** Duration of this iteration in ms */
duration: number;
}
/**
* Result of iterative context building
*/
export interface IIterativeContextResult extends IContextResult {
/** Number of iterations performed */
iterationCount: number;
/** Details of each iteration */
iterations: IIterationState[];
/** Total API calls made */
apiCallCount: number;
/** Total duration in ms */
totalDuration: number;
}
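
To see how these interfaces compose, a sketch of an IContextConfig literal (every field is optional; the values mirror the defaults used elsewhere in this changeset rather than prescribing anything):

import type { IContextConfig } from './types.js';

const config: IContextConfig = {
  maxTokens: 190000,
  defaultMode: 'trimmed',
  taskSpecificSettings: {
    commit: { mode: 'trimmed', focusOnChangedFiles: true },
    readme: { includePaths: ['ts'] },
  },
  iterative: {
    maxIterations: 5,
    firstPassFileLimit: 10,
    subsequentPassFileLimit: 5,
    temperature: 0.3,
    model: 'gpt-4-turbo-preview',
  },
};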


@@ -6,10 +6,12 @@ export { path };
// pushrocks scope
import * as npmextra from '@push.rocks/npmextra';
import * as qenv from '@push.rocks/qenv';
import * as smartagent from '@push.rocks/smartagent';
import * as smartai from '@push.rocks/smartai';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartfs from '@push.rocks/smartfs';
import * as smartgit from '@push.rocks/smartgit';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartlog from '@push.rocks/smartlog';
@@ -21,10 +23,12 @@ import * as smarttime from '@push.rocks/smarttime';
export {
npmextra,
qenv,
smartagent,
smartai,
smartcli,
smartdelay,
smartfile,
smartfs,
smartgit,
smartinteract,
smartlog,
@@ -34,6 +38,13 @@ export {
smarttime,
};
// Create a shared SmartFs instance for filesystem operations
const smartFsNodeProvider = new smartfs.SmartFsProviderNode();
export const fsInstance = new smartfs.SmartFs(smartFsNodeProvider);
// Create a shared SmartFileFactory for in-memory file operations
export const smartfileFactory = smartfile.SmartFileFactory.nodeFs();
// @git.zone scope
import * as tspublish from '@git.zone/tspublish';
@@ -41,6 +52,5 @@ export { tspublish };
// third party scope
import * as typedoc from 'typedoc';
import * as gptTokenizer from 'gpt-tokenizer';
export { typedoc, gptTokenizer };
export { typedoc };