Compare commits
43 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| fc85f28f69 | |||
| 6b2957b272 | |||
| 883985dbc0 | |||
| 21006b41d0 | |||
| 5d0411a5ba | |||
| 39f5410b76 | |||
| 1a517fdd1b | |||
| 90af6eb1b1 | |||
| 3485392979 | |||
| 89adae2cff | |||
| 3451ab7456 | |||
| bcded1eafa | |||
| 9cae46e2fe | |||
| 65c1df30da | |||
| e8f2add812 | |||
| 8fcc304ee3 | |||
| 69802b46b6 | |||
| e500455557 | |||
| 4029691ccd | |||
| 3b1c84d7e8 | |||
| f8d0895aab | |||
| d7ec2220a1 | |||
| c24ce31b1f | |||
| fec2017cc6 | |||
| 88fac91c79 | |||
| ce4da89da9 | |||
| 6524adea18 | |||
| 4bf0c02618 | |||
| f84a65217d | |||
| 3f22fc91ae | |||
| 11e65b92ec | |||
| 0a3080518f | |||
| d0a4ddbb4b | |||
| 481339d3cb | |||
| ebc3d760af | |||
| a6d678e36c | |||
| 8c3e16a4f2 | |||
| 2276fb0c0c | |||
| 0a9d535df4 | |||
| d46fd1590e | |||
| 1d7317f063 | |||
| fe5121ec9c | |||
| c084b20390 |
129
changelog.md
129
changelog.md
@@ -1,5 +1,134 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-03-11 - 2.0.0 - BREAKING CHANGE(aidoc)
|
||||||
|
migrate agent orchestration to new runAgent API and filesystem tools; refactor model handling and update README and tests
|
||||||
|
|
||||||
|
- Replace DualAgentOrchestrator with plugins.smartagent.runAgent and scoped filesystem tools
|
||||||
|
- Introduce smartagentTools export and use filesystemTool for agents
|
||||||
|
- Replace smartAiInstance with model via plugins.smartai.getModel() and remove previous lifecycle methods (breaking API change)
|
||||||
|
- Normalize agent output property from result to text and standardize log messages (removed emojis)
|
||||||
|
- Update changelog/README/description generation flows to use new agent interface
|
||||||
|
- Bump several devDependencies and dependencies (tsbuild, tstest, @types/node, tspublish, push.rocks packages, typedoc, typescript)
|
||||||
|
- Change test entry to export default tap.start()
|
||||||
|
- Revise README content and structure
|
||||||
|
|
||||||
|
## 2026-01-04 - 1.12.0 - feat(commit)
|
||||||
|
add token budgeting and dynamic diff token calculation to avoid OpenAI context limit issues
|
||||||
|
|
||||||
|
- Introduce TOKEN_BUDGET constants and calculateMaxDiffTokens() in ts/aidocs_classes/commit.ts
|
||||||
|
- Use dynamic maxDiffTokens for DiffProcessor and validate/log warnings when estimated tokens approach limits
|
||||||
|
- Add token budgeting notes to readme.hints.md (guidance for splitting large commits and adjusting overhead)
|
||||||
|
- Bump dependencies/devDependencies: @git.zone/tstest ^3.1.4, @types/node ^25.0.3, @git.zone/tspublish ^1.11.0, @push.rocks/smartfs ^1.3.1
|
||||||
|
|
||||||
|
## 2025-12-16 - 1.11.4 - fix(aidocs_classes)
|
||||||
|
clarify recommendedNextVersionMessage field to require only the description body without the type(scope) prefix
|
||||||
|
|
||||||
|
- Updated inline documentation in ts/aidocs_classes/commit.ts to explicitly state that recommendedNextVersionMessage must be only the description body (example: 'bump dependency to ^1.2.6') and not include the type(scope) prefix.
|
||||||
|
- Removes ambiguity in the example text and improves guidance for commit message generation.
|
||||||
|
|
||||||
|
## 2025-12-15 - 1.11.0 - feat(commit)
|
||||||
|
Integrate DualAgentOrchestrator for commit message generation and improve diff/context handling
|
||||||
|
|
||||||
|
- Add @push.rocks/smartagent dependency and export it from plugins
|
||||||
|
- Use DualAgentOrchestrator to generate and guardian-validate commit messages
|
||||||
|
- Use DualAgentOrchestrator for changelog generation with guardian validation
|
||||||
|
- Switch commit flow to TaskContextFactory and DiffProcessor for token-efficient context
|
||||||
|
- Expose getOpenaiToken() and wire orchestrator with the project OpenAI token
|
||||||
|
- Enhance iterative context builder and context components to better manage token budgets and sampling
|
||||||
|
- Update npmextra.json with release config for @git.zone/cli and reference local smartagent package in package.json
|
||||||
|
|
||||||
|
## 2025-12-02 - 1.10.0 - feat(diff-processor)
|
||||||
|
Improve diff sampling and file prioritization: increase inclusion thresholds, expand sampled context, and boost priority for interface/type and entry-point files
|
||||||
|
|
||||||
|
- Raise small/medium file thresholds used by DiffProcessor (smallFileLines 50 -> 300, mediumFileLines 200 -> 800) so more source files are included fully or summarized rather than treated as large metadata-only files
|
||||||
|
- Increase sample window for medium files (sampleHeadLines/sampleTailLines 20 -> 75) to provide more context when summarizing diffs
|
||||||
|
- Boost importance scoring for interfaces/type files and entry points (adds +20 for interfaces/.types and +15 for index/mod entry files) to prioritize critical API surface in diff processing
|
||||||
|
- Keep other prioritization rules intact (source/test/config/docs/build heuristics), and align the aidoc commit DiffProcessor usage with the new defaults
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.9.2 - fix(deps)
|
||||||
|
Update dependencies and devDependencies to newer versions (bump multiple packages)
|
||||||
|
|
||||||
|
- Bumped devDependencies: @git.zone/tsbuild 2.6.8 -> 2.7.1, @git.zone/tsrun 1.2.46 -> 1.6.2, @git.zone/tstest 2.3.6 -> 2.7.0
|
||||||
|
- Bumped runtime dependencies: @push.rocks/smartai 0.5.11 -> 0.8.0, @push.rocks/smartcli 4.0.11 -> 4.0.19, @push.rocks/smartgit 3.2.1 -> 3.3.1, @push.rocks/smartlog 3.1.9 -> 3.1.10, gpt-tokenizer 3.0.1 -> 3.2.0, typedoc 0.28.12 -> 0.28.14, typescript 5.9.2 -> 5.9.3
|
||||||
|
- No source code changes in this commit; dependency-only updates. Run the test suite and CI to verify compatibility.
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.9.1 - fix(iterative-context-builder)
|
||||||
|
Rely on DiffProcessor for git diff pre-processing; remove raw char truncation, raise diff token safety, and improve logging
|
||||||
|
|
||||||
|
- Removed raw character-based truncation of additionalContext — diffs are expected to be pre-processed by DiffProcessor instead of blind substring truncation.
|
||||||
|
- Now validates pre-processed diff token count only and treats DiffProcessor as the primary sampler (DiffProcessor typically uses a ~100k token budget).
|
||||||
|
- Increased MAX_DIFF_TOKENS safety net to 200,000 to cover edge cases and avoid false positives; updated logs to reflect pre-processed diffs.
|
||||||
|
- Improved error messaging to indicate a likely DiffProcessor misconfiguration when pre-processed diffs exceed the safety limit.
|
||||||
|
- Updated informational logs to state that a pre-processed git diff was added to context.
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.9.0 - feat(context)
|
||||||
|
Add intelligent DiffProcessor to summarize and prioritize git diffs and integrate it into the commit context pipeline
|
||||||
|
|
||||||
|
- Add DiffProcessor (ts/context/diff-processor.ts) to intelligently process git diffs: include small files fully, summarize medium files (head/tail sampling), and mark very large files as metadata-only to stay within token budgets.
|
||||||
|
- Integrate DiffProcessor into commit workflow (ts/aidocs_classes/commit.ts): preprocess raw diffs, emit processed diff statistics, and pass a token-efficient diff section into the TaskContextFactory for commit context generation.
|
||||||
|
- Export DiffProcessor and its types through the context index and types (ts/context/index.ts, ts/context/types.ts) so other context components can reuse it.
|
||||||
|
- Add comprehensive tests for the DiffProcessor behavior and integration (test/test.diffprocessor.node.ts) covering small/medium/large diffs, added/deleted files, prioritization, token budgets, and formatting for context.
|
||||||
|
- Minor adjustments across context/task factories and builders to accept and propagate processed diff strings rather than raw diffs, reducing risk of token overflows during iterative context building.
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.8.3 - fix(context)
|
||||||
|
Prevent enormous git diffs and OOM during context building by adding exclusion patterns, truncation, and diagnostic logging
|
||||||
|
|
||||||
|
- Add comprehensive git diff exclusion globs (locks, build artifacts, maps, bundles, IDE folders, logs, caches) when collecting uncommitted diffs to avoid noisy/huge diffs
|
||||||
|
- Pass glob patterns directly to smartgit.getUncommittedDiff for efficient server-side matching
|
||||||
|
- Emit diagnostic statistics for diffs (files changed, total characters, estimated tokens, number of exclusion patterns) and warn on unusually large diffs
|
||||||
|
- Introduce pre-tokenization safety checks in iterative context builder: truncate raw diff text if it exceeds MAX_DIFF_CHARS and throw a clear error if token count still exceeds MAX_DIFF_TOKENS
|
||||||
|
- Format and log token counts using locale-aware formatting for clarity
|
||||||
|
- Improve robustness of commit context generation to reduce risk of OOM / model-limit overruns
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.8.0 - feat(context)
|
||||||
|
Wire OpenAI provider through task context factory and add git-diff support to iterative context builder
|
||||||
|
|
||||||
|
- Pass AiDoc.openaiInstance through TaskContextFactory into IterativeContextBuilder to reuse the same OpenAI provider and avoid reinitialization.
|
||||||
|
- IterativeContextBuilder now accepts an optional OpenAiProvider and an additionalContext string; when provided, git diffs (or other extra context) are prepended to the AI context and token counts are updated.
|
||||||
|
- createContextForCommit now forwards the git diff into the iterative builder so commit-specific context includes the diff.
|
||||||
|
- Updated aidocs_classes (commit, description, readme) to supply the existing openaiInstance when creating the TaskContextFactory.
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.7.0 - feat(IterativeContextBuilder)
|
||||||
|
Add iterative AI-driven context builder and integrate into task factory; add tests and iterative configuration
|
||||||
|
|
||||||
|
- Introduce IterativeContextBuilder: iterative, token-aware context construction that asks the AI which files to load and evaluates context sufficiency.
|
||||||
|
- Switch TaskContextFactory to use IterativeContextBuilder for readme, description and commit tasks (replaces earlier EnhancedContext flow for these tasks).
|
||||||
|
- Add iterative configuration options (maxIterations, firstPassFileLimit, subsequentPassFileLimit, temperature, model) in types and ConfigManager and merge support for user config.
|
||||||
|
- Update CLI (tokens and aidoc flows) to use the iterative context factory and improve task handling and messaging.
|
||||||
|
- Add test coverage: test/test.iterativecontextbuilder.node.ts to validate initialization, iterative builds, token budget respect and multiple task types.
|
||||||
|
- Enhance ContextCache, LazyFileLoader, ContextAnalyzer and ContextTrimmer to support the iterative pipeline and smarter prioritization/prompts.
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.6.1 - fix(context)
|
||||||
|
Improve context building, caching and test robustness
|
||||||
|
|
||||||
|
- EnhancedContext: refactored smart context building to use the analyzer and TaskContextFactory by default; taskType now defaults to 'description' and task-specific modes are applied.
|
||||||
|
- ConfigManager: simplified analyzer configuration (removed enabled flag) and fixed getAnalyzerConfig fallback shape.
|
||||||
|
- ContextCache: more robust mtime handling and persistence; tests updated to use real file mtimes so cache validation works reliably.
|
||||||
|
- LazyFileLoader: adjusted token estimation tolerance and improved metadata caching behavior.
|
||||||
|
- ContextAnalyzer & trimming pipeline: improved prioritization and trimming integration to better enforce token budgets.
|
||||||
|
- Tests: relaxed strict timing/boolean checks and made assertions more tolerant (toEqual vs toBe) to reduce false negatives.
|
||||||
|
|
||||||
|
## 2025-11-02 - 1.6.0 - feat(context)
|
||||||
|
Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements
|
||||||
|
|
||||||
|
- Add ContextAnalyzer for dependency-based file scoring and prioritization (PageRank-like centrality, relevance, efficiency, recency)
|
||||||
|
- Add LazyFileLoader to scan metadata and load files in parallel with lightweight token estimates
|
||||||
|
- Add ContextCache for persistent file content/token caching with TTL and max-size eviction
|
||||||
|
- Enhance ContextTrimmer with tier-based trimming and configurable light/aggressive levels
|
||||||
|
- Integrate new components into EnhancedContext and TaskContextFactory to build task-aware, token-optimized contexts
|
||||||
|
- Extend ConfigManager and types to support cache, analyzer, prioritization weights and tier configs (npmextra.json driven)
|
||||||
|
- Add comprehensive unit tests for ContextAnalyzer, ContextCache and LazyFileLoader
|
||||||
|
- Update README with Smart Context Building docs, examples, configuration options and CI workflow snippet
|
||||||
|
|
||||||
|
## 2025-09-07 - 1.5.2 - fix(package)
|
||||||
|
Bump dependencies, refine test script and imports, and overhaul README and docs
|
||||||
|
|
||||||
|
- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
|
||||||
|
- Updated test script to run tstest with verbose, logfile and increased timeout; adjusted testCli script invocation.
|
||||||
|
- Fixed test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
|
||||||
|
- Large README rewrite: reorganized and expanded content, added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
|
||||||
|
- Minor clarification added to commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).
|
||||||
|
|
||||||
## 2025-08-16 - 1.5.1 - fix(aidoc)
|
## 2025-08-16 - 1.5.1 - fix(aidoc)
|
||||||
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()
|
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()
|
||||||
|
|
||||||
|
|||||||
@@ -31,5 +31,14 @@
|
|||||||
},
|
},
|
||||||
"tsdoc": {
|
"tsdoc": {
|
||||||
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||||
|
},
|
||||||
|
"@git.zone/cli": {
|
||||||
|
"release": {
|
||||||
|
"registries": [
|
||||||
|
"https://verdaccio.lossless.digital",
|
||||||
|
"https://registry.npmjs.org"
|
||||||
|
],
|
||||||
|
"accessLevel": "public"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
44
package.json
44
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@git.zone/tsdoc",
|
"name": "@git.zone/tsdoc",
|
||||||
"version": "1.5.1",
|
"version": "2.0.0",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
|
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
@@ -13,37 +13,37 @@
|
|||||||
"tsdoc": "cli.js"
|
"tsdoc": "cli.js"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "(tstest test/) && npm run testCli",
|
"test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
|
||||||
"testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
|
"testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
|
||||||
"build": "(tsbuild --web --allowimplicitany)",
|
"build": "(tsbuild --web --allowimplicitany)",
|
||||||
"buildDocs": "tsdoc"
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.6.4",
|
"@git.zone/tsbuild": "^4.3.0",
|
||||||
"@git.zone/tsrun": "^1.2.46",
|
"@git.zone/tsrun": "^2.0.1",
|
||||||
"@git.zone/tstest": "^2.3.2",
|
"@git.zone/tstest": "^3.3.2",
|
||||||
"@push.rocks/tapbundle": "^6.0.3",
|
"@types/node": "^25.4.0"
|
||||||
"@types/node": "^22.15.17"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@git.zone/tspublish": "^1.10.1",
|
"@git.zone/tspublish": "^1.11.2",
|
||||||
"@push.rocks/early": "^4.0.3",
|
"@push.rocks/early": "^4.0.4",
|
||||||
"@push.rocks/npmextra": "^5.3.1",
|
"@push.rocks/npmextra": "^5.3.3",
|
||||||
"@push.rocks/qenv": "^6.1.2",
|
"@push.rocks/qenv": "^6.1.3",
|
||||||
"@push.rocks/smartai": "^0.5.11",
|
"@push.rocks/smartagent": "^3.0.2",
|
||||||
"@push.rocks/smartcli": "^4.0.11",
|
"@push.rocks/smartai": "^2.0.0",
|
||||||
|
"@push.rocks/smartcli": "^4.0.20",
|
||||||
"@push.rocks/smartdelay": "^3.0.5",
|
"@push.rocks/smartdelay": "^3.0.5",
|
||||||
"@push.rocks/smartfile": "^11.2.5",
|
"@push.rocks/smartfile": "^13.1.2",
|
||||||
"@push.rocks/smartgit": "^3.2.1",
|
"@push.rocks/smartfs": "^1.5.0",
|
||||||
"@push.rocks/smartinteract": "^2.0.15",
|
"@push.rocks/smartgit": "^3.3.1",
|
||||||
"@push.rocks/smartlog": "^3.1.8",
|
"@push.rocks/smartinteract": "^2.0.16",
|
||||||
|
"@push.rocks/smartlog": "^3.2.1",
|
||||||
"@push.rocks/smartlog-destination-local": "^9.0.2",
|
"@push.rocks/smartlog-destination-local": "^9.0.2",
|
||||||
"@push.rocks/smartpath": "^6.0.0",
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
"@push.rocks/smartshell": "^3.2.4",
|
"@push.rocks/smartshell": "^3.3.7",
|
||||||
"@push.rocks/smarttime": "^4.0.6",
|
"@push.rocks/smarttime": "^4.2.3",
|
||||||
"gpt-tokenizer": "^2.9.0",
|
"typedoc": "^0.28.17",
|
||||||
"typedoc": "^0.28.10",
|
"typescript": "^5.9.3"
|
||||||
"typescript": "^5.9.2"
|
|
||||||
},
|
},
|
||||||
"files": [
|
"files": [
|
||||||
"ts/**/*",
|
"ts/**/*",
|
||||||
|
|||||||
9168
pnpm-lock.yaml
generated
9168
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,5 +0,0 @@
|
|||||||
onlyBuiltDependencies:
|
|
||||||
- esbuild
|
|
||||||
- mongodb-memory-server
|
|
||||||
- puppeteer
|
|
||||||
- sharp
|
|
||||||
@@ -2,4 +2,13 @@
|
|||||||
* alternatively can be used through npx, if installed locally
|
* alternatively can be used through npx, if installed locally
|
||||||
* cli parameters are concluded from ./ts/cli.ts
|
* cli parameters are concluded from ./ts/cli.ts
|
||||||
* this module is not intended for API use.
|
* this module is not intended for API use.
|
||||||
* Read carefully through the TypeScript files. Don't make stuff up.
|
* Read carefully through the TypeScript files. Don't make stuff up.
|
||||||
|
|
||||||
|
## Token Budgeting (commit.ts)
|
||||||
|
* OpenAI has a 272,000 token context limit
|
||||||
|
* The smartagent infrastructure adds ~180,000 tokens of overhead (system messages, tool descriptions, conversation history)
|
||||||
|
* TOKEN_BUDGET constants in commit.ts control the available tokens for diff content
|
||||||
|
* Dynamic calculation: 272K - 10K (safety) - 180K (overhead) - 2K (prompt) = 80K tokens for diff
|
||||||
|
* If token limit errors occur, consider:
|
||||||
|
- Splitting large commits into smaller ones
|
||||||
|
- Adjusting SMARTAGENT_OVERHEAD if actual overhead is different
|
||||||
811
readme.md
811
readme.md
@@ -1,730 +1,219 @@
|
|||||||
# @git.zone/tsdoc
|
# @git.zone/tsdoc
|
||||||
An advanced TypeScript documentation tool using AI to generate and enhance documentation for TypeScript projects.
|
|
||||||
|
AI-Powered Documentation for TypeScript Projects
|
||||||
|
|
||||||
|
## Issue Reporting and Security
|
||||||
|
|
||||||
|
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
To install @git.zone/tsdoc, you have two options. You can install it globally so that the CLI commands are available throughout your system, or you can use it with npx if you prefer to keep the installation local to your project.
|
|
||||||
|
|
||||||
### Global Installation
|
|
||||||
|
|
||||||
To install the tool globally, run the following command in your terminal:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm install -g @git.zone/tsdoc
|
# Global installation (recommended for CLI usage)
|
||||||
|
pnpm add -g @git.zone/tsdoc
|
||||||
|
|
||||||
|
# Or use with npx
|
||||||
|
npx @git.zone/tsdoc
|
||||||
|
|
||||||
|
# Or install locally as a dependency
|
||||||
|
pnpm add @git.zone/tsdoc
|
||||||
```
|
```
|
||||||
|
|
||||||
Installing globally ensures that the CLI commands (such as tsdoc, tsdoc typedoc, tsdoc aidoc, etc.) are available anywhere on your machine without the need to refer to the local node_modules folder.
|
|
||||||
|
|
||||||
### Usage with npx
|
|
||||||
|
|
||||||
If you prefer not to install the tool globally, you can invoke it using npx directly from your project. This method works well if you intend to use the tool on a per-project basis:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
npx @git.zone/tsdoc <command>
|
|
||||||
```
|
|
||||||
|
|
||||||
In the commands below, you will see how to use the various functionalities that @git.zone/tsdoc provides for generating intricate and enhanced documentation for your TypeScript projects.
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
The @git.zone/tsdoc module provides a very rich and interactive CLI interface together with a set of programmable classes that let you integrate documentation generation into your build processes or workflows. This section will walk you through every aspect of the module—from its basic CLI commands to its advanced internal API usage. All examples provided below use ESM syntax with TypeScript and are designed to be comprehensive. Every code snippet is written so you can easily copy, paste, and adapt to your project. The following guide is divided into several sections covering every major feature, tool integration, and customization options available in the module.
|
`@git.zone/tsdoc` is a comprehensive TypeScript documentation tool that combines traditional TypeDoc API documentation generation with AI-powered documentation workflows. It uses OpenAI models via the Vercel AI SDK to generate READMEs, project descriptions, keywords, and semantic commit messages by intelligently exploring your project with agentic tool use.
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
### CLI Commands
|
||||||
### Overview and Core Concepts
|
|
||||||
|
|
||||||
At its heart, @git.zone/tsdoc is a CLI tool that blends classic documentation generation (using libraries such as TypeDoc) with AI-enhanced techniques. The tool reads your project files, uses a context builder to optimize file content based on token usage and configurable trimming strategies, and then leverages an AI engine to generate enhanced documentation. This complete solution is designed to integrate smoothly into your project pipeline.
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `tsdoc` | Auto-detects project type and runs TypeDoc |
|
||||||
|
| `tsdoc aidoc` | Generates AI-powered README + description/keywords |
|
||||||
|
| `tsdoc readme` | Generates AI-powered README only |
|
||||||
|
| `tsdoc description` | Generates AI-powered description and keywords only |
|
||||||
|
| `tsdoc commit` | Generates a semantic commit message from uncommitted changes |
|
||||||
|
| `tsdoc typedoc` | Generates traditional TypeDoc API documentation |
|
||||||
|
|
||||||
Key features include:
|
### Generating AI-Powered Documentation
|
||||||
- **Auto-detection of documentation format**: The CLI attempts to determine the best documentation strategy for your project.
|
|
||||||
- **Support for TypeDoc generation**: Build TypeDoc-compatible documentation directly.
|
|
||||||
- **AI-Enhanced Documentation (AiDoc)**: Generate a README and project description using artificial intelligence that analyzes your project’s code and context.
|
|
||||||
- **Plugin Integration**: The module leverages a variety of plugins (smartfile, smartgit, smartcli, smartai, etc.) to streamline tasks such as file manipulation, CLI interaction, shell command execution, and logging.
|
|
||||||
- **Context Trimming and Optimization**: To manage token usage (especially for AI input), the module includes advanced context-building strategies that trim and summarize code files intelligently.
|
|
||||||
- **Robust Internal API**: While the primary user interface is through the CLI, the underlying classes (AiDoc, TypeDoc, Readme, etc.) can be used to build custom integrations or extend the tool’s functionality.
|
|
||||||
|
|
||||||
Below, you will find detailed explanations along with ESM/TypeScript code examples for all core use cases.
|
The `aidoc` command combines README generation and description/keyword generation in one step:
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
|
||||||
### Command-Line Interface (CLI) Usage
|
|
||||||
|
|
||||||
The most common way to interact with @git.zone/tsdoc is via its command-line interface (CLI). The CLI is designed to auto-detect your project’s context and trigger the appropriate commands based on your needs. Below is a guide on how to use the CLI commands.
|
|
||||||
|
|
||||||
#### Basic Invocation
|
|
||||||
|
|
||||||
When you run the command without any arguments, the tool attempts to determine the appropriate documentation generation mode:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
tsdoc
|
|
||||||
```
|
|
||||||
|
|
||||||
This will scan the project directory and attempt to detect whether your project follows a TypeDoc convention or if it would benefit from an AI-enhanced documentation build. The auto-detection logic uses the project context (for example, the presence of a ts directory or specific configuration files) to decide the best course of action.
|
|
||||||
|
|
||||||
##### Example Scenario
|
|
||||||
|
|
||||||
Imagine you have a TypeScript project with the following structure:
|
|
||||||
|
|
||||||
├── package.json
|
|
||||||
├── ts/
|
|
||||||
│ └── index.ts
|
|
||||||
└── readme.hints.md
|
|
||||||
|
|
||||||
When you execute:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
tsdoc
|
|
||||||
```
|
|
||||||
|
|
||||||
The tool will analyze the project directory, recognizing the ts/ folder, and it will route the command to the appropriate documentation generator, such as the TypeDoc generator if it detects valid structure.
|
|
||||||
|
|
||||||
#### TypeDoc Command
|
|
||||||
|
|
||||||
For projects that require a traditional documentation format, you can explicitly generate documentation using the TypeDoc integration:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
tsdoc typedoc --publicSubdir docs
|
|
||||||
```
|
|
||||||
|
|
||||||
This command instructs the module to generate HTML documentation using TypeDoc, placing the output into a public directory (or a custom subdirectory as specified).
|
|
||||||
|
|
||||||
**Inside a TypeScript file, this command can be mirrored by calling the TypeDoc class directly:**
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { TypeDoc } from '@git.zone/tsdoc';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
const cwd = process.cwd();
|
|
||||||
const typeDocInstance = new TypeDoc(cwd);
|
|
||||||
|
|
||||||
const compileDocumentation = async (): Promise<void> => {
|
|
||||||
try {
|
|
||||||
// Specify the output subdirectory for documentation
|
|
||||||
await typeDocInstance.compile({
|
|
||||||
publicSubdir: 'docs'
|
|
||||||
});
|
|
||||||
console.log('Documentation successfully generated using TypeDoc.');
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error generating documentation with TypeDoc:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
compileDocumentation();
|
|
||||||
```
|
|
||||||
|
|
||||||
In this example, the script creates an instance of the TypeDoc class passing the current working directory. The compile method is then called with an options object, indicating that the public subdirectory should be named “docs.” The method spawns a shell command using the smart shell plugin to execute the TypeDoc binary.
|
|
||||||
|
|
||||||
#### AI-Enhanced Documentation Command
|
|
||||||
|
|
||||||
One of the standout features of this module is its AI-enhanced documentation capabilities. The `aidoc` command integrates with an OpenAI interface to produce a more contextual and detailed README and project description. This is particularly useful when your project codebase has evolved and requires documentation updates based on the current source code.
|
|
||||||
|
|
||||||
To run the AI-enhanced documentation generation:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
|
# In your project root
|
||||||
tsdoc aidoc
|
tsdoc aidoc
|
||||||
```
|
```
|
||||||
|
|
||||||
In an ESM/TypeScript project, you can use the AiDoc class to programmatically run the same functionality:
|
This will:
|
||||||
|
1. Analyze your codebase using an AI agent with filesystem access
|
||||||
|
2. Generate a comprehensive `readme.md`
|
||||||
|
3. Update `package.json` and `npmextra.json` with an AI-generated description and keywords
|
||||||
|
|
||||||
```typescript
|
You can also run these separately:
|
||||||
import { AiDoc } from '@git.zone/tsdoc';
|
|
||||||
|
|
||||||
const buildEnhancedDocs = async (): Promise<void> => {
|
|
||||||
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-openai-token' });
|
|
||||||
try {
|
|
||||||
// Start the AI interface; this internally checks if the token is valid and persists it
|
|
||||||
await aiDoc.start();
|
|
||||||
|
|
||||||
// Build the README file for the project directory
|
|
||||||
console.log('Generating README file using AI...');
|
|
||||||
await aiDoc.buildReadme(process.cwd());
|
|
||||||
|
|
||||||
// Build a new project description based on the codebase
|
|
||||||
console.log('Generating updated project description...');
|
|
||||||
await aiDoc.buildDescription(process.cwd());
|
|
||||||
|
|
||||||
console.log('AI-enhanced documentation generated successfully.');
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Failed to generate AI-enhanced documentation:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
buildEnhancedDocs();
|
|
||||||
```
|
|
||||||
|
|
||||||
In the above snippet, we import the AiDoc class and create an instance with an OpenAI token. The methods start(), buildReadme(), and buildDescription() streamline the process of generating enhanced documentation by leveraging the underlying AI engine. This code example should serve as a blueprint for those wishing to integrate AI-driven documentation updates as part of their CI/CD pipelines.
|
|
||||||
|
|
||||||
#### Testing Your Documentation Setup
|
|
||||||
|
|
||||||
Before you commit changes to your project documentation, it is often worthwhile to run tests to ensure that your documentation generation process is behaving as expected. The module includes a `test` command:
|
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
tsdoc test
|
# Generate only the README
|
||||||
|
tsdoc readme
|
||||||
|
|
||||||
|
# Generate only the description and keywords
|
||||||
|
tsdoc description
|
||||||
```
|
```
|
||||||
|
|
||||||
This command verifies that all components (CLI commands, TypeDoc compilation, AI integration, etc.) are properly configured.
|
### Generating Commit Messages
|
||||||
|
|
||||||
Here is an example test script written in TypeScript using a test bundle:
|
The `commit` command analyzes your uncommitted changes and produces a structured commit object:
|
||||||
|
|
||||||
```typescript
|
```bash
|
||||||
import { expect, tap } from '@push.rocks/tapbundle';
|
tsdoc commit
|
||||||
import { AiDoc } from '@git.zone/tsdoc';
|
|
||||||
|
|
||||||
tap.test('AiDoc instance creation', async () => {
|
|
||||||
const aidoc = new AiDoc({ OPENAI_TOKEN: 'dummy-token' });
|
|
||||||
expect(aidoc).toBeInstanceOf(AiDoc);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('Running AI documentation generation', async () => {
|
|
||||||
const aidoc = new AiDoc({ OPENAI_TOKEN: 'dummy-token' });
|
|
||||||
await aidoc.start();
|
|
||||||
|
|
||||||
// Attempt buildReadme and buildDescription synchronously for test coverage
|
|
||||||
await aidoc.buildReadme(process.cwd());
|
|
||||||
await aidoc.buildDescription(process.cwd());
|
|
||||||
|
|
||||||
// If no errors are thrown, we assume the process works as expected
|
|
||||||
expect(true).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.start();
|
|
||||||
```
|
```
|
||||||
|
|
||||||
This test script demonstrates how to automate the validation process by using the provided AiDoc class. Using a testing framework like tap ensures that your documentation generation remains robust even as new features are added or as the project evolves.
|
Output is a JSON object following conventional commits:
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
```json
|
||||||
### Advanced Usage Scenarios
|
{
|
||||||
|
"recommendedNextVersionLevel": "feat",
|
||||||
|
"recommendedNextVersionScope": "core",
|
||||||
|
"recommendedNextVersionMessage": "add new feature for better documentation",
|
||||||
|
"recommendedNextVersionDetails": [
|
||||||
|
"implemented X",
|
||||||
|
"refactored Y"
|
||||||
|
],
|
||||||
|
"recommendedNextVersion": "1.13.0",
|
||||||
|
"changelog": "# Changelog\n\n## 2026-03-11 - 1.13.0 - core\n..."
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
Beyond using the CLI, @git.zone/tsdoc provides various classes and plugins that allow you to deeply integrate documentation generation within your project. The following sections document advanced usage scenarios where you programmatically interact with different components.
|
The commit command includes intelligent diff processing that:
|
||||||
|
- Excludes lock files, build artifacts, IDE directories, and caches from the diff
|
||||||
|
- Prioritizes source files over build artifacts
|
||||||
|
- Samples large diffs with head/tail extraction to stay within token budgets
|
||||||
|
- Automatically generates or updates the changelog
|
||||||
|
|
||||||
#### 1. Deep Dive into AiDoc Functionality
|
### Generating TypeDoc
|
||||||
|
|
||||||
The AiDoc class is the core of the AI-enhanced documentation generation. It manages interactions with the OpenAI API, handles token validations, and integrates with project-specific configurations.
|
For traditional API documentation via TypeDoc:
|
||||||
|
|
||||||
Consider the following advanced usage example:
|
```bash
|
||||||
|
# Generate to default ./public directory
|
||||||
|
tsdoc typedoc
|
||||||
|
|
||||||
|
# Generate to a specific subdirectory
|
||||||
|
tsdoc typedoc --publicSubdir docs
|
||||||
|
```
|
||||||
|
|
||||||
|
### Monorepo Support
|
||||||
|
|
||||||
|
When generating READMEs, tsdoc automatically detects monorepo submodules via `@git.zone/tspublish` conventions. Each submodule directory with a `tspublish.json` gets its own generated README.
|
||||||
|
|
||||||
|
### Programmatic API
|
||||||
|
|
||||||
|
You can also use tsdoc programmatically:
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import { AiDoc } from '@git.zone/tsdoc';
|
import { AiDoc } from '@git.zone/tsdoc';
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
const generateProjectDocs = async () => {
|
const aidoc = new AiDoc();
|
||||||
// Create an instance of the AiDoc class with a configuration object
|
await aidoc.start(); // Initializes the AI model (prompts for OpenAI token if needed)
|
||||||
// that includes your OpenAI token. This token will be used to query the AI.
|
|
||||||
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-openai-token' });
|
|
||||||
|
|
||||||
// Initialize the AI documentation system.
|
// Generate a README
|
||||||
await aiDoc.start();
|
await aidoc.buildReadme('/path/to/project');
|
||||||
|
|
||||||
// Build the README file for the current project.
|
// Generate description and keywords
|
||||||
console.log('Building README for the project...');
|
await aidoc.buildDescription('/path/to/project');
|
||||||
await aiDoc.buildReadme(process.cwd());
|
|
||||||
|
|
||||||
// Build an updated project description based on the analysis of the source files.
|
// Generate a commit message object
|
||||||
console.log('Building project description...');
|
const commitObj = await aidoc.buildNextCommitObject('/path/to/project');
|
||||||
await aiDoc.buildDescription(process.cwd());
|
console.log(commitObj);
|
||||||
|
|
||||||
// You can also generate a commit message based on code changes by using the next commit object generation.
|
// Get project context information
|
||||||
try {
|
const context = await aidoc.getProjectContext('/path/to/project');
|
||||||
console.log('Generating commit message based on your project changes...');
|
|
||||||
const nextCommit = await aiDoc.buildNextCommitObject(process.cwd());
|
|
||||||
console.log('Next commit message object:', nextCommit);
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error generating commit message:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
generateProjectDocs();
|
// Get token count for a project
|
||||||
|
const tokenCount = await aidoc.getProjectContextTokenCount('/path/to/project');
|
||||||
|
|
||||||
|
// Estimate tokens in arbitrary text
|
||||||
|
const tokens = aidoc.countTokens('some text here');
|
||||||
|
|
||||||
|
await aidoc.stop();
|
||||||
```
|
```
|
||||||
|
|
||||||
In this example, the AiDoc class handles multiple tasks:
|
## Configuration
|
||||||
|
|
||||||
- It starts by validating and printing the sanitized token.
|
### OpenAI Token
|
||||||
- It generates and writes the README file based on dynamic analysis.
|
|
||||||
- It updates the project description stored in your configuration files.
|
|
||||||
- It even integrates with Git to produce a suggested commit message that factors in the current state of the project directory.
|
|
||||||
|
|
||||||
Internally, methods such as buildReadme() interact with the ProjectContext class to gather files and determine the relevant context. This context is trimmed and processed based on token budgets, thus ensuring that the AI interface only receives the information it can effectively process.
|
An OpenAI API key is required for all AI features. It can be provided in three ways:
|
||||||
|
|
||||||
#### 2. Interacting with the TypeDoc Class Programmatically
|
1. **Environment variable**: `OPENAI_TOKEN`
|
||||||
|
2. **Interactive prompt**: On first run, tsdoc will prompt for the token and persist it
|
||||||
|
3. **Constructor argument**: Pass `{ OPENAI_TOKEN: 'sk-...' }` to `new AiDoc()`
|
||||||
|
|
||||||
The TypeDoc class does not merely wrap the standard TypeDoc tool. It adds a layer of automation by preparing the TypeScript environment, generating a temporary tsconfig file, and invoking TypeDoc with the proper configuration. You can use this functionality to conditionally generate documentation or integrate it into your build steps.
|
The token is persisted at `~/.npmextra/kv/@git.zone/tsdoc.json` for subsequent runs.
|
||||||
|
|
||||||
Below is another example demonstrating the integration:
|
### npmextra.json
|
||||||
|
|
||||||
```typescript
|
tsdoc uses `npmextra.json` for project metadata. The `tsdoc` key holds legal information that gets appended to generated READMEs:
|
||||||
import { TypeDoc } from '@git.zone/tsdoc';
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
const generateTypeDocDocs = async () => {
|
```json
|
||||||
// Assume you are in the root directory of your TypeScript project
|
{
|
||||||
const cwd = process.cwd();
|
"tsdoc": {
|
||||||
|
"legal": "\n## License and Legal Information\n\n..."
|
||||||
// Create an instance of the TypeDoc class
|
},
|
||||||
const typeDocInstance = new TypeDoc(cwd);
|
"gitzone": {
|
||||||
|
"module": {
|
||||||
// Prepare additional options if necessary (e.g., setting a public subdirectory for docs)
|
"githost": "gitlab.com",
|
||||||
const options = { publicSubdir: 'documentation' };
|
"gitscope": "gitzone",
|
||||||
|
"gitrepo": "tsdoc",
|
||||||
// Compile your TypeScript project documentation.
|
"npmPackagename": "@git.zone/tsdoc",
|
||||||
// The compile method handles creating the tsconfig file, running the shell command, and cleaning up afterward.
|
"description": "...",
|
||||||
try {
|
"keywords": ["..."]
|
||||||
console.log('Compiling TypeScript documentation using TypeDoc...');
|
|
||||||
await typeDocInstance.compile(options);
|
|
||||||
console.log('Documentation generated at:', path.join(cwd, 'public', 'documentation'));
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error compiling TypeDoc documentation:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
generateTypeDocDocs();
|
|
||||||
```
|
|
||||||
|
|
||||||
This script clearly demonstrates how TypeDoc automation is structured inside the module. By invoking the compile() method, the class takes care of setting directory paths, preparing command arguments, and executing the underlying TypeDoc binary using the smart shell plugin.
|
|
||||||
|
|
||||||
#### 3. Customizing Context Building
|
|
||||||
|
|
||||||
One of the critical functionalities within @git.zone/tsdoc is its ability to build a smart context for documentation generation. The module not only collects file content from your project (like package.json, readme.hints.md, and other source files) but also intelligently trims and summarizes these contents to fit within token limits for AI processing.
|
|
||||||
|
|
||||||
Consider the following deep-dive example into the context building process:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { EnhancedContext } from '@git.zone/tsdoc';
|
|
||||||
import { ConfigManager } from '@git.zone/tsdoc/dist/ts/context/config-manager.js';
|
|
||||||
|
|
||||||
const buildProjectContext = async () => {
|
|
||||||
const projectDir = process.cwd();
|
|
||||||
|
|
||||||
// Create an instance of the EnhancedContext class to optimize file content for AI use.
|
|
||||||
const enhancedContext = new EnhancedContext(projectDir);
|
|
||||||
|
|
||||||
// Initialize the context builder. This ensures that configuration (e.g., token budgets, trimming options) is loaded.
|
|
||||||
await enhancedContext.initialize();
|
|
||||||
|
|
||||||
// Optionally, you can choose to set a custom token budget and context mode.
|
|
||||||
enhancedContext.setTokenBudget(100000); // for example, limit tokens to 100K
|
|
||||||
enhancedContext.setContextMode('trimmed');
|
|
||||||
|
|
||||||
// Build the context string from selected files in your project.
|
|
||||||
const contextResult = await enhancedContext.buildContext('readme');
|
|
||||||
|
|
||||||
console.log('Context generated with token count:', contextResult.tokenCount);
|
|
||||||
console.log('Token savings due to trimming:', contextResult.tokenSavings);
|
|
||||||
|
|
||||||
// The context string includes file boundaries and token information.
|
|
||||||
console.log('Generated Context:', contextResult.context);
|
|
||||||
};
|
|
||||||
|
|
||||||
buildProjectContext();
|
|
||||||
```
|
|
||||||
|
|
||||||
In this example:
|
|
||||||
- The EnhancedContext class is initialized with the project directory.
|
|
||||||
- Configuration is loaded via the ConfigManager, which reads parameters from npmextra.json.
|
|
||||||
- The context builder then gathers files (such as package.json, readme hints, TypeScript sources, etc.), trims unnecessary content, and builds a context string.
|
|
||||||
- Finally, it prints the overall token counts and savings, giving you valuable feedback on how the context was optimized for the AI input.
|
|
||||||
|
|
||||||
This detailed context-building mechanism is essential for managing large TypeScript projects. It ensures that the AI engine can process relevant code data without being overwhelmed by too many tokens.
|
|
||||||
|
|
||||||
#### 4. Working with the Readme Class for Automatic README Generation
|
|
||||||
|
|
||||||
The Readme class in @git.zone/tsdoc takes the AI-enhanced documentation further by not only generating a project-level README but also iterating over submodules within your project. This ensures that every published module has its own complete, AI-generated README.
|
|
||||||
|
|
||||||
Here is an advanced example demonstrating how to trigger README generation for the main project and its submodules:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { AiDoc } from '@git.zone/tsdoc';
|
|
||||||
import * as path from 'path';
|
|
||||||
import { logger } from '@git.zone/tsdoc/dist/ts/logging.js';
|
|
||||||
import { readFileSync } from 'fs';
|
|
||||||
|
|
||||||
const buildProjectReadme = async () => {
|
|
||||||
const projectDir = process.cwd();
|
|
||||||
|
|
||||||
// Create an instance of the AiDoc class to handle AI-enhanced docs generation
|
|
||||||
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-openai-token' });
|
|
||||||
|
|
||||||
// Start the AI interface
|
|
||||||
await aiDoc.start();
|
|
||||||
|
|
||||||
// Build the primary README for the project directory
|
|
||||||
console.log('Generating primary README...');
|
|
||||||
await aiDoc.buildReadme(projectDir);
|
|
||||||
|
|
||||||
// Logging function to verify submodule processing
|
|
||||||
logger.log('info', `Primary README generated in ${projectDir}`);
|
|
||||||
|
|
||||||
// Assume that submodules are organized in distinct directories.
|
|
||||||
// Here we simulate the process of scanning subdirectories and triggering README generation for each.
|
|
||||||
const subModules = ['submodule1', 'submodule2'];
|
|
||||||
|
|
||||||
// Loop through each submodule directory to generate its README.
|
|
||||||
for (const subModule of subModules) {
|
|
||||||
const subModuleDir = path.join(projectDir, subModule);
|
|
||||||
logger.log('info', `Generating README for submodule: ${subModule}`);
|
|
||||||
|
|
||||||
// Each submodule README is generated independently.
|
|
||||||
await aiDoc.buildReadme(subModuleDir);
|
|
||||||
|
|
||||||
// Optionally, read the generated README content for verification.
|
|
||||||
const readmePath = path.join(subModuleDir, 'readme.md');
|
|
||||||
try {
|
|
||||||
const readmeContent = readFileSync(readmePath, 'utf8');
|
|
||||||
logger.log('info', `Generated README for ${subModule}:\n${readmeContent.substring(0, 200)}...`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', `Failed to read README for ${subModule}: ${error}`);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
};
|
}
|
||||||
|
|
||||||
buildProjectReadme();
|
|
||||||
```
|
```
|
||||||
|
|
||||||
In this example, the script:
|
## Architecture
|
||||||
- Starts by building the AI-enhanced README for the entire project.
|
|
||||||
- Then iterates over a list of submodule directories and generates READMEs for each.
|
|
||||||
- Uses the logging utility to provide immediate feedback on the generation process.
|
|
||||||
- Optionally, reads back a snippet of the generated file to verify successful documentation generation.
|
|
||||||
|
|
||||||
This approach ensures that projects with multiple submodules or packages maintain a consistent and high-quality documentation standard across every component.
|
### Core Components
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
```
|
||||||
### Plugin-Based Architecture and Integrations
|
@git.zone/tsdoc
|
||||||
|
├── AiDoc # Main orchestrator - manages AI model and delegates to task classes
|
||||||
Under the hood, @git.zone/tsdoc leverages a number of smaller, focused plugins that extend its functionality. These plugins facilitate file system operations, shell command execution, environment variable management, and logging. The modular design makes it easy to extend or customize the tool according to your needs.
|
├── TypeDoc # Traditional TypeDoc API documentation generation
|
||||||
|
├── ProjectContext # Gathers project files for context (package.json, ts/, test/)
|
||||||
The relevant plugins include:
|
├── DiffProcessor # Intelligent git diff processing with prioritization and sampling
|
||||||
- smartai: Provides the API integration with OpenAI.
|
├── Readme # AI-powered README generation using runAgent with filesystem tools
|
||||||
- smartcli: Handles CLI input parsing and command setup.
|
├── Commit # AI-powered commit message generation with diff analysis
|
||||||
- smartdelay: Manages asynchronous delays and debouncing.
|
├── Description # AI-powered description and keyword generation
|
||||||
- smartfile: Offers an abstraction over file I/O operations.
|
└── CLI # Command-line interface built on @push.rocks/smartcli
|
||||||
- smartgit: Facilitates integration with git repositories (e.g., retrieving diffs, commit status).
|
|
||||||
- smartinteract: Eases interaction with the user (prompting for tokens, confirming actions).
|
|
||||||
- smartlog and smartlogDestinationLocal: Provide comprehensive logging mechanisms.
|
|
||||||
- smartpath, smartshell, and smarttime: Manage file paths, execute shell commands, and process time data respectively.
|
|
||||||
|
|
||||||
Below is a sample snippet illustrating how you might directly interact with a few of these plugins to, for example, run a custom shell command or log events:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import * as plugins from '@git.zone/tsdoc/dist/ts/plugins.js';
|
|
||||||
import { logger } from '@git.zone/tsdoc/dist/ts/logging.js';
|
|
||||||
|
|
||||||
const runCustomCommand = async () => {
|
|
||||||
// Create an instance of the smart shell utility
|
|
||||||
const smartshellInstance = new plugins.smartshell.Smartshell({
|
|
||||||
executor: 'bash',
|
|
||||||
pathDirectories: [plugins.smartpath.join(process.cwd(), 'node_modules/.bin')]
|
|
||||||
});
|
|
||||||
|
|
||||||
try {
|
|
||||||
// Execute a sample shell command, e.g., listing files in the current directory
|
|
||||||
const output = await smartshellInstance.exec('ls -la');
|
|
||||||
logger.log('info', `Shell command output:\n${output}`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', 'Error executing custom shell command:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
runCustomCommand();
|
|
||||||
```
|
```
|
||||||
|
|
||||||
This example shows how to:
|
### AI Agent Architecture
|
||||||
- Import the necessary plugins.
|
|
||||||
- Set up a smartshell instance by specifying the shell executor and the directories where executables are located.
|
|
||||||
- Execute a command (in this case, listing directory contents) and log the results using the provided logging plugin.
|
|
||||||
|
|
||||||
Such examples demonstrate the flexibility provided by the module’s internal API. They also illustrate that even if you choose not to use the CLI, you can still leverage the @git.zone/tsdoc functionality programmatically in a highly integrated fashion.
|
tsdoc uses `@push.rocks/smartagent`'s `runAgent()` function with `@push.rocks/smartai`'s `getModel()` for all AI tasks. Each documentation task (readme, commit, description) runs an autonomous AI agent that:
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
1. Receives a system prompt defining its role and constraints
|
||||||
### Handling Git Commit Messages with AI
|
2. Gets access to scoped filesystem tools (read-only, limited to project directory)
|
||||||
|
3. Explores the project structure autonomously using tool calls
|
||||||
|
4. Produces the final output (README markdown, commit JSON, or description JSON)
|
||||||
|
|
||||||
A unique feature of this tool is its capacity to assist with creating smart commit messages based on code changes. The Commit class (found within the aidocs_classes directory) ties together output from smartgit and AiDoc to suggest commit messages that are both descriptive and formatted according to conventional commit guidelines.
|
### Diff Processing
|
||||||
|
|
||||||
Consider this example where you generate a commit message based on the diff from your git repository:
|
The `DiffProcessor` class handles large git diffs intelligently:
|
||||||
|
|
||||||
```typescript
|
- **Small files** (< 300 lines): Included in full
|
||||||
import { AiDoc } from '@git.zone/tsdoc';
|
- **Medium files** (< 800 lines): Head/tail sampling with context
|
||||||
|
- **Large files**: Metadata only (filepath, lines added/removed)
|
||||||
|
- Files are prioritized by importance: source > test > config > docs > build artifacts
|
||||||
|
- Token budget is enforced dynamically based on OpenAI's context limits
|
||||||
|
|
||||||
const generateCommitMessage = async () => {
|
## Requirements
|
||||||
// Create an instance of AiDoc
|
|
||||||
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-openai-token' });
|
|
||||||
|
|
||||||
// Initialize the AI service
|
- **Node.js** >= 18.0.0
|
||||||
await aiDoc.start();
|
- **TypeScript** project with `ts/` source directory
|
||||||
|
- **OpenAI API key** (for AI features)
|
||||||
try {
|
|
||||||
// Generate and retrieve the next commit object based on the uncommitted changes in the repository.
|
|
||||||
const commitObject = await aiDoc.buildNextCommitObject(process.cwd());
|
|
||||||
console.log('Recommended commit object:', commitObject);
|
|
||||||
|
|
||||||
// The commit object is structured with the following fields:
|
|
||||||
// - recommendedNextVersionLevel: Indicates whether the commit is a fix, feature, or breaking change.
|
|
||||||
// - recommendedNextVersionScope: The scope of changes.
|
|
||||||
// - recommendedNextVersionMessage: A short commit message.
|
|
||||||
// - recommendedNextVersionDetails: A list of details explaining the changes.
|
|
||||||
// - recommendedNextVersion: A computed version string.
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error generating commit message:', error);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
generateCommitMessage();
|
|
||||||
```
|
|
||||||
|
|
||||||
The process works as follows:
|
|
||||||
1. The AiDoc instance is created and started.
|
|
||||||
2. The tool uses the smartgit plugin to fetch uncommitted changes from the repository.
|
|
||||||
3. It then builds a context string incorporating file diffs and project metadata.
|
|
||||||
4. Finally, the OpenAI API is queried to produce a commit message formatted as JSON. This JSON object is parsed and can be used directly in your git workflow.
|
|
||||||
|
|
||||||
This advanced integration assists teams in maintaining consistent commit message standards while reducing the manual burden of summarizing code changes.
|
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
|
||||||
### Detailed Explanation of Internal Mechanics
|
|
||||||
|
|
||||||
If you are curious about the intricate inner workings of @git.zone/tsdoc and wish to extend or debug its behavior, here is an in-depth explanation of some internal mechanisms.
|
|
||||||
|
|
||||||
#### Context Trimming Strategy
|
|
||||||
|
|
||||||
Managing token count is critical when interfacing with APIs that have strict limits. The module uses a multi-step process:
|
|
||||||
- It gathers various files (such as package.json, ts files, readme hints).
|
|
||||||
- It sorts the files and calculates the token count using the GPT tokenizer.
|
|
||||||
- It applies trimming strategies such as removing function implementations or comments in TypeScript files, based on a configurable set of parameters.
|
|
||||||
- Finally, it constructs a unified context string that includes file boundaries for clarity.
|
|
||||||
|
|
||||||
For example, the ContextTrimmer class carries out these transformations:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { ContextTrimmer } from '@git.zone/tsdoc/dist/ts/context/context-trimmer.js';
|
|
||||||
|
|
||||||
const trimFileContent = (filePath: string, content: string): string => {
|
|
||||||
// Create an instance of ContextTrimmer with default configuration
|
|
||||||
const trimmer = new ContextTrimmer({
|
|
||||||
removeImplementations: true,
|
|
||||||
preserveJSDoc: true,
|
|
||||||
maxFunctionLines: 5,
|
|
||||||
removeComments: true,
|
|
||||||
removeBlankLines: true
|
|
||||||
});
|
|
||||||
|
|
||||||
// Trim the content based on the file type and configured options
|
|
||||||
const trimmedContent = trimmer.trimFile(filePath, content, 'trimmed');
|
|
||||||
return trimmedContent;
|
|
||||||
};
|
|
||||||
|
|
||||||
// Example usage with a TypeScript file
|
|
||||||
const tsFileContent = `
|
|
||||||
/**
|
|
||||||
* This function calculates the sum of two numbers.
|
|
||||||
*/
|
|
||||||
export const add = (a: number, b: number): number => {
|
|
||||||
// Calculation logic
|
|
||||||
return a + b;
|
|
||||||
};
|
|
||||||
`;
|
|
||||||
|
|
||||||
const trimmedTSContent = trimFileContent('src/math.ts', tsFileContent);
|
|
||||||
console.log('Trimmed TypeScript File Content:\n', trimmedTSContent);
|
|
||||||
```
|
|
||||||
|
|
||||||
This process helps in reducing the number of tokens before sending the data to the AI API while preserving the essential context needed for documentation generation.
|
|
||||||
|
|
||||||
#### Dynamic Configuration Management
|
|
||||||
|
|
||||||
The module’s configuration is stored in the npmextra.json file and includes settings for context building, trimming strategies, and task-specific options. The ConfigManager class reads these settings and merges them with default values. This dynamic configuration system ensures that the behavior of the documentation tool can be easily adjusted without altering the source code.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { ConfigManager } from '@git.zone/tsdoc/dist/ts/context/config-manager.js';
|
|
||||||
|
|
||||||
const updateDocumentationConfig = async () => {
|
|
||||||
const projectDir = process.cwd();
|
|
||||||
const configManager = ConfigManager.getInstance();
|
|
||||||
|
|
||||||
// Initialize the configuration manager with the project directory
|
|
||||||
await configManager.initialize(projectDir);
|
|
||||||
|
|
||||||
// Retrieve the current configuration
|
|
||||||
let currentConfig = configManager.getConfig();
|
|
||||||
console.log('Current context configuration:', currentConfig);
|
|
||||||
|
|
||||||
// If you want to change some parameters (e.g., maxTokens), update and then save the new configuration
|
|
||||||
const newConfig = { maxTokens: 150000 };
|
|
||||||
await configManager.updateConfig(newConfig);
|
|
||||||
|
|
||||||
console.log('Configuration updated successfully.');
|
|
||||||
};
|
|
||||||
|
|
||||||
updateDocumentationConfig();
|
|
||||||
```
|
|
||||||
|
|
||||||
In this snippet, the ConfigManager:
|
|
||||||
- Loads current configuration from npmextra.json.
|
|
||||||
- Allows updates to specific keys (such as token limits).
|
|
||||||
- Persists these changes back to the file system using the smartfile plugin.
|
|
||||||
|
|
||||||
#### Logging and Diagnostic Output
|
|
||||||
|
|
||||||
Throughout its execution, @git.zone/tsdoc logs important information such as token counts, file statistics, and shell command outputs. This logging is accomplished through a combination of the smartlog and smartlogDestinationLocal plugins. The following example illustrates how logging can help diagnose execution issues:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { logger } from '@git.zone/tsdoc/dist/ts/logging.js';
|
|
||||||
|
|
||||||
const logDiagnosticInfo = () => {
|
|
||||||
logger.log('info', 'Starting documentation generation process...');
|
|
||||||
|
|
||||||
// Log additional contextual information
|
|
||||||
logger.log('debug', 'Project directory:', process.cwd());
|
|
||||||
logger.log('debug', 'Token budget set for context building:', 150000);
|
|
||||||
|
|
||||||
// Simulate a long-running process
|
|
||||||
setTimeout(() => {
|
|
||||||
logger.log('info', 'Documentation generation process completed successfully.');
|
|
||||||
}, 2000);
|
|
||||||
};
|
|
||||||
|
|
||||||
logDiagnosticInfo();
|
|
||||||
```
|
|
||||||
|
|
||||||
Using comprehensive logging, the tool provides feedback not only during normal execution but also in error scenarios, allowing developers to troubleshoot and optimize their documentation generation workflow.
|
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
|
||||||
### Integrating @git.zone/tsdoc into a Continuous Integration Pipeline
|
|
||||||
|
|
||||||
For teams looking to integrate documentation generation into their CI processes, @git.zone/tsdoc can be harnessed by scripting the CLI commands or by embedding the class-based API directly into your build scripts. Here’s an example of a CI script written in TypeScript that runs as part of a GitHub Action or similar workflow:
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { runCli } from '@git.zone/tsdoc';
|
|
||||||
import { logger } from '@git.zone/tsdoc/dist/ts/logging.js';
|
|
||||||
|
|
||||||
const runDocumentationPipeline = async () => {
|
|
||||||
try {
|
|
||||||
logger.log('info', 'Starting the documentation pipeline...');
|
|
||||||
|
|
||||||
// Run the CLI which automatically detects the project context and generates docs.
|
|
||||||
await runCli();
|
|
||||||
|
|
||||||
logger.log('info', 'Documentation pipeline completed successfully.');
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', 'Documentation pipeline encountered an error:', error);
|
|
||||||
process.exit(1);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
runDocumentationPipeline();
|
|
||||||
```
|
|
||||||
|
|
||||||
In a CI environment, you can invoke this script to ensure that documentation is generated or updated as part of your deployment process. The process includes building the README, updating project descriptions, and generating TypeDoc documentation if the project structure warrants it.
|
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
|
||||||
### Comprehensive Workflow Example
|
|
||||||
|
|
||||||
Below is a full-fledged example that combines many of the above functionalities into a single workflow. This script is intended to be run as part of your build process or as a standalone command, and it demonstrates how to initialize all parts of the module, generate documentation for the main project and its submodules, update configuration, and log key diagnostics.
|
|
||||||
|
|
||||||
```typescript
|
|
||||||
import { AiDoc } from '@git.zone/tsdoc';
|
|
||||||
import { TypeDoc } from '@git.zone/tsdoc';
|
|
||||||
import { ConfigManager } from '@git.zone/tsdoc/dist/ts/context/config-manager.js';
|
|
||||||
import { EnhancedContext } from '@git.zone/tsdoc/dist/ts/context/enhanced-context.js';
|
|
||||||
import * as path from 'path';
|
|
||||||
import { logger } from '@git.zone/tsdoc/dist/ts/logging.js';
|
|
||||||
|
|
||||||
const runFullDocumentationWorkflow = async () => {
|
|
||||||
const projectDir = process.cwd();
|
|
||||||
|
|
||||||
// Initialize configuration management
|
|
||||||
const configManager = ConfigManager.getInstance();
|
|
||||||
await configManager.initialize(projectDir);
|
|
||||||
logger.log('info', `Loaded configuration for project at ${projectDir}`);
|
|
||||||
|
|
||||||
// Step 1: Generate conventional TypeDoc documentation
|
|
||||||
const typeDocInstance = new TypeDoc(projectDir);
|
|
||||||
try {
|
|
||||||
logger.log('info', 'Starting TypeDoc documentation generation...');
|
|
||||||
await typeDocInstance.compile({ publicSubdir: 'docs' });
|
|
||||||
logger.log('info', `TypeDoc documentation generated in ${path.join(projectDir, 'public', 'docs')}`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', 'Error during TypeDoc generation:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Step 2: Run AI-enhanced documentation generation
|
|
||||||
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-openai-token' });
|
|
||||||
await aiDoc.start();
|
|
||||||
|
|
||||||
// Generate main README and updated project description
|
|
||||||
try {
|
|
||||||
logger.log('info', 'Generating main README via AI-enhanced documentation...');
|
|
||||||
await aiDoc.buildReadme(projectDir);
|
|
||||||
logger.log('info', 'Main README generated successfully.');
|
|
||||||
|
|
||||||
logger.log('info', 'Generating updated project description...');
|
|
||||||
await aiDoc.buildDescription(projectDir);
|
|
||||||
logger.log('info', 'Project description updated successfully.');
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', 'Error generating AI-enhanced documentation:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Step 3: Generate contextual data using EnhancedContext
|
|
||||||
const enhancedContext = new EnhancedContext(projectDir);
|
|
||||||
await enhancedContext.initialize();
|
|
||||||
enhancedContext.setContextMode('trimmed');
|
|
||||||
enhancedContext.setTokenBudget(150000);
|
|
||||||
|
|
||||||
const contextResult = await enhancedContext.buildContext('readme');
|
|
||||||
logger.log('info', `Context built successfully. Total tokens: ${contextResult.tokenCount}. Savings: ${contextResult.tokenSavings}`);
|
|
||||||
|
|
||||||
// Step 4: Process submodules (if any) and generate READMEs
|
|
||||||
const subModules = ['submodule1', 'submodule2'];
|
|
||||||
for (const subModule of subModules) {
|
|
||||||
const subModuleDir = path.join(projectDir, subModule);
|
|
||||||
logger.log('info', `Processing submodule: ${subModule}`);
|
|
||||||
try {
|
|
||||||
await aiDoc.buildReadme(subModuleDir);
|
|
||||||
logger.log('info', `Submodule README generated for ${subModule}`);
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', `Failed to generate README for ${subModule}:`, error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Optional: Generate a commit message suggestion based on current changes
|
|
||||||
try {
|
|
||||||
logger.log('info', 'Generating commit message suggestion...');
|
|
||||||
const commitObject = await aiDoc.buildNextCommitObject(projectDir);
|
|
||||||
logger.log('info', 'Suggested commit message object:', commitObject);
|
|
||||||
} catch (error) {
|
|
||||||
logger.log('error', 'Error generating commit message suggestion:', error);
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.log('info', 'Full documentation workflow completed successfully.');
|
|
||||||
};
|
|
||||||
|
|
||||||
runFullDocumentationWorkflow();
|
|
||||||
```
|
|
||||||
|
|
||||||
This comprehensive workflow showcases the integration of various facets of the @git.zone/tsdoc module:
|
|
||||||
- Loading and updating configuration via the ConfigManager.
|
|
||||||
- Generating static documentation using TypeDoc.
|
|
||||||
- Enhancing documentation with AI through the AiDoc class.
|
|
||||||
- Optimizing project context with the EnhancedContext class.
|
|
||||||
- Iterating over submodules to ensure all parts of your project are documented.
|
|
||||||
- Providing useful diagnostic logging for every step.
|
|
||||||
|
|
||||||
-------------------------------------------------------------------
|
|
||||||
### Wrapping Up the Usage Guide
|
|
||||||
|
|
||||||
The examples provided above demonstrate that @git.zone/tsdoc is not simply a CLI tool—it is a complete documentation framework designed to adapt to your workflow. Whether you are a developer looking to automate documentation updates in your CI pipeline or a team seeking an AI-powered enhancement for your project metadata, this module offers a wide range of interfaces and hooks for you to leverage.
|
|
||||||
|
|
||||||
Key takeaways:
|
|
||||||
- The CLI handles most routine tasks automatically while also exposing commands for specific documentation generation strategies.
|
|
||||||
- Programmatic usage allows deep integration with your project’s build and commit processes.
|
|
||||||
- The internal architecture—built on plugins, context optimization, and extensive logging—ensures that the tool can scale with project complexity.
|
|
||||||
- Advanced users can customize context trimming, file inclusion rules, and even modify AI queries to better suit their project’s needs.
|
|
||||||
|
|
||||||
Each code example provided here is written using modern ESM syntax and TypeScript to ensure compatibility with current development practices. Since the module is designed with extensibility in mind, developers are encouraged to explore the source code (especially the classes in the ts/ and ts/aidocs_classes directories) for further customization opportunities.
|
|
||||||
|
|
||||||
By integrating @git.zone/tsdoc into your workflow, you ensure that your project documentation remains accurate, comprehensive, and reflective of your latest code changes—whether you are generating a simple README or a complex API documentation set enhanced by AI insights.
|
|
||||||
|
|
||||||
Happy documenting!
|
|
||||||
|
|
||||||
## License and Legal Information
|
## License and Legal Information
|
||||||
|
|
||||||
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
||||||
|
|
||||||
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
@@ -734,7 +223,7 @@ This project is owned and maintained by Task Venture Capital GmbH. The names and
|
|||||||
|
|
||||||
### Company Information
|
### Company Information
|
||||||
|
|
||||||
Task Venture Capital GmbH
|
Task Venture Capital GmbH
|
||||||
Registered at District court Bremen HRB 35230 HB, Germany
|
Registered at District court Bremen HRB 35230 HB, Germany
|
||||||
|
|
||||||
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
||||||
|
|||||||
@@ -1,4 +1,4 @@
|
|||||||
import { tap, expect } from '@push.rocks/tapbundle';
|
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||||
import * as qenv from '@push.rocks/qenv';
|
import * as qenv from '@push.rocks/qenv';
|
||||||
let testQenv = new qenv.Qenv('./', '.nogit/');
|
let testQenv = new qenv.Qenv('./', '.nogit/');
|
||||||
|
|
||||||
@@ -33,7 +33,10 @@ tap.test('should build commit object', async () => {
|
|||||||
expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
|
expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
|
||||||
expect(commitObject).toHaveProperty('recommendedNextVersionScope');
|
expect(commitObject).toHaveProperty('recommendedNextVersionScope');
|
||||||
expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
|
expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
|
||||||
|
});
|
||||||
|
|
||||||
})
|
tap.test('should stop AIdocs', async () => {
|
||||||
|
await aidocs.stop();
|
||||||
|
});
|
||||||
|
|
||||||
tap.start();
|
export default tap.start();
|
||||||
|
|||||||
304
test/test.diffprocessor.node.ts
Normal file
304
test/test.diffprocessor.node.ts
Normal file
@@ -0,0 +1,304 @@
|
|||||||
|
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { DiffProcessor } from '../ts/classes.diffprocessor.js';
|
||||||
|
|
||||||
|
// Sample diff strings for testing
|
||||||
|
const createSmallDiff = (filepath: string, addedLines = 5, removedLines = 3): string => {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`--- a/${filepath}`);
|
||||||
|
lines.push(`+++ b/${filepath}`);
|
||||||
|
lines.push(`@@ -1,10 +1,12 @@`);
|
||||||
|
|
||||||
|
for (let i = 0; i < removedLines; i++) {
|
||||||
|
lines.push(`-removed line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < addedLines; i++) {
|
||||||
|
lines.push(`+added line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
lines.push(' unchanged line');
|
||||||
|
|
||||||
|
return lines.join('\n');
|
||||||
|
};
|
||||||
|
|
||||||
|
const createMediumDiff = (filepath: string): string => {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`--- a/${filepath}`);
|
||||||
|
lines.push(`+++ b/${filepath}`);
|
||||||
|
lines.push(`@@ -1,100 +1,150 @@`);
|
||||||
|
|
||||||
|
// 150 lines of changes
|
||||||
|
for (let i = 0; i < 75; i++) {
|
||||||
|
lines.push(`+added line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < 75; i++) {
|
||||||
|
lines.push(`-removed line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join('\n');
|
||||||
|
};
|
||||||
|
|
||||||
|
const createLargeDiff = (filepath: string): string => {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`--- a/${filepath}`);
|
||||||
|
lines.push(`+++ b/${filepath}`);
|
||||||
|
lines.push(`@@ -1,1000 +1,1500 @@`);
|
||||||
|
|
||||||
|
// 2500 lines of changes
|
||||||
|
for (let i = 0; i < 1250; i++) {
|
||||||
|
lines.push(`+added line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < 1250; i++) {
|
||||||
|
lines.push(`-removed line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join('\n');
|
||||||
|
};
|
||||||
|
|
||||||
|
const createDeletedFileDiff = (filepath: string): string => {
|
||||||
|
return `--- a/${filepath}
|
||||||
|
+++ /dev/null
|
||||||
|
@@ -1,5 +0,0 @@
|
||||||
|
-deleted line 1
|
||||||
|
-deleted line 2
|
||||||
|
-deleted line 3
|
||||||
|
-deleted line 4
|
||||||
|
-deleted line 5`;
|
||||||
|
};
|
||||||
|
|
||||||
|
const createAddedFileDiff = (filepath: string): string => {
|
||||||
|
return `--- /dev/null
|
||||||
|
+++ b/${filepath}
|
||||||
|
@@ -0,0 +1,5 @@
|
||||||
|
+added line 1
|
||||||
|
+added line 2
|
||||||
|
+added line 3
|
||||||
|
+added line 4
|
||||||
|
+added line 5`;
|
||||||
|
};
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should parse small diff correctly', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const smallDiff = createSmallDiff('src/test.ts', 5, 3);
|
||||||
|
|
||||||
|
const result = processor.processDiffs([smallDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(1);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(0);
|
||||||
|
expect(result.metadataOnly.length).toEqual(0);
|
||||||
|
expect(result.totalTokens).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should summarize medium diff', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const mediumDiff = createMediumDiff('src/medium-file.ts');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([mediumDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(0);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(1);
|
||||||
|
expect(result.metadataOnly.length).toEqual(0);
|
||||||
|
|
||||||
|
// Verify the summarized diff contains the sample
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('SUMMARIZED DIFFS');
|
||||||
|
expect(formatted).toInclude('lines omitted');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle large diff as metadata only', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const largeDiff = createLargeDiff('dist/bundle.js');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([largeDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(0);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(0);
|
||||||
|
expect(result.metadataOnly.length).toEqual(1);
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('METADATA ONLY');
|
||||||
|
expect(formatted).toInclude('dist/bundle.js');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should prioritize source files over build artifacts', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('dist/bundle.js'),
|
||||||
|
createSmallDiff('src/important.ts'),
|
||||||
|
createSmallDiff('build/output.js'),
|
||||||
|
createSmallDiff('src/core.ts'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(4);
|
||||||
|
|
||||||
|
// Source files should be included fully first
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
const srcImportantIndex = formatted.indexOf('src/important.ts');
|
||||||
|
const srcCoreIndex = formatted.indexOf('src/core.ts');
|
||||||
|
const distBundleIndex = formatted.indexOf('dist/bundle.js');
|
||||||
|
const buildOutputIndex = formatted.indexOf('build/output.js');
|
||||||
|
|
||||||
|
// Source files should appear before build artifacts
|
||||||
|
expect(srcImportantIndex).toBeLessThan(distBundleIndex);
|
||||||
|
expect(srcCoreIndex).toBeLessThan(buildOutputIndex);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should respect token budget', async () => {
|
||||||
|
const processor = new DiffProcessor({
|
||||||
|
maxDiffTokens: 500, // Very small budget to force metadata-only
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create multiple large diffs that will exceed budget
|
||||||
|
const diffs = [
|
||||||
|
createLargeDiff('src/file1.ts'),
|
||||||
|
createLargeDiff('src/file2.ts'),
|
||||||
|
createLargeDiff('src/file3.ts'),
|
||||||
|
createLargeDiff('src/file4.ts'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
|
||||||
|
expect(result.totalTokens).toBeLessThanOrEqual(500);
|
||||||
|
// With such a small budget and large files, most should be metadata only
|
||||||
|
expect(result.metadataOnly.length).toBeGreaterThanOrEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle deleted files', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const deletedDiff = createDeletedFileDiff('src/old-file.ts');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([deletedDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
// Small deleted file should be included fully
|
||||||
|
expect(result.fullDiffs.length).toEqual(1);
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('src/old-file.ts');
|
||||||
|
// Verify the file appears in the output
|
||||||
|
expect(formatted).toInclude('FULL DIFFS');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle added files', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const addedDiff = createAddedFileDiff('src/new-file.ts');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([addedDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
// Small added file should be included fully
|
||||||
|
expect(result.fullDiffs.length).toEqual(1);
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('src/new-file.ts');
|
||||||
|
// Verify the file appears in the output
|
||||||
|
expect(formatted).toInclude('FULL DIFFS');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle mixed file sizes', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('src/small.ts'),
|
||||||
|
createMediumDiff('src/medium.ts'),
|
||||||
|
createLargeDiff('dist/large.js'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(3);
|
||||||
|
expect(result.fullDiffs.length).toEqual(1); // small file
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(1); // medium file
|
||||||
|
expect(result.metadataOnly.length).toEqual(1); // large file
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('FULL DIFFS (1 files)');
|
||||||
|
expect(formatted).toInclude('SUMMARIZED DIFFS (1 files)');
|
||||||
|
expect(formatted).toInclude('METADATA ONLY (1 files)');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle empty diff array', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const result = processor.processDiffs([]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(0);
|
||||||
|
expect(result.fullDiffs.length).toEqual(0);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(0);
|
||||||
|
expect(result.metadataOnly.length).toEqual(0);
|
||||||
|
expect(result.totalTokens).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should generate comprehensive summary', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('src/file1.ts'),
|
||||||
|
createSmallDiff('src/file2.ts'),
|
||||||
|
createMediumDiff('src/file3.ts'),
|
||||||
|
createLargeDiff('dist/bundle.js'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
|
||||||
|
expect(formatted).toInclude('GIT DIFF SUMMARY');
|
||||||
|
expect(formatted).toInclude('Files changed: 4 total');
|
||||||
|
expect(formatted).toInclude('included in full');
|
||||||
|
expect(formatted).toInclude('summarized');
|
||||||
|
expect(formatted).toInclude('metadata only');
|
||||||
|
expect(formatted).toInclude('Estimated tokens:');
|
||||||
|
expect(formatted).toInclude('END OF GIT DIFF');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle custom options', async () => {
|
||||||
|
const processor = new DiffProcessor({
|
||||||
|
maxDiffTokens: 50000,
|
||||||
|
smallFileLines: 30,
|
||||||
|
mediumFileLines: 150,
|
||||||
|
sampleHeadLines: 10,
|
||||||
|
sampleTailLines: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
const mediumDiff = createMediumDiff('src/file.ts'); // 150 lines
|
||||||
|
const result = processor.processDiffs([mediumDiff]);
|
||||||
|
|
||||||
|
// With custom settings, this should be summarized (exactly at the mediumFileLines threshold)
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should prioritize test files appropriately', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('src/core.ts'),
|
||||||
|
createSmallDiff('test/core.test.ts'),
|
||||||
|
createSmallDiff('config.json'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
|
||||||
|
// Source files should come before test files
|
||||||
|
const srcIndex = formatted.indexOf('src/core.ts');
|
||||||
|
const testIndex = formatted.indexOf('test/core.test.ts');
|
||||||
|
|
||||||
|
expect(srcIndex).toBeLessThan(testIndex);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle files with no changes gracefully', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const emptyDiff = `--- a/src/file.ts
|
||||||
|
+++ b/src/file.ts
|
||||||
|
@@ -1,1 +1,1 @@`;
|
||||||
|
|
||||||
|
const result = processor.processDiffs([emptyDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(1); // Still included as a small file
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
import { expect, tap } from '@push.rocks/tapbundle';
|
|
||||||
import * as tsdoc from '../ts/index.js';
|
|
||||||
|
|
||||||
tap.test('first test', async () => {
|
|
||||||
console.log('test');
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.start();
|
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@git.zone/tsdoc',
|
name: '@git.zone/tsdoc',
|
||||||
version: '1.5.1',
|
version: '2.0.0',
|
||||||
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
|
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,27 @@
|
|||||||
import * as plugins from '../plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import { AiDoc } from '../classes.aidoc.js';
|
import { AiDoc } from '../classes.aidoc.js';
|
||||||
import { ProjectContext } from './projectcontext.js';
|
import { ProjectContext } from './projectcontext.js';
|
||||||
|
import { DiffProcessor } from '../classes.diffprocessor.js';
|
||||||
|
import { logger } from '../logging.js';
|
||||||
|
|
||||||
|
// Token budget configuration for OpenAI API limits
|
||||||
|
const TOKEN_BUDGET = {
|
||||||
|
OPENAI_CONTEXT_LIMIT: 272000, // OpenAI's configured limit
|
||||||
|
SAFETY_MARGIN: 10000, // Buffer to avoid hitting exact limit
|
||||||
|
SMARTAGENT_OVERHEAD: 180000, // System msgs, tools, history, formatting
|
||||||
|
TASK_PROMPT_OVERHEAD: 2000, // Task prompt template size
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate max tokens available for diff content based on total budget
|
||||||
|
*/
|
||||||
|
function calculateMaxDiffTokens(): number {
|
||||||
|
const available = TOKEN_BUDGET.OPENAI_CONTEXT_LIMIT
|
||||||
|
- TOKEN_BUDGET.SAFETY_MARGIN
|
||||||
|
- TOKEN_BUDGET.SMARTAGENT_OVERHEAD
|
||||||
|
- TOKEN_BUDGET.TASK_PROMPT_OVERHEAD;
|
||||||
|
return Math.max(available, 30000);
|
||||||
|
}
|
||||||
|
|
||||||
export interface INextCommitObject {
|
export interface INextCommitObject {
|
||||||
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
|
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
|
||||||
@@ -27,105 +48,237 @@ export class Commit {
|
|||||||
smartgitInstance,
|
smartgitInstance,
|
||||||
this.projectDir
|
this.projectDir
|
||||||
);
|
);
|
||||||
const diffStringArray = await gitRepo.getUncommittedDiff([
|
|
||||||
|
// Define comprehensive exclusion patterns
|
||||||
|
// smartgit@3.3.0+ supports glob patterns natively
|
||||||
|
const excludePatterns = [
|
||||||
|
// Lock files
|
||||||
'pnpm-lock.yaml',
|
'pnpm-lock.yaml',
|
||||||
'package-lock.json',
|
'package-lock.json',
|
||||||
]);
|
'npm-shrinkwrap.json',
|
||||||
// Use the new TaskContextFactory for optimized context
|
'yarn.lock',
|
||||||
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
|
'deno.lock',
|
||||||
await taskContextFactory.initialize();
|
'bun.lockb',
|
||||||
|
|
||||||
// Generate context specifically for commit task
|
// Build artifacts (main culprit for large diffs!)
|
||||||
const contextResult = await taskContextFactory.createContextForCommit(
|
'dist/**',
|
||||||
diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'
|
'dist_*/**', // dist_ts, dist_web, etc.
|
||||||
);
|
'build/**',
|
||||||
|
'.next/**',
|
||||||
// Get the optimized context string
|
'out/**',
|
||||||
let contextString = contextResult.context;
|
'public/dist/**',
|
||||||
|
|
||||||
// Log token usage statistics
|
// Compiled/bundled files
|
||||||
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
|
'**/*.js.map',
|
||||||
|
'**/*.d.ts.map',
|
||||||
// Check for token overflow against model limits
|
'**/*.min.js',
|
||||||
const MODEL_TOKEN_LIMIT = 200000; // o4-mini
|
'**/*.bundle.js',
|
||||||
if (contextResult.tokenCount > MODEL_TOKEN_LIMIT * 0.9) {
|
'**/*.chunk.js',
|
||||||
console.log(`⚠️ Warning: Context size (${contextResult.tokenCount} tokens) is close to or exceeds model limit (${MODEL_TOKEN_LIMIT} tokens).`);
|
|
||||||
console.log(`The model may not be able to process all information effectively.`);
|
// IDE/Editor directories
|
||||||
|
'.claude/**',
|
||||||
|
'.cursor/**',
|
||||||
|
'.vscode/**',
|
||||||
|
'.idea/**',
|
||||||
|
'**/*.swp',
|
||||||
|
'**/*.swo',
|
||||||
|
|
||||||
|
// Logs and caches
|
||||||
|
'.nogit/**',
|
||||||
|
'**/*.log',
|
||||||
|
'.cache/**',
|
||||||
|
'.rpt2_cache/**',
|
||||||
|
'coverage/**',
|
||||||
|
'.nyc_output/**',
|
||||||
|
];
|
||||||
|
|
||||||
|
// Pass glob patterns directly to smartgit - it handles matching internally
|
||||||
|
const diffStringArray = await gitRepo.getUncommittedDiff(excludePatterns);
|
||||||
|
|
||||||
|
// Process diffs intelligently using DiffProcessor
|
||||||
|
let processedDiffString: string;
|
||||||
|
|
||||||
|
if (diffStringArray.length > 0) {
|
||||||
|
// Diagnostic logging for raw diff statistics
|
||||||
|
const totalChars = diffStringArray.join('\n\n').length;
|
||||||
|
const estimatedTokens = Math.ceil(totalChars / 4);
|
||||||
|
|
||||||
|
console.log(`Raw git diff statistics:`);
|
||||||
|
console.log(` Files changed: ${diffStringArray.length}`);
|
||||||
|
console.log(` Total characters: ${totalChars.toLocaleString()}`);
|
||||||
|
console.log(` Estimated tokens: ${estimatedTokens.toLocaleString()}`);
|
||||||
|
console.log(` Exclusion patterns: ${excludePatterns.length}`);
|
||||||
|
|
||||||
|
// Calculate available tokens for diff based on total budget
|
||||||
|
const maxDiffTokens = calculateMaxDiffTokens();
|
||||||
|
console.log(`Token budget: ${maxDiffTokens.toLocaleString()} tokens for diff (limit: ${TOKEN_BUDGET.OPENAI_CONTEXT_LIMIT.toLocaleString()}, overhead: ${(TOKEN_BUDGET.SMARTAGENT_OVERHEAD + TOKEN_BUDGET.TASK_PROMPT_OVERHEAD).toLocaleString()})`);
|
||||||
|
|
||||||
|
// Use DiffProcessor to intelligently handle large diffs
|
||||||
|
const diffProcessor = new DiffProcessor({
|
||||||
|
maxDiffTokens, // Dynamic based on total budget
|
||||||
|
smallFileLines: 300, // Most source files are under 300 lines
|
||||||
|
mediumFileLines: 800, // Only very large files get head/tail treatment
|
||||||
|
sampleHeadLines: 75, // When sampling, show more context
|
||||||
|
sampleTailLines: 75, // When sampling, show more context
|
||||||
|
});
|
||||||
|
|
||||||
|
const processedDiff = diffProcessor.processDiffs(diffStringArray);
|
||||||
|
processedDiffString = diffProcessor.formatForContext(processedDiff);
|
||||||
|
|
||||||
|
console.log(`Processed diff statistics:`);
|
||||||
|
console.log(` Full diffs: ${processedDiff.fullDiffs.length} files`);
|
||||||
|
console.log(` Summarized: ${processedDiff.summarizedDiffs.length} files`);
|
||||||
|
console.log(` Metadata only: ${processedDiff.metadataOnly.length} files`);
|
||||||
|
console.log(` Final tokens: ${processedDiff.totalTokens.toLocaleString()}`);
|
||||||
|
|
||||||
|
if (estimatedTokens > 50000) {
|
||||||
|
console.log(`DiffProcessor reduced token usage: ${estimatedTokens.toLocaleString()} -> ${processedDiff.totalTokens.toLocaleString()}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate total tokens won't exceed limit
|
||||||
|
const totalEstimatedTokens = processedDiff.totalTokens
|
||||||
|
+ TOKEN_BUDGET.SMARTAGENT_OVERHEAD
|
||||||
|
+ TOKEN_BUDGET.TASK_PROMPT_OVERHEAD;
|
||||||
|
|
||||||
|
if (totalEstimatedTokens > TOKEN_BUDGET.OPENAI_CONTEXT_LIMIT - TOKEN_BUDGET.SAFETY_MARGIN) {
|
||||||
|
console.log(`Warning: Estimated tokens (${totalEstimatedTokens.toLocaleString()}) approaching limit`);
|
||||||
|
console.log(` Consider splitting into smaller commits`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
processedDiffString = 'No changes.';
|
||||||
}
|
}
|
||||||
|
|
||||||
let result = await this.aiDocsRef.openaiInstance.chat({
|
// Use runAgent for commit message generation with filesystem tool
|
||||||
systemMessage: `
|
const fsTools = plugins.smartagentTools.filesystemTool({ rootDir: this.projectDir });
|
||||||
You create a commit message for a git commit.
|
|
||||||
The commit message should be based on the files in the project.
|
|
||||||
You should not include any licensing information.
|
|
||||||
You should not include any personal information.
|
|
||||||
|
|
||||||
Important: Answer only in valid JSON.
|
const commitSystemPrompt = `
|
||||||
|
You create commit messages for git commits following semantic versioning conventions.
|
||||||
|
|
||||||
Your answer should be parseable with JSON.parse() without modifying anything.
|
You have access to filesystem tools to explore the project if needed.
|
||||||
|
|
||||||
Here is the structure of the JSON you should return:
|
IMPORTANT RULES:
|
||||||
|
- Only READ files (package.json, source files) for context
|
||||||
|
- Do NOT write, delete, or modify any files
|
||||||
|
- Version level (fix/feat/BREAKING CHANGE) must match the scope of changes
|
||||||
|
- Commit message must be clear, professional, and follow conventional commit conventions
|
||||||
|
- Do NOT include personal information, licensing details, or AI mentions (Claude/Codex)
|
||||||
|
- JSON structure must be valid with all required fields
|
||||||
|
- Scope must accurately reflect the changed modules/files
|
||||||
|
`;
|
||||||
|
|
||||||
interface {
|
const commitTaskPrompt = `
|
||||||
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
|
Project directory: ${this.projectDir}
|
||||||
recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
|
|
||||||
recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
|
You have access to filesystem tools to explore the project if needed:
|
||||||
recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
|
- Use list_directory to see project structure
|
||||||
recommendedNextVersion: string; // the recommended next version of the project, x.x.x
|
- Use read_file to read package.json or source files for context
|
||||||
|
|
||||||
|
Analyze the git diff below to understand what changed and generate a commit message.
|
||||||
|
|
||||||
|
You should not include any licensing information or personal information.
|
||||||
|
Never mention CLAUDE code, or codex.
|
||||||
|
|
||||||
|
Your final response must be ONLY valid JSON - the raw JSON object, nothing else.
|
||||||
|
No explanations, no summaries, no markdown - just the JSON object that can be parsed with JSON.parse().
|
||||||
|
|
||||||
|
Here is the structure of the JSON you must return:
|
||||||
|
|
||||||
|
{
|
||||||
|
"recommendedNextVersionLevel": "fix" | "feat" | "BREAKING CHANGE",
|
||||||
|
"recommendedNextVersionScope": "string",
|
||||||
|
"recommendedNextVersionMessage": "string (ONLY the description body WITHOUT the type(scope): prefix - e.g. 'bump dependency to ^1.2.6' NOT 'fix(deps): bump dependency to ^1.2.6')",
|
||||||
|
"recommendedNextVersionDetails": ["string"],
|
||||||
|
"recommendedNextVersion": "x.x.x"
|
||||||
}
|
}
|
||||||
|
|
||||||
For the recommendedNextVersionDetails, please only add a detail entries to the array if it has an obvious value to the reader.
|
For recommendedNextVersionDetails, only add entries that have obvious value to the reader.
|
||||||
|
|
||||||
You are being given the files of the project. You should use them to create the commit message.
|
Here is the git diff showing what changed:
|
||||||
Also you are given a diff
|
|
||||||
|
|
||||||
`,
|
${processedDiffString}
|
||||||
messageHistory: [],
|
|
||||||
userMessage: contextString,
|
Analyze these changes and output the JSON commit message object.
|
||||||
|
`;
|
||||||
|
|
||||||
|
logger.log('info', 'Starting commit message generation with agent...');
|
||||||
|
|
||||||
|
const commitResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: commitTaskPrompt,
|
||||||
|
system: commitSystemPrompt,
|
||||||
|
tools: fsTools,
|
||||||
|
maxSteps: 10,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[Commit] Tool call: ${toolName}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
// console.log(result.message);
|
// Extract JSON from result - handle cases where AI adds text around it
|
||||||
const resultObject: INextCommitObject = JSON.parse(
|
let jsonString = commitResult.text
|
||||||
result.message.replace('```json', '').replace('```', '')
|
.replace(/```json\n?/gi, '')
|
||||||
);
|
.replace(/```\n?/gi, '');
|
||||||
|
|
||||||
|
// Try to find JSON object in the result
|
||||||
|
const jsonMatch = jsonString.match(/\{[\s\S]*\}/);
|
||||||
|
if (!jsonMatch) {
|
||||||
|
throw new Error(`Could not find JSON object in result: ${jsonString.substring(0, 100)}...`);
|
||||||
|
}
|
||||||
|
jsonString = jsonMatch[0];
|
||||||
|
|
||||||
|
const resultObject: INextCommitObject = JSON.parse(jsonString);
|
||||||
|
|
||||||
const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
|
const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
|
||||||
let previousChangelog: plugins.smartfile.SmartFile;
|
let previousChangelog: plugins.smartfile.SmartFile;
|
||||||
if (await plugins.smartfile.fs.fileExists(previousChangelogPath)) {
|
if (await plugins.fsInstance.file(previousChangelogPath).exists()) {
|
||||||
previousChangelog = await plugins.smartfile.SmartFile.fromFilePath(previousChangelogPath);
|
previousChangelog = await plugins.smartfileFactory.fromFilePath(previousChangelogPath);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!previousChangelog) {
|
if (!previousChangelog) {
|
||||||
// lets build the changelog based on that
|
// lets build the changelog based on that
|
||||||
const commitMessages = await gitRepo.getAllCommitMessages();
|
const commitMessages = await gitRepo.getAllCommitMessages();
|
||||||
console.log(JSON.stringify(commitMessages, null, 2));
|
console.log(JSON.stringify(commitMessages, null, 2));
|
||||||
let result2 = await this.aiDocsRef.openaiInstance.chat({
|
|
||||||
messageHistory: [],
|
const changelogSystemPrompt = `
|
||||||
systemMessage: `
|
You generate changelog.md files for software projects.
|
||||||
|
|
||||||
|
RULES:
|
||||||
|
- Changelog must follow proper markdown format with ## headers for each version
|
||||||
|
- Entries must be chronologically ordered (newest first)
|
||||||
|
- Version ranges for trivial commits should be properly summarized
|
||||||
|
- No duplicate or empty entries
|
||||||
|
- Format: ## yyyy-mm-dd - x.x.x - scope
|
||||||
|
`;
|
||||||
|
|
||||||
|
const changelogTaskPrompt = `
|
||||||
You are building a changelog.md file for the project.
|
You are building a changelog.md file for the project.
|
||||||
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.
|
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.
|
||||||
|
|
||||||
A changelog entry should look like this:
|
A changelog entry should look like this:
|
||||||
|
|
||||||
## yyyy-mm-dd - x.x.x - scope here
|
## yyyy-mm-dd - x.x.x - scope here
|
||||||
main descriptiom here
|
main description here
|
||||||
|
|
||||||
- detailed bullet points follow
|
- detailed bullet points follow
|
||||||
|
|
||||||
You are given:
|
You are given:
|
||||||
* the commit messages of the project
|
* the commit messages of the project
|
||||||
|
|
||||||
Only return the changelog file, so it can be written directly to changelog.md`,
|
Only return the changelog file content, so it can be written directly to changelog.md.
|
||||||
userMessage: `
|
|
||||||
Here are the commit messages:
|
Here are the commit messages:
|
||||||
|
|
||||||
${JSON.stringify(commitMessages, null, 2)}
|
${JSON.stringify(commitMessages, null, 2)}
|
||||||
`,
|
`;
|
||||||
|
|
||||||
|
const changelogResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: changelogTaskPrompt,
|
||||||
|
system: changelogSystemPrompt,
|
||||||
|
maxSteps: 1,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[Changelog] Tool call: ${toolName}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
previousChangelog = await plugins.smartfile.SmartFile.fromString(
|
previousChangelog = plugins.smartfileFactory.fromString(
|
||||||
previousChangelogPath,
|
previousChangelogPath,
|
||||||
result2.message.replaceAll('```markdown', '').replaceAll('```', ''),
|
changelogResult.text.replaceAll('```markdown', '').replaceAll('```', ''),
|
||||||
'utf8'
|
'utf8'
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import type { AiDoc } from '../classes.aidoc.js';
|
import type { AiDoc } from '../classes.aidoc.js';
|
||||||
import * as plugins from '../plugins.js';
|
import * as plugins from '../plugins.js';
|
||||||
import { ProjectContext } from './projectcontext.js';
|
import { ProjectContext } from './projectcontext.js';
|
||||||
|
import { logger } from '../logging.js';
|
||||||
|
|
||||||
interface IDescriptionInterface {
|
interface IDescriptionInterface {
|
||||||
description: string;
|
description: string;
|
||||||
@@ -18,57 +19,77 @@ export class Description {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public async build() {
|
public async build() {
|
||||||
// Use the new TaskContextFactory for optimized context
|
// Use runAgent with filesystem tool for agent-driven exploration
|
||||||
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
|
const fsTools = plugins.smartagentTools.filesystemTool({ rootDir: this.projectDir });
|
||||||
await taskContextFactory.initialize();
|
|
||||||
|
|
||||||
// Generate context specifically for description task
|
|
||||||
const contextResult = await taskContextFactory.createContextForDescription();
|
|
||||||
const contextString = contextResult.context;
|
|
||||||
|
|
||||||
// Log token usage statistics
|
|
||||||
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
|
|
||||||
|
|
||||||
let result = await this.aiDocsRef.openaiInstance.chat({
|
const descriptionSystemPrompt = `
|
||||||
systemMessage: `
|
You create project descriptions and keywords for npm packages.
|
||||||
You create a json adhering the following interface:
|
|
||||||
|
You have access to filesystem tools to explore the project.
|
||||||
|
|
||||||
|
IMPORTANT RULES:
|
||||||
|
- Only READ files (package.json, npmextra.json, source files in ts/)
|
||||||
|
- Do NOT write, delete, or modify any files
|
||||||
|
- Your final response must be valid JSON only
|
||||||
|
- Description must be a clear, concise one-sentence summary
|
||||||
|
- Keywords must be relevant to the project's use cases
|
||||||
|
- Both description and keywords fields must be present
|
||||||
|
- Do NOT wrap JSON in markdown code blocks
|
||||||
|
`;
|
||||||
|
|
||||||
|
const descriptionTaskPrompt = `
|
||||||
|
PROJECT DIRECTORY: ${this.projectDir}
|
||||||
|
|
||||||
|
Use the filesystem tools to explore the project and understand what it does:
|
||||||
|
1. First, use list_directory to see the project structure
|
||||||
|
2. Read package.json to understand the package name and current description
|
||||||
|
3. Read npmextra.json if it exists for additional metadata
|
||||||
|
4. Read key source files in ts/ directory to understand the implementation
|
||||||
|
|
||||||
|
Then generate a description and keywords based on your exploration.
|
||||||
|
|
||||||
|
Your FINAL response must be valid JSON adhering to this interface:
|
||||||
{
|
{
|
||||||
description: string; // a sensible short, one sentence description of the project
|
description: string; // a sensible short, one sentence description of the project
|
||||||
keywords: string[]; // an array of tags that describe the project
|
keywords: string[]; // an array of tags that describe the project based on use cases
|
||||||
}
|
}
|
||||||
|
|
||||||
The description should be based on what you understand from the project's files.
|
|
||||||
The keywords should be based on use cases you see from the files.
|
|
||||||
Don't be cheap about the way you think.
|
|
||||||
|
|
||||||
Important: Answer only in valid JSON.
|
Important: Answer only in valid JSON.
|
||||||
You answer should be parseable with JSON.parse() without modifying anything.
|
Your answer should be parseable with JSON.parse() without modifying anything.
|
||||||
|
Don't wrap the JSON in \`\`\`json\`\`\` - just return the raw JSON object.
|
||||||
|
`;
|
||||||
|
|
||||||
Don't wrap the JSON in three ticks json!!!
|
logger.log('info', 'Starting description generation with agent...');
|
||||||
`,
|
|
||||||
messageHistory: [],
|
const descriptionResult = await plugins.smartagent.runAgent({
|
||||||
userMessage: contextString,
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: descriptionTaskPrompt,
|
||||||
|
system: descriptionSystemPrompt,
|
||||||
|
tools: fsTools,
|
||||||
|
maxSteps: 15,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[Description] Tool call: ${toolName}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
console.log(result.message);
|
console.log(descriptionResult.text);
|
||||||
const resultObject: IDescriptionInterface = JSON.parse(
|
const resultObject: IDescriptionInterface = JSON.parse(
|
||||||
result.message.replace('```json', '').replace('```', ''),
|
descriptionResult.text.replace('```json', '').replace('```', ''),
|
||||||
);
|
);
|
||||||
|
|
||||||
// Create a standard ProjectContext instance for file operations
|
// Use ProjectContext to get file handles for writing
|
||||||
const projectContext = new ProjectContext(this.projectDir);
|
const projectContext = new ProjectContext(this.projectDir);
|
||||||
const files = await projectContext.gatherFiles();
|
const files = await projectContext.gatherFiles();
|
||||||
|
|
||||||
|
// Update npmextra.json
|
||||||
const npmextraJson = files.smartfilesNpmextraJSON;
|
const npmextraJson = files.smartfilesNpmextraJSON;
|
||||||
const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());
|
const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());
|
||||||
|
|
||||||
npmextraJsonContent.gitzone.module.description = resultObject.description;
|
npmextraJsonContent['@git.zone/cli'].module.description = resultObject.description;
|
||||||
npmextraJsonContent.gitzone.module.keywords = resultObject.keywords;
|
npmextraJsonContent['@git.zone/cli'].module.keywords = resultObject.keywords;
|
||||||
|
|
||||||
npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
|
npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
|
||||||
await npmextraJson.write();
|
await npmextraJson.write();
|
||||||
|
|
||||||
// do the same with packageJson
|
// Update package.json
|
||||||
const packageJson = files.smartfilePackageJSON;
|
const packageJson = files.smartfilePackageJSON;
|
||||||
const packageJsonContent = JSON.parse(packageJson.contents.toString());
|
const packageJsonContent = JSON.parse(packageJson.contents.toString());
|
||||||
packageJsonContent.description = resultObject.description;
|
packageJsonContent.description = resultObject.description;
|
||||||
@@ -79,6 +100,6 @@ Don't wrap the JSON in three ticks json!!!
|
|||||||
console.log(`\n======================\n`);
|
console.log(`\n======================\n`);
|
||||||
console.log(JSON.stringify(resultObject, null, 2));
|
console.log(JSON.stringify(resultObject, null, 2));
|
||||||
console.log(`\n======================\n`);
|
console.log(`\n======================\n`);
|
||||||
return result.message;
|
return descriptionResult.text;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -13,31 +13,29 @@ export class ProjectContext {
|
|||||||
}
|
}
|
||||||
|
|
||||||
public async gatherFiles() {
|
public async gatherFiles() {
|
||||||
const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
|
const smartfilePackageJSON = await plugins.smartfileFactory.fromFilePath(
|
||||||
plugins.path.join(this.projectDir, 'package.json'),
|
plugins.path.join(this.projectDir, 'package.json'),
|
||||||
this.projectDir,
|
this.projectDir,
|
||||||
);
|
);
|
||||||
const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
|
const smartfilesReadme = await plugins.smartfileFactory.fromFilePath(
|
||||||
plugins.path.join(this.projectDir, 'readme.md'),
|
plugins.path.join(this.projectDir, 'readme.md'),
|
||||||
this.projectDir,
|
this.projectDir,
|
||||||
);
|
);
|
||||||
|
|
||||||
const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
|
const smartfilesReadmeHints = await plugins.smartfileFactory.fromFilePath(
|
||||||
plugins.path.join(this.projectDir, 'readme.hints.md'),
|
plugins.path.join(this.projectDir, 'readme.hints.md'),
|
||||||
this.projectDir,
|
this.projectDir,
|
||||||
);
|
);
|
||||||
const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
|
const smartfilesNpmextraJSON = await plugins.smartfileFactory.fromFilePath(
|
||||||
plugins.path.join(this.projectDir, 'npmextra.json'),
|
plugins.path.join(this.projectDir, 'npmextra.json'),
|
||||||
this.projectDir,
|
this.projectDir,
|
||||||
);
|
);
|
||||||
const smartfilesMod = await plugins.smartfile.fs.fileTreeToObject(
|
const smartfilesMod = await plugins.smartfileFactory.virtualDirectoryFromPath(
|
||||||
this.projectDir,
|
this.projectDir,
|
||||||
'ts*/**/*.ts',
|
).then(vd => vd.filter(f => f.relative.startsWith('ts') && f.relative.endsWith('.ts')).listFiles());
|
||||||
);
|
const smartfilesTest = await plugins.smartfileFactory.virtualDirectoryFromPath(
|
||||||
const smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
|
|
||||||
this.projectDir,
|
this.projectDir,
|
||||||
'test/**/*.ts',
|
).then(vd => vd.filter(f => f.relative.startsWith('test/') && f.relative.endsWith('.ts')).listFiles());
|
||||||
);
|
|
||||||
return {
|
return {
|
||||||
smartfilePackageJSON,
|
smartfilePackageJSON,
|
||||||
smartfilesReadme,
|
smartfilesReadme,
|
||||||
@@ -66,21 +64,14 @@ ${smartfile.contents.toString()}
|
|||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Calculate the token count for a string using the GPT tokenizer
|
* Estimate token count for a string
|
||||||
* @param text The text to count tokens for
|
* Uses a rough estimate of 4 characters per token
|
||||||
* @param model The model to use for token counting (default: gpt-3.5-turbo)
|
* @param text The text to estimate tokens for
|
||||||
* @returns The number of tokens in the text
|
* @returns Estimated number of tokens
|
||||||
*/
|
*/
|
||||||
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
|
public countTokens(text: string): number {
|
||||||
try {
|
// Rough estimate: ~4 characters per token for English text
|
||||||
// Use the gpt-tokenizer library to count tokens
|
return Math.ceil(text.length / 4);
|
||||||
const tokens = plugins.gptTokenizer.encode(text);
|
|
||||||
return tokens.length;
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error counting tokens:', error);
|
|
||||||
// Provide a rough estimate (4 chars per token) if tokenization fails
|
|
||||||
return Math.ceil(text.length / 4);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
private async buildContext(dirArg: string) {
|
private async buildContext(dirArg: string) {
|
||||||
|
|||||||
@@ -17,72 +17,91 @@ export class Readme {
|
|||||||
public async build() {
|
public async build() {
|
||||||
let finalReadmeString = ``;
|
let finalReadmeString = ``;
|
||||||
|
|
||||||
// Use the new TaskContextFactory for optimized context
|
// First check legal info before introducing any cost
|
||||||
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
|
|
||||||
await taskContextFactory.initialize();
|
|
||||||
|
|
||||||
// Generate context specifically for readme task
|
|
||||||
const contextResult = await taskContextFactory.createContextForReadme();
|
|
||||||
const contextString = contextResult.context;
|
|
||||||
|
|
||||||
// Log token usage statistics
|
|
||||||
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
|
|
||||||
|
|
||||||
// lets first check legal before introducung any cost
|
|
||||||
const projectContext = new ProjectContext(this.projectDir);
|
const projectContext = new ProjectContext(this.projectDir);
|
||||||
const npmExtraJson = JSON.parse(
|
const npmExtraJson = JSON.parse(
|
||||||
(await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
|
(await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
|
||||||
);
|
);
|
||||||
const legalInfo = npmExtraJson?.tsdoc?.legal;
|
const legalInfo = npmExtraJson?.['@git.zone/tsdoc']?.legal;
|
||||||
if (!legalInfo) {
|
if (!legalInfo) {
|
||||||
const error = new Error(`No legal information found in npmextra.json`);
|
const error = new Error(`No legal information found in npmextra.json`);
|
||||||
console.log(error);
|
console.log(error);
|
||||||
}
|
}
|
||||||
|
|
||||||
let result = await this.aiDocsRef.openaiInstance.chat({
|
// Use runAgent with filesystem tool for agent-driven exploration
|
||||||
systemMessage: `
|
const fsTools = plugins.smartagentTools.filesystemTool({ rootDir: this.projectDir });
|
||||||
You create markdown readmes for npm projects. You only output the markdown readme.
|
|
||||||
|
|
||||||
The Readme should follow the following template:
|
const readmeSystemPrompt = `
|
||||||
|
You create markdown READMEs for npm projects. You only output the markdown readme.
|
||||||
|
|
||||||
|
You have access to filesystem tools to explore the project. Use them to understand the codebase.
|
||||||
|
|
||||||
|
IMPORTANT RULES:
|
||||||
|
- Only READ files within the project directory
|
||||||
|
- Do NOT write, delete, or modify any files
|
||||||
|
- README must follow proper markdown format
|
||||||
|
- Must contain Install and Usage sections
|
||||||
|
- Code examples must use correct TypeScript/ESM syntax
|
||||||
|
- Documentation must be comprehensive and helpful
|
||||||
|
- Do NOT include licensing information (added separately)
|
||||||
|
- Do NOT use CommonJS syntax - only ESM
|
||||||
|
- Do NOT include "in conclusion" or similar filler
|
||||||
|
`;
|
||||||
|
|
||||||
|
const readmeTaskPrompt = `
|
||||||
|
PROJECT DIRECTORY: ${this.projectDir}
|
||||||
|
|
||||||
|
Use the filesystem tools to explore the project and understand what it does:
|
||||||
|
1. First, use list_directory to see the project structure
|
||||||
|
2. Read package.json to understand the package name, description, and dependencies
|
||||||
|
3. Read the existing readme.md if it exists (use it as a base, improve and expand)
|
||||||
|
4. Read readme.hints.md if it exists (contains hints for documentation)
|
||||||
|
5. Read key source files in ts/ directory to understand the API and implementation
|
||||||
|
6. Focus on exported classes, interfaces, and functions
|
||||||
|
|
||||||
|
Then generate a comprehensive README following this template:
|
||||||
|
|
||||||
# Project Name
|
# Project Name
|
||||||
[
|
[The name from package.json and description]
|
||||||
The name is the module name of package.json
|
|
||||||
The description is in the description field of package.json
|
|
||||||
]
|
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
[
|
[Short text on how to install the project]
|
||||||
Write a short text on how to install the project
|
|
||||||
]
|
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
[
|
[
|
||||||
Give code examples here.
|
Give code examples here.
|
||||||
Construct sensible scenarios for the user.
|
Construct sensible scenarios for the user.
|
||||||
Make sure to show a complete set of features of the module.
|
Make sure to show a complete set of features of the module.
|
||||||
Don't omit use cases.
|
Don't omit use cases.
|
||||||
It does not matter how much time you need.
|
|
||||||
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
|
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
|
||||||
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
|
Write at least 4000 words. More if necessary.
|
||||||
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
|
If there is already a readme, take the Usage section as base. Remove outdated content, expand and improve.
|
||||||
Super important: Check for completenes.
|
Check for completeness.
|
||||||
Don't include any licensing information. This will be added in a later step.
|
Don't include any licensing information. This will be added later.
|
||||||
Avoid "in conclusions".
|
Avoid "in conclusion" statements.
|
||||||
|
|
||||||
Good to know:
|
|
||||||
* npmextra.json contains overall module information.
|
|
||||||
* readme.hints.md provides valuable hints about module ideas.
|
|
||||||
]
|
]
|
||||||
`,
|
`;
|
||||||
messageHistory: [],
|
|
||||||
userMessage: contextString,
|
logger.log('info', 'Starting README generation with agent...');
|
||||||
|
|
||||||
|
const readmeResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: readmeTaskPrompt,
|
||||||
|
system: readmeSystemPrompt,
|
||||||
|
tools: fsTools,
|
||||||
|
maxSteps: 25,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[README] Tool call: ${toolName}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
finalReadmeString += result.message + '\n' + legalInfo;
|
// Clean up markdown formatting if wrapped in code blocks
|
||||||
|
let resultMessage = readmeResult.text
|
||||||
|
.replace(/^```markdown\n?/i, '')
|
||||||
|
.replace(/\n?```$/i, '');
|
||||||
|
|
||||||
|
finalReadmeString += resultMessage + '\n' + legalInfo;
|
||||||
|
|
||||||
console.log(`\n======================\n`);
|
console.log(`\n======================\n`);
|
||||||
console.log(result.message);
|
console.log(resultMessage);
|
||||||
console.log(`\n======================\n`);
|
console.log(`\n======================\n`);
|
||||||
|
|
||||||
const readme = (await projectContext.gatherFiles()).smartfilesReadme;
|
const readme = (await projectContext.gatherFiles()).smartfilesReadme;
|
||||||
@@ -93,60 +112,80 @@ The Readme should follow the following template:
|
|||||||
const tsPublishInstance = new plugins.tspublish.TsPublish();
|
const tsPublishInstance = new plugins.tspublish.TsPublish();
|
||||||
const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
|
const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
|
||||||
logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);
|
logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);
|
||||||
|
|
||||||
for (const subModule of Object.keys(subModules)) {
|
for (const subModule of Object.keys(subModules)) {
|
||||||
logger.log('info', `Building readme for ${subModule}`);
|
logger.log('info', `Building readme for ${subModule}`);
|
||||||
const subModuleContextString = await projectContext.update();
|
|
||||||
let result = await this.aiDocsRef.openaiInstance.chat({
|
|
||||||
systemMessage: `
|
|
||||||
You create markdown readmes for npm projects. You only output the markdown readme.
|
|
||||||
|
|
||||||
IMPORTANT: YOU ARE NOW CREATING THE README FOR THE FOLLOWING SUB MODULE: ${subModule} !!!!!!!!!!!
|
const subModulePath = plugins.path.join(paths.cwd, subModule);
|
||||||
The Sub Module will be published with the following data:
|
const tspublishData = await plugins.fsInstance
|
||||||
${JSON.stringify(plugins.smartfile.fs.toStringSync(plugins.path.join(paths.cwd, subModule, 'tspublish.json')), null, 2)}
|
.file(plugins.path.join(subModulePath, 'tspublish.json'))
|
||||||
|
.encoding('utf8')
|
||||||
|
.read();
|
||||||
|
|
||||||
|
const subModuleFsTools = plugins.smartagentTools.filesystemTool({ rootDir: subModulePath });
|
||||||
The Readme should follow the following template:
|
|
||||||
|
const subModuleSystemPrompt = `
|
||||||
# Project Name
|
You create markdown READMEs for npm projects. You only output the markdown readme.
|
||||||
[
|
|
||||||
The name is the module name of package.json
|
IMPORTANT RULES:
|
||||||
The description is in the description field of package.json
|
- Only READ files within the submodule directory
|
||||||
]
|
- Do NOT write, delete, or modify any files
|
||||||
|
- README must be comprehensive, well-formatted markdown with ESM TypeScript examples
|
||||||
## Install
|
- Do NOT include licensing information (added separately)
|
||||||
[
|
`;
|
||||||
Write a short text on how to install the project
|
|
||||||
]
|
const subModulePrompt = `
|
||||||
|
SUB MODULE: ${subModule}
|
||||||
## Usage
|
SUB MODULE DIRECTORY: ${subModulePath}
|
||||||
[
|
|
||||||
Give code examples here.
|
IMPORTANT: YOU ARE CREATING THE README FOR THIS SUB MODULE: ${subModule}
|
||||||
Construct sensible scenarios for the user.
|
The Sub Module will be published with:
|
||||||
Make sure to show a complete set of features of the module.
|
${JSON.stringify(tspublishData, null, 2)}
|
||||||
Don't omit use cases.
|
|
||||||
It does not matter how much time you need.
|
Use the filesystem tools to explore the submodule:
|
||||||
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
|
1. Use list_directory to see the submodule structure
|
||||||
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
|
2. Read package.json to understand the submodule
|
||||||
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
|
3. Read source files in ts/ directory to understand the implementation
|
||||||
Super important: Check for completenes.
|
|
||||||
Don't include any licensing information. This will be added in a later step.
|
Generate a README following the template:
|
||||||
Avoid "in conclusions".
|
|
||||||
|
# Project Name
|
||||||
Good to know:
|
[name and description from package.json]
|
||||||
* npmextra.json contains overall module information.
|
|
||||||
* readme.hints.md provides valuable hints about module ideas.
|
## Install
|
||||||
* Your output lands directly in the readme.md file.
|
[installation instructions]
|
||||||
* Don't use \`\`\` at the beginning or the end. It'll cause problems. Only use it for codeblocks. You are directly writing markdown. No need to introduce it weirdly.
|
|
||||||
]
|
## Usage
|
||||||
`,
|
[
|
||||||
messageHistory: [],
|
Code examples with complete features.
|
||||||
userMessage: subModuleContextString,
|
ESM TypeScript syntax only.
|
||||||
|
Write at least 4000 words.
|
||||||
|
No licensing information.
|
||||||
|
No "in conclusion".
|
||||||
|
]
|
||||||
|
|
||||||
|
Don't use \`\`\` at the beginning or end. Only for code blocks.
|
||||||
|
`;
|
||||||
|
|
||||||
|
const subModuleResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: subModulePrompt,
|
||||||
|
system: subModuleSystemPrompt,
|
||||||
|
tools: subModuleFsTools,
|
||||||
|
maxSteps: 20,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[README:${subModule}] Tool call: ${toolName}`),
|
||||||
});
|
});
|
||||||
|
|
||||||
const subModuleReadmeString = result.message + '\n' + legalInfo;
|
const subModuleReadmeString = subModuleResult.text
|
||||||
await plugins.smartfile.memory.toFs(subModuleReadmeString, plugins.path.join(paths.cwd, subModule, 'readme.md'));
|
.replace(/^```markdown\n?/i, '')
|
||||||
|
.replace(/\n?```$/i, '') + '\n' + legalInfo;
|
||||||
|
await plugins.fsInstance
|
||||||
|
.file(plugins.path.join(subModulePath, 'readme.md'))
|
||||||
|
.encoding('utf8')
|
||||||
|
.write(subModuleReadmeString);
|
||||||
logger.log('success', `Built readme for ${subModule}`);
|
logger.log('success', `Built readme for ${subModule}`);
|
||||||
}
|
}
|
||||||
return result.message;
|
|
||||||
|
return resultMessage;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -8,7 +8,7 @@ export class AiDoc {
|
|||||||
public npmextraKV: plugins.npmextra.KeyValueStore;
|
public npmextraKV: plugins.npmextra.KeyValueStore;
|
||||||
public qenvInstance: plugins.qenv.Qenv;
|
public qenvInstance: plugins.qenv.Qenv;
|
||||||
public aidocInteract: plugins.smartinteract.SmartInteract;
|
public aidocInteract: plugins.smartinteract.SmartInteract;
|
||||||
public openaiInstance: plugins.smartai.OpenAiProvider;
|
public model: plugins.smartai.LanguageModelV3;
|
||||||
|
|
||||||
argvArg: any;
|
argvArg: any;
|
||||||
|
|
||||||
@@ -36,9 +36,25 @@ export class AiDoc {
|
|||||||
this.aidocInteract = new plugins.smartinteract.SmartInteract();
|
this.aidocInteract = new plugins.smartinteract.SmartInteract();
|
||||||
this.qenvInstance = new plugins.qenv.Qenv();
|
this.qenvInstance = new plugins.qenv.Qenv();
|
||||||
if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
|
if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
|
||||||
|
// Migrate old KV store path to new path if needed
|
||||||
|
const homeDir = plugins.smartpath.get.home();
|
||||||
|
const oldKvPath = plugins.path.join(homeDir, '.npmextra/kv/tsdoc.json');
|
||||||
|
const newKvDir = plugins.path.join(homeDir, '.npmextra/kv/@git.zone');
|
||||||
|
const newKvPath = plugins.path.join(newKvDir, 'tsdoc.json');
|
||||||
|
if (
|
||||||
|
await plugins.fsInstance.file(oldKvPath).exists() &&
|
||||||
|
!(await plugins.fsInstance.file(newKvPath).exists())
|
||||||
|
) {
|
||||||
|
console.log('Migrating tsdoc KeyValueStore to @git.zone/tsdoc...');
|
||||||
|
await plugins.fsInstance.directory(newKvDir).recursive().create();
|
||||||
|
await plugins.fsInstance.file(oldKvPath).copy(newKvPath);
|
||||||
|
await plugins.fsInstance.file(oldKvPath).delete();
|
||||||
|
console.log('Migration complete: tsdoc.json -> @git.zone/tsdoc.json');
|
||||||
|
}
|
||||||
|
|
||||||
this.npmextraKV = new plugins.npmextra.KeyValueStore({
|
this.npmextraKV = new plugins.npmextra.KeyValueStore({
|
||||||
typeArg: 'userHomeDir',
|
typeArg: 'userHomeDir',
|
||||||
identityArg: 'tsdoc',
|
identityArg: '@git.zone/tsdoc',
|
||||||
mandatoryKeys: ['OPENAI_TOKEN'],
|
mandatoryKeys: ['OPENAI_TOKEN'],
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -64,19 +80,24 @@ export class AiDoc {
|
|||||||
await this.npmextraKV.writeKey('OPENAI_TOKEN', this.openaiToken);
|
await this.npmextraKV.writeKey('OPENAI_TOKEN', this.openaiToken);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (!this.openaiToken) {
|
if (!this.openaiToken && this.npmextraKV) {
|
||||||
this.openaiToken = await this.npmextraKV.readKey('OPENAI_TOKEN');
|
this.openaiToken = await this.npmextraKV.readKey('OPENAI_TOKEN');
|
||||||
}
|
}
|
||||||
|
|
||||||
// lets assume we have an OPENAI_Token now
|
// Create model using getModel()
|
||||||
this.openaiInstance = new plugins.smartai.OpenAiProvider({
|
this.model = plugins.smartai.getModel({
|
||||||
openaiToken: this.openaiToken,
|
provider: 'openai',
|
||||||
|
model: 'gpt-5.4',
|
||||||
|
apiKey: this.openaiToken,
|
||||||
});
|
});
|
||||||
await this.openaiInstance.start();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public async stop() {
|
public async stop() {
|
||||||
await this.openaiInstance.stop();
|
// No lifecycle management needed with getModel() API
|
||||||
|
}
|
||||||
|
|
||||||
|
public getOpenaiToken(): string {
|
||||||
|
return this.openaiToken;
|
||||||
}
|
}
|
||||||
|
|
||||||
public async buildReadme(projectDirArg: string) {
|
public async buildReadme(projectDirArg: string) {
|
||||||
@@ -98,7 +119,7 @@ export class AiDoc {
|
|||||||
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
|
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
|
||||||
return await projectContextInstance.gatherFiles();
|
return await projectContextInstance.gatherFiles();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get the context with token count information
|
* Get the context with token count information
|
||||||
* @param projectDirArg The path to the project directory
|
* @param projectDirArg The path to the project directory
|
||||||
@@ -109,7 +130,7 @@ export class AiDoc {
|
|||||||
await projectContextInstance.update();
|
await projectContextInstance.update();
|
||||||
return projectContextInstance.getContextWithTokenCount();
|
return projectContextInstance.getContextWithTokenCount();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get just the token count for a project's context
|
* Get just the token count for a project's context
|
||||||
* @param projectDirArg The path to the project directory
|
* @param projectDirArg The path to the project directory
|
||||||
@@ -120,15 +141,14 @@ export class AiDoc {
|
|||||||
await projectContextInstance.update();
|
await projectContextInstance.update();
|
||||||
return projectContextInstance.getTokenCount();
|
return projectContextInstance.getTokenCount();
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Count tokens in a text string using GPT tokenizer
|
* Estimate token count in a text string
|
||||||
* @param text The text to count tokens for
|
* @param text The text to estimate tokens for
|
||||||
* @param model The model to use for tokenization (default: gpt-3.5-turbo)
|
* @returns Estimated number of tokens
|
||||||
* @returns The number of tokens in the text
|
|
||||||
*/
|
*/
|
||||||
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
|
public countTokens(text: string): number {
|
||||||
const projectContextInstance = new aiDocsClasses.ProjectContext('');
|
const projectContextInstance = new aiDocsClasses.ProjectContext('');
|
||||||
return projectContextInstance.countTokens(text, model);
|
return projectContextInstance.countTokens(text);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
353
ts/classes.diffprocessor.ts
Normal file
353
ts/classes.diffprocessor.ts
Normal file
@@ -0,0 +1,353 @@
|
|||||||
|
/**
|
||||||
|
* Intelligent git diff processor that handles large diffs by sampling and prioritization
|
||||||
|
* instead of blind truncation.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface IDiffFileInfo {
|
||||||
|
filepath: string;
|
||||||
|
status: 'added' | 'modified' | 'deleted';
|
||||||
|
linesAdded: number;
|
||||||
|
linesRemoved: number;
|
||||||
|
totalLines: number;
|
||||||
|
estimatedTokens: number;
|
||||||
|
diffContent: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IProcessedDiff {
|
||||||
|
summary: string; // Human-readable overview
|
||||||
|
fullDiffs: string[]; // Small files included fully
|
||||||
|
summarizedDiffs: string[]; // Medium files with head/tail
|
||||||
|
metadataOnly: string[]; // Large files, just stats
|
||||||
|
totalFiles: number;
|
||||||
|
totalTokens: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IDiffProcessorOptions {
|
||||||
|
maxDiffTokens?: number; // Maximum tokens for entire diff section (default: 100000)
|
||||||
|
smallFileLines?: number; // Files <= this are included fully (default: 50)
|
||||||
|
mediumFileLines?: number; // Files <= this are summarized (default: 200)
|
||||||
|
sampleHeadLines?: number; // Lines to show at start of medium files (default: 20)
|
||||||
|
sampleTailLines?: number; // Lines to show at end of medium files (default: 20)
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DiffProcessor {
|
||||||
|
private options: Required<IDiffProcessorOptions>;
|
||||||
|
|
||||||
|
constructor(options: IDiffProcessorOptions = {}) {
|
||||||
|
this.options = {
|
||||||
|
maxDiffTokens: options.maxDiffTokens ?? 100000,
|
||||||
|
smallFileLines: options.smallFileLines ?? 50,
|
||||||
|
mediumFileLines: options.mediumFileLines ?? 200,
|
||||||
|
sampleHeadLines: options.sampleHeadLines ?? 20,
|
||||||
|
sampleTailLines: options.sampleTailLines ?? 20,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process an array of git diffs into a structured, token-efficient format
|
||||||
|
*/
|
||||||
|
public processDiffs(diffStringArray: string[]): IProcessedDiff {
|
||||||
|
// Parse all diffs into file info objects
|
||||||
|
const fileInfos: IDiffFileInfo[] = diffStringArray
|
||||||
|
.map(diffString => this.parseDiffFile(diffString))
|
||||||
|
.filter(info => info !== null) as IDiffFileInfo[];
|
||||||
|
|
||||||
|
// Prioritize files (source files first, build artifacts last)
|
||||||
|
const prioritized = this.prioritizeFiles(fileInfos);
|
||||||
|
|
||||||
|
const result: IProcessedDiff = {
|
||||||
|
summary: '',
|
||||||
|
fullDiffs: [],
|
||||||
|
summarizedDiffs: [],
|
||||||
|
metadataOnly: [],
|
||||||
|
totalFiles: prioritized.length,
|
||||||
|
totalTokens: 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
let tokensUsed = 0;
|
||||||
|
const tokenBudget = this.options.maxDiffTokens;
|
||||||
|
|
||||||
|
// Categorize and include files based on size and token budget
|
||||||
|
for (const fileInfo of prioritized) {
|
||||||
|
const remainingBudget = tokenBudget - tokensUsed;
|
||||||
|
|
||||||
|
if (remainingBudget <= 0) {
|
||||||
|
// Budget exhausted - rest are metadata only
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileInfo.totalLines <= this.options.smallFileLines) {
|
||||||
|
// Small file - include fully if budget allows
|
||||||
|
if (fileInfo.estimatedTokens <= remainingBudget) {
|
||||||
|
const statusPrefix = this.getFileStatusPrefix(fileInfo);
|
||||||
|
result.fullDiffs.push(`${statusPrefix}${fileInfo.diffContent}`);
|
||||||
|
tokensUsed += fileInfo.estimatedTokens;
|
||||||
|
} else {
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
}
|
||||||
|
} else if (fileInfo.totalLines <= this.options.mediumFileLines) {
|
||||||
|
// Medium file - try to include summary with head/tail
|
||||||
|
const summary = this.extractDiffSample(
|
||||||
|
fileInfo,
|
||||||
|
this.options.sampleHeadLines,
|
||||||
|
this.options.sampleTailLines
|
||||||
|
);
|
||||||
|
const summaryTokens = Math.ceil(summary.length / 4); // Rough estimate
|
||||||
|
|
||||||
|
if (summaryTokens <= remainingBudget) {
|
||||||
|
result.summarizedDiffs.push(summary);
|
||||||
|
tokensUsed += summaryTokens;
|
||||||
|
} else {
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Large file - metadata only
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result.totalTokens = tokensUsed;
|
||||||
|
result.summary = this.generateSummary(result);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format the processed diff for inclusion in context
|
||||||
|
*/
|
||||||
|
public formatForContext(processed: IProcessedDiff): string {
|
||||||
|
const sections: string[] = [];
|
||||||
|
|
||||||
|
// Summary section
|
||||||
|
sections.push('====== GIT DIFF SUMMARY ======');
|
||||||
|
sections.push(processed.summary);
|
||||||
|
sections.push('');
|
||||||
|
|
||||||
|
// Full diffs section
|
||||||
|
if (processed.fullDiffs.length > 0) {
|
||||||
|
sections.push(`====== FULL DIFFS (${processed.fullDiffs.length} files) ======`);
|
||||||
|
sections.push(processed.fullDiffs.join('\n\n'));
|
||||||
|
sections.push('');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Summarized diffs section
|
||||||
|
if (processed.summarizedDiffs.length > 0) {
|
||||||
|
sections.push(`====== SUMMARIZED DIFFS (${processed.summarizedDiffs.length} files) ======`);
|
||||||
|
sections.push(processed.summarizedDiffs.join('\n\n'));
|
||||||
|
sections.push('');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Metadata only section
|
||||||
|
if (processed.metadataOnly.length > 0) {
|
||||||
|
sections.push(`====== METADATA ONLY (${processed.metadataOnly.length} files) ======`);
|
||||||
|
sections.push(processed.metadataOnly.join('\n'));
|
||||||
|
sections.push('');
|
||||||
|
}
|
||||||
|
|
||||||
|
sections.push('====== END OF GIT DIFF ======');
|
||||||
|
|
||||||
|
return sections.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a single git diff string into file information
|
||||||
|
*/
|
||||||
|
private parseDiffFile(diffString: string): IDiffFileInfo | null {
|
||||||
|
if (!diffString || diffString.trim().length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = diffString.split('\n');
|
||||||
|
let filepath = '';
|
||||||
|
let status: 'added' | 'modified' | 'deleted' = 'modified';
|
||||||
|
let linesAdded = 0;
|
||||||
|
let linesRemoved = 0;
|
||||||
|
|
||||||
|
// Parse diff header to extract filepath and status
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith('--- a/')) {
|
||||||
|
filepath = line.substring(6);
|
||||||
|
} else if (line.startsWith('+++ b/')) {
|
||||||
|
const newPath = line.substring(6);
|
||||||
|
if (newPath === '/dev/null') {
|
||||||
|
status = 'deleted';
|
||||||
|
} else if (filepath === '/dev/null') {
|
||||||
|
status = 'added';
|
||||||
|
filepath = newPath;
|
||||||
|
} else {
|
||||||
|
filepath = newPath;
|
||||||
|
}
|
||||||
|
} else if (line.startsWith('+') && !line.startsWith('+++')) {
|
||||||
|
linesAdded++;
|
||||||
|
} else if (line.startsWith('-') && !line.startsWith('---')) {
|
||||||
|
linesRemoved++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const totalLines = linesAdded + linesRemoved;
|
||||||
|
const estimatedTokens = Math.ceil(diffString.length / 4);
|
||||||
|
|
||||||
|
return {
|
||||||
|
filepath,
|
||||||
|
status,
|
||||||
|
linesAdded,
|
||||||
|
linesRemoved,
|
||||||
|
totalLines,
|
||||||
|
estimatedTokens,
|
||||||
|
diffContent: diffString,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prioritize files by importance (source files before build artifacts)
|
||||||
|
*/
|
||||||
|
private prioritizeFiles(files: IDiffFileInfo[]): IDiffFileInfo[] {
|
||||||
|
return files.sort((a, b) => {
|
||||||
|
const scoreA = this.getFileImportanceScore(a.filepath);
|
||||||
|
const scoreB = this.getFileImportanceScore(b.filepath);
|
||||||
|
return scoreB - scoreA; // Higher score first
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate importance score for a file path
|
||||||
|
*/
|
||||||
|
private getFileImportanceScore(filepath: string): number {
|
||||||
|
// Source files - highest priority
|
||||||
|
if (filepath.match(/^(src|lib|app|components|pages|api)\//)) {
|
||||||
|
return 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test files - high priority
|
||||||
|
if (filepath.match(/\.(test|spec)\.(ts|js|tsx|jsx)$/) || filepath.startsWith('test/')) {
|
||||||
|
return 80;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configuration files - medium-high priority
|
||||||
|
if (filepath.match(/\.(json|yaml|yml|toml|config\.(ts|js))$/)) {
|
||||||
|
return 60;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Documentation - medium priority
|
||||||
|
if (filepath.match(/\.(md|txt|rst)$/)) {
|
||||||
|
return 40;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build artifacts - low priority
|
||||||
|
if (filepath.match(/^(dist|build|out|\.next|public\/dist)\//)) {
|
||||||
|
return 10;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start with default priority
|
||||||
|
let score = 50;
|
||||||
|
|
||||||
|
// Boost interface/type files - they're usually small but critical
|
||||||
|
if (filepath.includes('interfaces/') || filepath.includes('.types.')) {
|
||||||
|
score += 20;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Boost entry points
|
||||||
|
if (filepath.endsWith('index.ts') || filepath.endsWith('mod.ts')) {
|
||||||
|
score += 15;
|
||||||
|
}
|
||||||
|
|
||||||
|
return score;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract head and tail lines from a diff, omitting the middle
|
||||||
|
*/
|
||||||
|
private extractDiffSample(fileInfo: IDiffFileInfo, headLines: number, tailLines: number): string {
|
||||||
|
const lines = fileInfo.diffContent.split('\n');
|
||||||
|
const totalLines = lines.length;
|
||||||
|
|
||||||
|
if (totalLines <= headLines + tailLines) {
|
||||||
|
// File is small enough to include fully
|
||||||
|
return fileInfo.diffContent;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract file metadata from diff header
|
||||||
|
const headerLines: string[] = [];
|
||||||
|
let bodyStartIndex = 0;
|
||||||
|
for (let i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].startsWith('@@')) {
|
||||||
|
headerLines.push(...lines.slice(0, i + 1));
|
||||||
|
bodyStartIndex = i + 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const bodyLines = lines.slice(bodyStartIndex);
|
||||||
|
const head = bodyLines.slice(0, headLines);
|
||||||
|
const tail = bodyLines.slice(-tailLines);
|
||||||
|
const omittedLines = bodyLines.length - headLines - tailLines;
|
||||||
|
|
||||||
|
const statusEmoji = fileInfo.status === 'added' ? '➕' :
|
||||||
|
fileInfo.status === 'deleted' ? '➖' : '📝';
|
||||||
|
|
||||||
|
const parts: string[] = [];
|
||||||
|
parts.push(`${statusEmoji} FILE: ${fileInfo.filepath}`);
|
||||||
|
parts.push(`CHANGES: +${fileInfo.linesAdded} lines, -${fileInfo.linesRemoved} lines (${fileInfo.totalLines} total)`);
|
||||||
|
parts.push('');
|
||||||
|
parts.push(...headerLines);
|
||||||
|
parts.push(...head);
|
||||||
|
parts.push('');
|
||||||
|
parts.push(`[... ${omittedLines} lines omitted - use Read tool to see full file ...]`);
|
||||||
|
parts.push('');
|
||||||
|
parts.push(...tail);
|
||||||
|
|
||||||
|
return parts.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file status prefix with emoji
|
||||||
|
*/
|
||||||
|
private getFileStatusPrefix(fileInfo: IDiffFileInfo): string {
|
||||||
|
const statusEmoji = fileInfo.status === 'added' ? '➕' :
|
||||||
|
fileInfo.status === 'deleted' ? '➖' : '📝';
|
||||||
|
return `${statusEmoji} `;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract filepath from diff content
|
||||||
|
*/
|
||||||
|
private extractFilepathFromDiff(diffContent: string): string {
|
||||||
|
const lines = diffContent.split('\n');
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith('+++ b/')) {
|
||||||
|
return line.substring(6);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 'unknown';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file info as metadata only
|
||||||
|
*/
|
||||||
|
private formatMetadataOnly(fileInfo: IDiffFileInfo): string {
|
||||||
|
const statusEmoji = fileInfo.status === 'added' ? '➕' :
|
||||||
|
fileInfo.status === 'deleted' ? '➖' : '📝';
|
||||||
|
return `${statusEmoji} ${fileInfo.filepath} (+${fileInfo.linesAdded}, -${fileInfo.linesRemoved})`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate human-readable summary of processed diff
|
||||||
|
*/
|
||||||
|
private generateSummary(result: IProcessedDiff): string {
|
||||||
|
const parts: string[] = [];
|
||||||
|
parts.push(`Files changed: ${result.totalFiles} total`);
|
||||||
|
parts.push(`- ${result.fullDiffs.length} included in full`);
|
||||||
|
parts.push(`- ${result.summarizedDiffs.length} summarized (head/tail shown)`);
|
||||||
|
parts.push(`- ${result.metadataOnly.length} metadata only`);
|
||||||
|
parts.push(`Estimated tokens: ~${result.totalTokens.toLocaleString()}`);
|
||||||
|
|
||||||
|
if (result.metadataOnly.length > 0) {
|
||||||
|
parts.push('');
|
||||||
|
parts.push('NOTE: Some files excluded to stay within token budget.');
|
||||||
|
parts.push('Use Read tool with specific file paths to see full content.');
|
||||||
|
}
|
||||||
|
|
||||||
|
return parts.join('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -33,19 +33,19 @@ export class TypeDoc {
|
|||||||
include: [],
|
include: [],
|
||||||
};
|
};
|
||||||
let startDirectory = '';
|
let startDirectory = '';
|
||||||
if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts'))) {
|
if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts')).exists()) {
|
||||||
data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
|
data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
|
||||||
startDirectory = 'ts';
|
startDirectory = 'ts';
|
||||||
}
|
}
|
||||||
|
|
||||||
if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts_web'))) {
|
if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts_web')).exists()) {
|
||||||
data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
|
data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
|
||||||
if (!startDirectory) {
|
if (!startDirectory) {
|
||||||
startDirectory = 'ts_web';
|
startDirectory = 'ts_web';
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
await plugins.smartfile.memory.toFs(JSON.stringify(data), paths.tsconfigFile);
|
await plugins.fsInstance.file(paths.tsconfigFile).encoding('utf8').write(JSON.stringify(data));
|
||||||
let targetDir = paths.publicDir;
|
let targetDir = paths.publicDir;
|
||||||
if (options?.publicSubdir) {
|
if (options?.publicSubdir) {
|
||||||
targetDir = plugins.path.join(targetDir, options.publicSubdir);
|
targetDir = plugins.path.join(targetDir, options.publicSubdir);
|
||||||
@@ -53,6 +53,6 @@ export class TypeDoc {
|
|||||||
await this.smartshellInstance.exec(
|
await this.smartshellInstance.exec(
|
||||||
`typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
|
`typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
|
||||||
);
|
);
|
||||||
plugins.smartfile.fs.remove(paths.tsconfigFile);
|
await plugins.fsInstance.file(paths.tsconfigFile).delete();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
153
ts/cli.ts
153
ts/cli.ts
@@ -4,7 +4,6 @@ import { logger } from './logging.js';
|
|||||||
|
|
||||||
import { TypeDoc } from './classes.typedoc.js';
|
import { TypeDoc } from './classes.typedoc.js';
|
||||||
import { AiDoc } from './classes.aidoc.js';
|
import { AiDoc } from './classes.aidoc.js';
|
||||||
import * as context from './context/index.js';
|
|
||||||
|
|
||||||
export const run = async () => {
|
export const run = async () => {
|
||||||
const tsdocCli = new plugins.smartcli.Smartcli();
|
const tsdocCli = new plugins.smartcli.Smartcli();
|
||||||
@@ -31,18 +30,7 @@ export const run = async () => {
|
|||||||
tsdocCli.addCommand('aidoc').subscribe(async (argvArg) => {
|
tsdocCli.addCommand('aidoc').subscribe(async (argvArg) => {
|
||||||
const aidocInstance = new AiDoc();
|
const aidocInstance = new AiDoc();
|
||||||
await aidocInstance.start();
|
await aidocInstance.start();
|
||||||
|
|
||||||
// Get context token count if requested
|
|
||||||
if (argvArg.tokens || argvArg.showTokens) {
|
|
||||||
logger.log('info', `Calculating context token count...`);
|
|
||||||
const tokenCount = await aidocInstance.getProjectContextTokenCount(paths.cwd);
|
|
||||||
logger.log('ok', `Total context token count: ${tokenCount}`);
|
|
||||||
|
|
||||||
if (argvArg.tokensOnly) {
|
|
||||||
return; // Exit early if we only want token count
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
logger.log('info', `Generating new readme...`);
|
logger.log('info', `Generating new readme...`);
|
||||||
logger.log('info', `This may take some time...`);
|
logger.log('info', `This may take some time...`);
|
||||||
await aidocInstance.buildReadme(paths.cwd);
|
await aidocInstance.buildReadme(paths.cwd);
|
||||||
@@ -51,125 +39,40 @@ export const run = async () => {
|
|||||||
await aidocInstance.buildDescription(paths.cwd);
|
await aidocInstance.buildDescription(paths.cwd);
|
||||||
});
|
});
|
||||||
|
|
||||||
tsdocCli.addCommand('tokens').subscribe(async (argvArg) => {
|
tsdocCli.addCommand('readme').subscribe(async (argvArg) => {
|
||||||
const aidocInstance = new AiDoc();
|
const aidocInstance = new AiDoc();
|
||||||
await aidocInstance.start();
|
await aidocInstance.start();
|
||||||
|
|
||||||
logger.log('info', `Calculating context token count...`);
|
logger.log('info', `Generating new readme...`);
|
||||||
|
logger.log('info', `This may take some time...`);
|
||||||
// Determine context mode based on args
|
await aidocInstance.buildReadme(paths.cwd);
|
||||||
let contextMode: context.ContextMode = 'full';
|
});
|
||||||
if (argvArg.trim || argvArg.trimmed) {
|
|
||||||
contextMode = 'trimmed';
|
tsdocCli.addCommand('description').subscribe(async (argvArg) => {
|
||||||
} else if (argvArg.summarize || argvArg.summarized) {
|
const aidocInstance = new AiDoc();
|
||||||
contextMode = 'summarized';
|
await aidocInstance.start();
|
||||||
}
|
|
||||||
|
logger.log('info', `Generating new description and keywords...`);
|
||||||
// Get task type if specified
|
logger.log('info', `This may take some time...`);
|
||||||
let taskType: context.TaskType | undefined = undefined;
|
await aidocInstance.buildDescription(paths.cwd);
|
||||||
if (argvArg.task) {
|
});
|
||||||
if (['readme', 'commit', 'description'].includes(argvArg.task)) {
|
|
||||||
taskType = argvArg.task as context.TaskType;
|
tsdocCli.addCommand('commit').subscribe(async (argvArg) => {
|
||||||
} else {
|
const aidocInstance = new AiDoc();
|
||||||
logger.log('warn', `Unknown task type: ${argvArg.task}. Using default context.`);
|
await aidocInstance.start();
|
||||||
}
|
|
||||||
}
|
logger.log('info', `Generating commit message...`);
|
||||||
|
logger.log('info', `This may take some time...`);
|
||||||
// Use enhanced context
|
const commitObject = await aidocInstance.buildNextCommitObject(paths.cwd);
|
||||||
const taskFactory = new context.TaskContextFactory(paths.cwd);
|
|
||||||
await taskFactory.initialize();
|
logger.log('ok', `Commit message generated:`);
|
||||||
|
console.log(JSON.stringify(commitObject, null, 2));
|
||||||
let contextResult: context.IContextResult;
|
|
||||||
|
|
||||||
if (argvArg.all) {
|
|
||||||
// Show stats for all task types
|
|
||||||
const stats = await taskFactory.getTokenStats();
|
|
||||||
|
|
||||||
logger.log('ok', 'Token statistics by task:');
|
|
||||||
for (const [task, data] of Object.entries(stats)) {
|
|
||||||
logger.log('info', `\n${task.toUpperCase()}:`);
|
|
||||||
logger.log('info', ` Tokens: ${data.tokenCount}`);
|
|
||||||
logger.log('info', ` Token savings: ${data.savings}`);
|
|
||||||
logger.log('info', ` Files: ${data.includedFiles} included, ${data.trimmedFiles} trimmed, ${data.excludedFiles} excluded`);
|
|
||||||
|
|
||||||
// Calculate percentage of model context
|
|
||||||
const o4MiniPercentage = (data.tokenCount / 200000 * 100).toFixed(2);
|
|
||||||
logger.log('info', ` Context usage: ${o4MiniPercentage}% of o4-mini (200K tokens)`);
|
|
||||||
}
|
|
||||||
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (taskType) {
|
|
||||||
// Get context for specific task
|
|
||||||
contextResult = await taskFactory.createContextForTask(taskType);
|
|
||||||
} else {
|
|
||||||
// Get generic context with specified mode
|
|
||||||
const enhancedContext = new context.EnhancedContext(paths.cwd);
|
|
||||||
await enhancedContext.initialize();
|
|
||||||
enhancedContext.setContextMode(contextMode);
|
|
||||||
|
|
||||||
if (argvArg.maxTokens) {
|
|
||||||
enhancedContext.setTokenBudget(parseInt(argvArg.maxTokens, 10));
|
|
||||||
}
|
|
||||||
|
|
||||||
contextResult = await enhancedContext.buildContext();
|
|
||||||
}
|
|
||||||
|
|
||||||
// Display results
|
|
||||||
logger.log('ok', `Total context token count: ${contextResult.tokenCount}`);
|
|
||||||
logger.log('info', `Files included: ${contextResult.includedFiles.length}`);
|
|
||||||
logger.log('info', `Files trimmed: ${contextResult.trimmedFiles.length}`);
|
|
||||||
logger.log('info', `Files excluded: ${contextResult.excludedFiles.length}`);
|
|
||||||
logger.log('info', `Token savings: ${contextResult.tokenSavings}`);
|
|
||||||
|
|
||||||
if (argvArg.detailed) {
|
|
||||||
// Show more detailed info about the context and token usage
|
|
||||||
const o4MiniPercentage = (contextResult.tokenCount / 200000 * 100).toFixed(2);
|
|
||||||
logger.log('info', `Token usage: ${o4MiniPercentage}% of o4-mini 200K token context window`);
|
|
||||||
|
|
||||||
if (argvArg.model) {
|
|
||||||
// Show percentages for different models
|
|
||||||
if (argvArg.model === 'gpt4') {
|
|
||||||
const gpt4Percentage = (contextResult.tokenCount / 8192 * 100).toFixed(2);
|
|
||||||
logger.log('info', `Token usage (GPT-4): ${gpt4Percentage}% of 8192 token context window`);
|
|
||||||
} else if (argvArg.model === 'gpt35') {
|
|
||||||
const gpt35Percentage = (contextResult.tokenCount / 4096 * 100).toFixed(2);
|
|
||||||
logger.log('info', `Token usage (GPT-3.5): ${gpt35Percentage}% of 4096 token context window`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Estimate cost (approximate values)
|
|
||||||
const o4MiniInputCost = 0.00005; // per 1K tokens for o4-mini
|
|
||||||
const estimatedCost = (contextResult.tokenCount / 1000 * o4MiniInputCost).toFixed(6);
|
|
||||||
logger.log('info', `Estimated input cost: $${estimatedCost} (o4-mini)`);
|
|
||||||
|
|
||||||
if (argvArg.listFiles) {
|
|
||||||
// List files included in context
|
|
||||||
logger.log('info', '\nIncluded files:');
|
|
||||||
contextResult.includedFiles.forEach(file => {
|
|
||||||
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.log('info', '\nTrimmed files:');
|
|
||||||
contextResult.trimmedFiles.forEach(file => {
|
|
||||||
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
|
|
||||||
});
|
|
||||||
|
|
||||||
if (contextResult.excludedFiles.length > 0) {
|
|
||||||
logger.log('info', '\nExcluded files:');
|
|
||||||
contextResult.excludedFiles.forEach(file => {
|
|
||||||
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
});
|
||||||
|
|
||||||
tsdocCli.addCommand('test').subscribe((argvArg) => {
|
tsdocCli.addCommand('test').subscribe((argvArg) => {
|
||||||
tsdocCli.triggerCommand('typedoc', argvArg);
|
tsdocCli.triggerCommand('typedoc', argvArg);
|
||||||
process.on('exit', async () => {
|
process.on('exit', async () => {
|
||||||
await plugins.smartfile.fs.remove(paths.publicDir);
|
await plugins.fsInstance.directory(paths.publicDir).recursive().delete();
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
|||||||
@@ -1,209 +0,0 @@
|
|||||||
import * as plugins from '../plugins.js';
|
|
||||||
import type { IContextConfig, ITrimConfig, ITaskConfig, TaskType, ContextMode } from './types.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Manages configuration for context building
|
|
||||||
*/
|
|
||||||
export class ConfigManager {
|
|
||||||
private static instance: ConfigManager;
|
|
||||||
private config: IContextConfig;
|
|
||||||
private projectDir: string = '';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the singleton instance of ConfigManager
|
|
||||||
*/
|
|
||||||
public static getInstance(): ConfigManager {
|
|
||||||
if (!ConfigManager.instance) {
|
|
||||||
ConfigManager.instance = new ConfigManager();
|
|
||||||
}
|
|
||||||
return ConfigManager.instance;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Private constructor for singleton pattern
|
|
||||||
*/
|
|
||||||
private constructor() {
|
|
||||||
this.config = this.getDefaultConfig();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize the config manager with a project directory
|
|
||||||
* @param projectDir The project directory
|
|
||||||
*/
|
|
||||||
public async initialize(projectDir: string): Promise<void> {
|
|
||||||
this.projectDir = projectDir;
|
|
||||||
await this.loadConfig();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the default configuration
|
|
||||||
*/
|
|
||||||
private getDefaultConfig(): IContextConfig {
|
|
||||||
return {
|
|
||||||
maxTokens: 190000, // Default for o4-mini with some buffer
|
|
||||||
defaultMode: 'trimmed',
|
|
||||||
taskSpecificSettings: {
|
|
||||||
readme: {
|
|
||||||
mode: 'trimmed',
|
|
||||||
includePaths: ['ts/', 'src/'],
|
|
||||||
excludePaths: ['test/', 'node_modules/']
|
|
||||||
},
|
|
||||||
commit: {
|
|
||||||
mode: 'trimmed',
|
|
||||||
focusOnChangedFiles: true
|
|
||||||
},
|
|
||||||
description: {
|
|
||||||
mode: 'trimmed',
|
|
||||||
includePackageInfo: true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
trimming: {
|
|
||||||
removeImplementations: true,
|
|
||||||
preserveInterfaces: true,
|
|
||||||
preserveTypeDefs: true,
|
|
||||||
preserveJSDoc: true,
|
|
||||||
maxFunctionLines: 5,
|
|
||||||
removeComments: true,
|
|
||||||
removeBlankLines: true
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Load configuration from npmextra.json
|
|
||||||
*/
|
|
||||||
private async loadConfig(): Promise<void> {
|
|
||||||
try {
|
|
||||||
if (!this.projectDir) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create KeyValueStore for this project
|
|
||||||
// We'll just use smartfile directly instead of KeyValueStore
|
|
||||||
|
|
||||||
// Read the npmextra.json file
|
|
||||||
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(
|
|
||||||
plugins.path.join(this.projectDir, 'npmextra.json')
|
|
||||||
);
|
|
||||||
const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());
|
|
||||||
|
|
||||||
// Check for tsdoc context configuration
|
|
||||||
if (npmextraContent?.tsdoc?.context) {
|
|
||||||
// Merge with default config
|
|
||||||
this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error loading context configuration:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Merge configurations, with userConfig taking precedence
|
|
||||||
* @param defaultConfig The default configuration
|
|
||||||
* @param userConfig The user configuration
|
|
||||||
*/
|
|
||||||
private mergeConfigs(defaultConfig: IContextConfig, userConfig: Partial<IContextConfig>): IContextConfig {
|
|
||||||
const result: IContextConfig = { ...defaultConfig };
|
|
||||||
|
|
||||||
// Merge top-level properties
|
|
||||||
if (userConfig.maxTokens !== undefined) result.maxTokens = userConfig.maxTokens;
|
|
||||||
if (userConfig.defaultMode !== undefined) result.defaultMode = userConfig.defaultMode;
|
|
||||||
|
|
||||||
// Merge task-specific settings
|
|
||||||
if (userConfig.taskSpecificSettings) {
|
|
||||||
result.taskSpecificSettings = result.taskSpecificSettings || {};
|
|
||||||
|
|
||||||
// For each task type, merge settings
|
|
||||||
(['readme', 'commit', 'description'] as TaskType[]).forEach(taskType => {
|
|
||||||
if (userConfig.taskSpecificSettings?.[taskType]) {
|
|
||||||
result.taskSpecificSettings![taskType] = {
|
|
||||||
...result.taskSpecificSettings![taskType],
|
|
||||||
...userConfig.taskSpecificSettings[taskType]
|
|
||||||
};
|
|
||||||
}
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// Merge trimming configuration
|
|
||||||
if (userConfig.trimming) {
|
|
||||||
result.trimming = {
|
|
||||||
...result.trimming,
|
|
||||||
...userConfig.trimming
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the complete configuration
|
|
||||||
*/
|
|
||||||
public getConfig(): IContextConfig {
|
|
||||||
return this.config;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the trimming configuration
|
|
||||||
*/
|
|
||||||
public getTrimConfig(): ITrimConfig {
|
|
||||||
return this.config.trimming || {};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get configuration for a specific task
|
|
||||||
* @param taskType The type of task
|
|
||||||
*/
|
|
||||||
public getTaskConfig(taskType: TaskType): ITaskConfig {
|
|
||||||
// Get task-specific config or empty object
|
|
||||||
const taskConfig = this.config.taskSpecificSettings?.[taskType] || {};
|
|
||||||
|
|
||||||
// If mode is not specified, use default mode
|
|
||||||
if (!taskConfig.mode) {
|
|
||||||
taskConfig.mode = this.config.defaultMode;
|
|
||||||
}
|
|
||||||
|
|
||||||
return taskConfig;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get the maximum tokens allowed for context
|
|
||||||
*/
|
|
||||||
public getMaxTokens(): number {
|
|
||||||
return this.config.maxTokens || 190000;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Update the configuration
|
|
||||||
* @param config The new configuration
|
|
||||||
*/
|
|
||||||
public async updateConfig(config: Partial<IContextConfig>): Promise<void> {
|
|
||||||
// Merge with existing config
|
|
||||||
this.config = this.mergeConfigs(this.config, config);
|
|
||||||
|
|
||||||
try {
|
|
||||||
if (!this.projectDir) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Read the existing npmextra.json file
|
|
||||||
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
|
|
||||||
let npmextraContent = {};
|
|
||||||
|
|
||||||
if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
|
|
||||||
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
|
|
||||||
npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Update the tsdoc context configuration
|
|
||||||
const typedContent = npmextraContent as any;
|
|
||||||
if (!typedContent.tsdoc) typedContent.tsdoc = {};
|
|
||||||
typedContent.tsdoc.context = this.config;
|
|
||||||
|
|
||||||
// Write back to npmextra.json
|
|
||||||
const updatedContent = JSON.stringify(npmextraContent, null, 2);
|
|
||||||
await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
|
|
||||||
} catch (error) {
|
|
||||||
console.error('Error updating context configuration:', error);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,246 +0,0 @@
|
|||||||
import * as plugins from '../plugins.js';
|
|
||||||
import type { ITrimConfig, ContextMode } from './types.js';
|
|
||||||
|
|
||||||
/**
 * Class responsible for trimming file contents to reduce token usage
 * while preserving important information for context.
 *
 * NOTE(review): trimming is regex-based, not AST-based — the body-matching
 * patterns use non-greedy `[\s\S]*?` up to the first `}` and can therefore
 * cut nested bodies short; acceptable for context generation, not for
 * faithful code transformation.
 */
export class ContextTrimmer {
  // Effective trimming options (constructor defaults + user overrides)
  private config: ITrimConfig;

  /**
   * Create a new ContextTrimmer with the given configuration
   * @param config The trimming configuration; any omitted option falls
   *               back to the defaults spelled out below
   */
  constructor(config?: ITrimConfig) {
    this.config = {
      removeImplementations: true,
      preserveInterfaces: true,
      preserveTypeDefs: true,
      preserveJSDoc: true,
      maxFunctionLines: 5,
      removeComments: true,
      removeBlankLines: true,
      ...config
    };
  }

  /**
   * Trim a file's contents based on the configuration.
   * Dispatches on file extension; unknown extensions pass through untouched.
   * @param filePath The path to the file (only the extension is inspected)
   * @param content The file's contents
   * @param mode The context mode to use ('full' bypasses all trimming)
   * @returns The trimmed file contents
   */
  public trimFile(filePath: string, content: string, mode: ContextMode = 'trimmed'): string {
    // If mode is 'full', return the original content
    if (mode === 'full') {
      return content;
    }

    // Process based on file type
    if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
      return this.trimTypeScriptFile(content);
    } else if (filePath.endsWith('.md')) {
      return this.trimMarkdownFile(content);
    } else if (filePath.endsWith('.json')) {
      return this.trimJsonFile(content);
    }

    // Default to returning the original content for unknown file types
    return content;
  }

  /**
   * Trim a TypeScript file to reduce token usage.
   * Applies an ordered pipeline: collect JSDoc, strip comments, strip or
   * shorten function bodies, strip blank lines. Order matters — each step
   * operates on the output of the previous one.
   * @param content The TypeScript file contents
   * @returns The trimmed file contents
   */
  private trimTypeScriptFile(content: string): string {
    let result = content;

    // Step 1: Preserve JSDoc comments if configured
    // (collected here; step 2's comment-removal regex is what actually
    // skips them — see the negative lookahead below)
    const jsDocComments: string[] = [];
    if (this.config.preserveJSDoc) {
      const jsDocRegex = /\/\*\*[\s\S]*?\*\//g;
      const matches = result.match(jsDocRegex) || [];
      jsDocComments.push(...matches);
    }

    // Step 2: Remove comments if configured
    if (this.config.removeComments) {
      // Remove single-line comments
      result = result.replace(/\/\/.*$/gm, '');
      // Remove multi-line comments (except JSDoc if preserveJSDoc is true)
      if (!this.config.preserveJSDoc) {
        result = result.replace(/\/\*[\s\S]*?\*\//g, '');
      } else {
        // Only remove non-JSDoc comments: `(?!\*)` skips `/**` openers
        result = result.replace(/\/\*(?!\*)[\s\S]*?\*\//g, '');
      }
    }

    // Step 3: Remove function implementations if configured
    if (this.config.removeImplementations) {
      // Match function and method bodies
      result = result.replace(
        /(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, funcType, body, end) => {
          // Keep function signature and opening brace, replace body with comment
          return `${start} /* implementation removed */ ${end}`;
        }
      );

      // Match arrow function bodies
      result = result.replace(
        /(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
        (match, start, body, end) => {
          return `${start} /* implementation removed */ ${end}`;
        }
      );

      // Match method declarations
      result = result.replace(
        /(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
        (match, start, body, end) => {
          return `${start} /* implementation removed */ ${end}`;
        }
      );

      // Match class methods
      result = result.replace(
        /(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, modifier, body, end) => {
          return `${start} /* implementation removed */ ${end}`;
        }
      );
    } else if (this.config.maxFunctionLines && this.config.maxFunctionLines > 0) {
      // If not removing implementations completely, limit the number of lines
      // (same four patterns as above, but bodies are truncated instead of dropped)
      // Match function and method bodies
      result = result.replace(
        /(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, funcType, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );

      // Match arrow function bodies
      result = result.replace(
        /(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
        (match, start, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );

      // Match method declarations
      result = result.replace(
        /(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
        (match, start, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );

      // Match class methods
      result = result.replace(
        /(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, modifier, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );
    }

    // Step 4: Remove blank lines if configured
    if (this.config.removeBlankLines) {
      result = result.replace(/^\s*[\r\n]/gm, '');
    }

    // Step 5: Restore preserved JSDoc comments
    if (this.config.preserveJSDoc && jsDocComments.length > 0) {
      // This is a placeholder; we already preserved JSDoc comments in the regex steps
      // (jsDocComments is collected but never re-inserted — intentional no-op)
    }

    return result;
  }

  /**
   * Limit a function body to a maximum number of lines.
   * Assumes this.config.maxFunctionLines is set (only called from the
   * branch that checks it).
   * @param start The function signature and opening brace
   * @param body The function body
   * @param end The closing brace
   * @returns The limited function body
   */
  private limitFunctionBody(start: string, body: string, end: string): string {
    const lines = body.split('\n');
    if (lines.length > this.config.maxFunctionLines!) {
      const limitedBody = lines.slice(0, this.config.maxFunctionLines!).join('\n');
      return `${start}${limitedBody}\n // ... (${lines.length - this.config.maxFunctionLines!} lines trimmed)\n${end}`;
    }
    return `${start}${body}${end}`;
  }

  /**
   * Trim a Markdown file to reduce token usage.
   * Currently a pass-through — markdown content is kept in full.
   * @param content The Markdown file contents
   * @returns The trimmed file contents
   */
  private trimMarkdownFile(content: string): string {
    // For markdown files, we generally want to keep most content
    // but we can remove lengthy code blocks if needed
    return content;
  }

  /**
   * Trim a JSON file to reduce token usage.
   * Only package.json-shaped files (with name/version/dependencies keys)
   * are reduced; every other JSON file is returned unchanged, as is any
   * file that fails to parse.
   * @param content The JSON file contents
   * @returns The trimmed file contents
   */
  private trimJsonFile(content: string): string {
    try {
      // Parse the JSON
      const json = JSON.parse(content);

      // For package.json, keep only essential information
      if ('name' in json && 'version' in json && 'dependencies' in json) {
        const essentialKeys = [
          'name', 'version', 'description', 'author', 'license',
          'main', 'types', 'exports', 'type'
        ];

        const trimmedJson: any = {};
        essentialKeys.forEach(key => {
          if (key in json) {
            trimmedJson[key] = json[key];
          }
        });

        // Add dependency information without versions
        if ('dependencies' in json) {
          trimmedJson.dependencies = Object.keys(json.dependencies).reduce((acc, dep) => {
            acc[dep] = '*'; // Replace version with wildcard
            return acc;
          }, {} as Record<string, string>);
        }

        // Return the trimmed JSON
        return JSON.stringify(trimmedJson, null, 2);
      }

      // For other JSON files, leave as is
      return content;
    } catch (error) {
      // If there's an error parsing the JSON, return the original content
      return content;
    }
  }

  /**
   * Update the trimmer configuration
   * @param config The new configuration to apply (merged over the current one)
   */
  public updateConfig(config: ITrimConfig): void {
    this.config = {
      ...this.config,
      ...config
    };
  }
}
|
|
||||||
@@ -1,343 +0,0 @@
|
|||||||
import * as plugins from '../plugins.js';
|
|
||||||
import type { ContextMode, IContextResult, IFileInfo, TaskType } from './types.js';
|
|
||||||
import { ContextTrimmer } from './context-trimmer.js';
|
|
||||||
import { ConfigManager } from './config-manager.js';
|
|
||||||
|
|
||||||
/**
 * Enhanced ProjectContext that supports context optimization strategies.
 *
 * Gathers project files, optionally trims them via ContextTrimmer, and
 * assembles them into a single context string while enforcing a token
 * budget. Per-build bookkeeping (included / trimmed / excluded files and
 * token savings) is accumulated in an IContextResult.
 */
export class EnhancedContext {
  // Project root used for all file lookups
  private projectDir: string;
  private trimmer: ContextTrimmer;
  private configManager: ConfigManager;
  private contextMode: ContextMode = 'trimmed';
  private tokenBudget: number = 190000; // Default for o4-mini
  // Result of the most recent build; reset at the start of convertFilesToContext
  private contextResult: IContextResult = {
    context: '',
    tokenCount: 0,
    includedFiles: [],
    trimmedFiles: [],
    excludedFiles: [],
    tokenSavings: 0
  };

  /**
   * Create a new EnhancedContext
   * @param projectDirArg The project directory
   */
  constructor(projectDirArg: string) {
    this.projectDir = projectDirArg;
    this.configManager = ConfigManager.getInstance();
    this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
  }

  /**
   * Initialize the context builder: load project config, then sync the
   * token budget and trim settings from it.
   */
  public async initialize(): Promise<void> {
    await this.configManager.initialize(this.projectDir);
    this.tokenBudget = this.configManager.getMaxTokens();
    this.trimmer.updateConfig(this.configManager.getTrimConfig());
  }

  /**
   * Set the context mode
   * @param mode The context mode to use
   */
  public setContextMode(mode: ContextMode): void {
    this.contextMode = mode;
  }

  /**
   * Set the token budget
   * @param maxTokens The maximum tokens to use
   */
  public setTokenBudget(maxTokens: number): void {
    this.tokenBudget = maxTokens;
  }

  /**
   * Gather files from the project.
   * NOTE(review): the four fromFilePath calls below are not guarded —
   * presumably package.json, readme.md, readme.hints.md and npmextra.json
   * always exist in target projects; confirm, otherwise a missing file
   * rejects the whole gather.
   * @param includePaths Optional paths to include
   * @param excludePaths Optional paths to exclude
   */
  public async gatherFiles(includePaths?: string[], excludePaths?: string[]): Promise<Record<string, plugins.smartfile.SmartFile | plugins.smartfile.SmartFile[]>> {
    const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'package.json'),
      this.projectDir,
    );

    const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.md'),
      this.projectDir,
    );

    const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.hints.md'),
      this.projectDir,
    );

    const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'npmextra.json'),
      this.projectDir,
    );

    // Use provided include paths or default to all TypeScript files
    const includeGlobs = includePaths?.map(path => `${path}/**/*.ts`) || ['ts*/**/*.ts'];

    // Get TypeScript files (one fileTreeToObject call per glob, in parallel)
    const smartfilesModPromises = includeGlobs.map(glob =>
      plugins.smartfile.fs.fileTreeToObject(this.projectDir, glob)
    );

    const smartfilesModArrays = await Promise.all(smartfilesModPromises);

    // Flatten the arrays
    const smartfilesMod: plugins.smartfile.SmartFile[] = [];
    smartfilesModArrays.forEach(array => {
      smartfilesMod.push(...array);
    });

    // Get test files if not excluded
    // (only the exact string 'test/' in excludePaths suppresses this)
    let smartfilesTest: plugins.smartfile.SmartFile[] = [];
    if (!excludePaths?.includes('test/')) {
      smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
        this.projectDir,
        'test/**/*.ts',
      );
    }

    return {
      smartfilePackageJSON,
      smartfilesReadme,
      smartfilesReadmeHints,
      smartfilesNpmextraJSON,
      smartfilesMod,
      smartfilesTest,
    };
  }

  /**
   * Convert files to a single context string, enforcing the token budget.
   * Side effect: resets and repopulates this.contextResult.
   * Files that would exceed the budget are recorded in excludedFiles and
   * skipped; later (smaller) files may still fit.
   * @param files The files to convert
   * @param mode The context mode to use
   */
  public async convertFilesToContext(
    files: plugins.smartfile.SmartFile[],
    mode: ContextMode = this.contextMode
  ): Promise<string> {
    // Reset context result
    this.contextResult = {
      context: '',
      tokenCount: 0,
      includedFiles: [],
      trimmedFiles: [],
      excludedFiles: [],
      tokenSavings: 0
    };

    let totalTokenCount = 0;
    // NOTE(review): accumulated below but never read afterwards
    let totalOriginalTokens = 0;

    // Sort files by importance (for now just a simple alphabetical sort)
    // Later this could be enhanced with more sophisticated prioritization
    const sortedFiles = [...files].sort((a, b) => a.relative.localeCompare(b.relative));

    const processedFiles: string[] = [];

    for (const smartfile of sortedFiles) {
      // Calculate original token count
      const originalContent = smartfile.contents.toString();
      const originalTokenCount = this.countTokens(originalContent);
      totalOriginalTokens += originalTokenCount;

      // Apply trimming based on mode
      let processedContent = originalContent;

      if (mode !== 'full') {
        processedContent = this.trimmer.trimFile(
          smartfile.relative,
          originalContent,
          mode
        );
      }

      // Calculate new token count
      const processedTokenCount = this.countTokens(processedContent);

      // Check if we have budget for this file
      if (totalTokenCount + processedTokenCount > this.tokenBudget) {
        // We don't have budget for this file
        this.contextResult.excludedFiles.push({
          path: smartfile.path,
          contents: originalContent,
          relativePath: smartfile.relative,
          tokenCount: originalTokenCount
        });
        continue;
      }

      // Format the file for context (delimiters are part of the prompt format)
      const formattedContent = `
====== START OF FILE ${smartfile.relative} ======

${processedContent}

====== END OF FILE ${smartfile.relative} ======
`;

      processedFiles.push(formattedContent);
      totalTokenCount += processedTokenCount;

      // Track file in appropriate list
      const fileInfo: IFileInfo = {
        path: smartfile.path,
        contents: processedContent,
        relativePath: smartfile.relative,
        tokenCount: processedTokenCount
      };

      // Unchanged content counts as "included"; anything the trimmer
      // altered counts as "trimmed" and contributes to tokenSavings
      if (mode === 'full' || processedContent === originalContent) {
        this.contextResult.includedFiles.push(fileInfo);
      } else {
        this.contextResult.trimmedFiles.push(fileInfo);
        this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
      }
    }

    // Join all processed files
    const context = processedFiles.join('\n');

    // Update context result
    this.contextResult.context = context;
    this.contextResult.tokenCount = totalTokenCount;

    return context;
  }

  /**
   * Build context for the project
   * @param taskType Optional task type for task-specific context
   */
  public async buildContext(taskType?: TaskType): Promise<IContextResult> {
    // Initialize if needed
    // NOTE(review): tokenBudget starts at 190000, so this only triggers
    // after an explicit setTokenBudget(0) — confirm that is the intent.
    if (this.tokenBudget === 0) {
      await this.initialize();
    }

    // Get task-specific configuration if a task type is provided
    if (taskType) {
      const taskConfig = this.configManager.getTaskConfig(taskType);
      if (taskConfig.mode) {
        this.setContextMode(taskConfig.mode);
      }
    }

    // Gather files
    const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
    const files = await this.gatherFiles(
      taskConfig?.includePaths,
      taskConfig?.excludePaths
    );

    // Convert files to context
    // Create an array of all files to process
    const allFiles: plugins.smartfile.SmartFile[] = [];

    // Add individual files
    if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
    if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
    if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
    if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);

    // Add arrays of files
    if (files.smartfilesMod) {
      if (Array.isArray(files.smartfilesMod)) {
        allFiles.push(...files.smartfilesMod);
      } else {
        allFiles.push(files.smartfilesMod);
      }
    }

    if (files.smartfilesTest) {
      if (Array.isArray(files.smartfilesTest)) {
        allFiles.push(...files.smartfilesTest);
      } else {
        allFiles.push(files.smartfilesTest);
      }
    }

    // Return value intentionally unused: the call populates
    // this.contextResult as a side effect.
    const context = await this.convertFilesToContext(allFiles);

    return this.contextResult;
  }

  /**
   * Update the context with git diff information for commit tasks.
   * Appends the diff to the existing context and adds its token count;
   * no-op if no context has been built yet.
   * @param gitDiff The git diff to include
   */
  public updateWithGitDiff(gitDiff: string): IContextResult {
    // If we don't have a context yet, return empty result
    if (!this.contextResult.context) {
      return this.contextResult;
    }

    // Add git diff to context (delimiters are part of the prompt format)
    const diffSection = `
====== GIT DIFF ======

${gitDiff}

====== END GIT DIFF ======
`;

    const diffTokenCount = this.countTokens(diffSection);

    // Update context and token count
    // NOTE(review): the diff is appended even if it pushes the total
    // past tokenBudget — confirm this is acceptable for commit tasks.
    this.contextResult.context += diffSection;
    this.contextResult.tokenCount += diffTokenCount;

    return this.contextResult;
  }

  /**
   * Count tokens in a string.
   * @param text The text to count tokens for
   * @param model The model to use for token counting
   *              (currently unused: gptTokenizer.encode is called without it)
   */
  public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
    try {
      // Use the gpt-tokenizer library to count tokens
      const tokens = plugins.gptTokenizer.encode(text);
      return tokens.length;
    } catch (error) {
      console.error('Error counting tokens:', error);
      // Provide a rough estimate (~4 chars/token) if tokenization fails
      return Math.ceil(text.length / 4);
    }
  }

  /**
   * Get the context result
   */
  public getContextResult(): IContextResult {
    return this.contextResult;
  }

  /**
   * Get the token count for the current context
   */
  public getTokenCount(): number {
    return this.contextResult.tokenCount;
  }

  /**
   * Get both the context string and its token count
   */
  public getContextWithTokenCount(): { context: string; tokenCount: number } {
    return {
      context: this.contextResult.context,
      tokenCount: this.contextResult.tokenCount
    };
  }
}
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
import { EnhancedContext } from './enhanced-context.js';
|
|
||||||
import { TaskContextFactory } from './task-context-factory.js';
|
|
||||||
import { ConfigManager } from './config-manager.js';
|
|
||||||
import { ContextTrimmer } from './context-trimmer.js';
|
|
||||||
import type {
|
|
||||||
ContextMode,
|
|
||||||
IContextConfig,
|
|
||||||
IContextResult,
|
|
||||||
IFileInfo,
|
|
||||||
ITrimConfig,
|
|
||||||
ITaskConfig,
|
|
||||||
TaskType
|
|
||||||
} from './types.js';
|
|
||||||
|
|
||||||
export {
|
|
||||||
// Classes
|
|
||||||
EnhancedContext,
|
|
||||||
TaskContextFactory,
|
|
||||||
ConfigManager,
|
|
||||||
ContextTrimmer,
|
|
||||||
};
|
|
||||||
|
|
||||||
// Types
|
|
||||||
export type {
|
|
||||||
ContextMode,
|
|
||||||
IContextConfig,
|
|
||||||
IContextResult,
|
|
||||||
IFileInfo,
|
|
||||||
ITrimConfig,
|
|
||||||
ITaskConfig,
|
|
||||||
TaskType
|
|
||||||
};
|
|
||||||
@@ -1,138 +0,0 @@
|
|||||||
import * as plugins from '../plugins.js';
|
|
||||||
import { EnhancedContext } from './enhanced-context.js';
|
|
||||||
import { ConfigManager } from './config-manager.js';
|
|
||||||
import type { IContextResult, TaskType } from './types.js';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Factory class for creating task-specific context
|
|
||||||
*/
|
|
||||||
export class TaskContextFactory {
|
|
||||||
private projectDir: string;
|
|
||||||
private configManager: ConfigManager;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create a new TaskContextFactory
|
|
||||||
* @param projectDirArg The project directory
|
|
||||||
*/
|
|
||||||
constructor(projectDirArg: string) {
|
|
||||||
this.projectDir = projectDirArg;
|
|
||||||
this.configManager = ConfigManager.getInstance();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Initialize the factory
|
|
||||||
*/
|
|
||||||
public async initialize(): Promise<void> {
|
|
||||||
await this.configManager.initialize(this.projectDir);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create context for README generation
|
|
||||||
*/
|
|
||||||
public async createContextForReadme(): Promise<IContextResult> {
|
|
||||||
const contextBuilder = new EnhancedContext(this.projectDir);
|
|
||||||
await contextBuilder.initialize();
|
|
||||||
|
|
||||||
// Get README-specific configuration
|
|
||||||
const taskConfig = this.configManager.getTaskConfig('readme');
|
|
||||||
if (taskConfig.mode) {
|
|
||||||
contextBuilder.setContextMode(taskConfig.mode);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build the context for README task
|
|
||||||
return await contextBuilder.buildContext('readme');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create context for description generation
|
|
||||||
*/
|
|
||||||
public async createContextForDescription(): Promise<IContextResult> {
|
|
||||||
const contextBuilder = new EnhancedContext(this.projectDir);
|
|
||||||
await contextBuilder.initialize();
|
|
||||||
|
|
||||||
// Get description-specific configuration
|
|
||||||
const taskConfig = this.configManager.getTaskConfig('description');
|
|
||||||
if (taskConfig.mode) {
|
|
||||||
contextBuilder.setContextMode(taskConfig.mode);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build the context for description task
|
|
||||||
return await contextBuilder.buildContext('description');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create context for commit message generation
|
|
||||||
* @param gitDiff Optional git diff to include
|
|
||||||
*/
|
|
||||||
public async createContextForCommit(gitDiff?: string): Promise<IContextResult> {
|
|
||||||
const contextBuilder = new EnhancedContext(this.projectDir);
|
|
||||||
await contextBuilder.initialize();
|
|
||||||
|
|
||||||
// Get commit-specific configuration
|
|
||||||
const taskConfig = this.configManager.getTaskConfig('commit');
|
|
||||||
if (taskConfig.mode) {
|
|
||||||
contextBuilder.setContextMode(taskConfig.mode);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Build the context for commit task
|
|
||||||
const contextResult = await contextBuilder.buildContext('commit');
|
|
||||||
|
|
||||||
// If git diff is provided, add it to the context
|
|
||||||
if (gitDiff) {
|
|
||||||
contextBuilder.updateWithGitDiff(gitDiff);
|
|
||||||
}
|
|
||||||
|
|
||||||
return contextBuilder.getContextResult();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Create context for any task type
|
|
||||||
* @param taskType The task type to create context for
|
|
||||||
* @param additionalContent Optional additional content to include
|
|
||||||
*/
|
|
||||||
public async createContextForTask(
|
|
||||||
taskType: TaskType,
|
|
||||||
additionalContent?: string
|
|
||||||
): Promise<IContextResult> {
|
|
||||||
switch (taskType) {
|
|
||||||
case 'readme':
|
|
||||||
return this.createContextForReadme();
|
|
||||||
case 'description':
|
|
||||||
return this.createContextForDescription();
|
|
||||||
case 'commit':
|
|
||||||
return this.createContextForCommit(additionalContent);
|
|
||||||
default:
|
|
||||||
// Generic context for unknown task types
|
|
||||||
const contextBuilder = new EnhancedContext(this.projectDir);
|
|
||||||
await contextBuilder.initialize();
|
|
||||||
return await contextBuilder.buildContext();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get token stats for all task types
|
|
||||||
*/
|
|
||||||
public async getTokenStats(): Promise<Record<TaskType, {
|
|
||||||
tokenCount: number;
|
|
||||||
savings: number;
|
|
||||||
includedFiles: number;
|
|
||||||
trimmedFiles: number;
|
|
||||||
excludedFiles: number;
|
|
||||||
}>> {
|
|
||||||
const taskTypes: TaskType[] = ['readme', 'description', 'commit'];
|
|
||||||
const stats: Record<TaskType, any> = {} as any;
|
|
||||||
|
|
||||||
for (const taskType of taskTypes) {
|
|
||||||
const result = await this.createContextForTask(taskType);
|
|
||||||
stats[taskType] = {
|
|
||||||
tokenCount: result.tokenCount,
|
|
||||||
savings: result.tokenSavings,
|
|
||||||
includedFiles: result.includedFiles.length,
|
|
||||||
trimmedFiles: result.trimmedFiles.length,
|
|
||||||
excludedFiles: result.excludedFiles.length
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
return stats;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,95 +0,0 @@
|
|||||||
/**
|
|
||||||
* Context processing mode to control how context is built
|
|
||||||
*/
|
|
||||||
export type ContextMode = 'full' | 'trimmed' | 'summarized';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Configuration for context trimming
|
|
||||||
*/
|
|
||||||
export interface ITrimConfig {
|
|
||||||
/** Whether to remove function implementations */
|
|
||||||
removeImplementations?: boolean;
|
|
||||||
/** Whether to preserve interface definitions */
|
|
||||||
preserveInterfaces?: boolean;
|
|
||||||
/** Whether to preserve type definitions */
|
|
||||||
preserveTypeDefs?: boolean;
|
|
||||||
/** Whether to preserve JSDoc comments */
|
|
||||||
preserveJSDoc?: boolean;
|
|
||||||
/** Maximum lines to keep for function bodies (if not removing completely) */
|
|
||||||
maxFunctionLines?: number;
|
|
||||||
/** Whether to remove normal comments (non-JSDoc) */
|
|
||||||
removeComments?: boolean;
|
|
||||||
/** Whether to remove blank lines */
|
|
||||||
removeBlankLines?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Task types that require different context optimization
|
|
||||||
*/
|
|
||||||
export type TaskType = 'readme' | 'commit' | 'description';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Configuration for different tasks
|
|
||||||
*/
|
|
||||||
export interface ITaskConfig {
|
|
||||||
/** The context mode to use for this task */
|
|
||||||
mode?: ContextMode;
|
|
||||||
/** File paths to include for this task */
|
|
||||||
includePaths?: string[];
|
|
||||||
/** File paths to exclude for this task */
|
|
||||||
excludePaths?: string[];
|
|
||||||
/** For commit tasks, whether to focus on changed files */
|
|
||||||
focusOnChangedFiles?: boolean;
|
|
||||||
/** For description tasks, whether to include package info */
|
|
||||||
includePackageInfo?: boolean;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Complete context configuration
|
|
||||||
*/
|
|
||||||
export interface IContextConfig {
|
|
||||||
/** Maximum tokens to use for context */
|
|
||||||
maxTokens?: number;
|
|
||||||
/** Default context mode */
|
|
||||||
defaultMode?: ContextMode;
|
|
||||||
/** Task-specific settings */
|
|
||||||
taskSpecificSettings?: {
|
|
||||||
[key in TaskType]?: ITaskConfig;
|
|
||||||
};
|
|
||||||
/** Trimming configuration */
|
|
||||||
trimming?: ITrimConfig;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Basic file information interface
|
|
||||||
*/
|
|
||||||
export interface IFileInfo {
|
|
||||||
/** The file path */
|
|
||||||
path: string;
|
|
||||||
/** The file contents */
|
|
||||||
contents: string;
|
|
||||||
/** The file's relative path from the project root */
|
|
||||||
relativePath: string;
|
|
||||||
/** The estimated token count of the file */
|
|
||||||
tokenCount?: number;
|
|
||||||
/** The file's importance score (higher is more important) */
|
|
||||||
importanceScore?: number;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Result of context building
|
|
||||||
*/
|
|
||||||
export interface IContextResult {
|
|
||||||
/** The generated context string */
|
|
||||||
context: string;
|
|
||||||
/** The total token count of the context */
|
|
||||||
tokenCount: number;
|
|
||||||
/** Files included in the context */
|
|
||||||
includedFiles: IFileInfo[];
|
|
||||||
/** Files that were trimmed */
|
|
||||||
trimmedFiles: IFileInfo[];
|
|
||||||
/** Files that were excluded */
|
|
||||||
excludedFiles: IFileInfo[];
|
|
||||||
/** Token savings from trimming */
|
|
||||||
tokenSavings: number;
|
|
||||||
}
|
|
||||||
@@ -6,10 +6,13 @@ export { path };
|
|||||||
// pushrocks scope
|
// pushrocks scope
|
||||||
import * as npmextra from '@push.rocks/npmextra';
|
import * as npmextra from '@push.rocks/npmextra';
|
||||||
import * as qenv from '@push.rocks/qenv';
|
import * as qenv from '@push.rocks/qenv';
|
||||||
|
import * as smartagent from '@push.rocks/smartagent';
|
||||||
|
import * as smartagentTools from '@push.rocks/smartagent/tools';
|
||||||
import * as smartai from '@push.rocks/smartai';
|
import * as smartai from '@push.rocks/smartai';
|
||||||
import * as smartcli from '@push.rocks/smartcli';
|
import * as smartcli from '@push.rocks/smartcli';
|
||||||
import * as smartdelay from '@push.rocks/smartdelay';
|
import * as smartdelay from '@push.rocks/smartdelay';
|
||||||
import * as smartfile from '@push.rocks/smartfile';
|
import * as smartfile from '@push.rocks/smartfile';
|
||||||
|
import * as smartfs from '@push.rocks/smartfs';
|
||||||
import * as smartgit from '@push.rocks/smartgit';
|
import * as smartgit from '@push.rocks/smartgit';
|
||||||
import * as smartinteract from '@push.rocks/smartinteract';
|
import * as smartinteract from '@push.rocks/smartinteract';
|
||||||
import * as smartlog from '@push.rocks/smartlog';
|
import * as smartlog from '@push.rocks/smartlog';
|
||||||
@@ -21,10 +24,13 @@ import * as smarttime from '@push.rocks/smarttime';
|
|||||||
export {
|
export {
|
||||||
npmextra,
|
npmextra,
|
||||||
qenv,
|
qenv,
|
||||||
|
smartagent,
|
||||||
|
smartagentTools,
|
||||||
smartai,
|
smartai,
|
||||||
smartcli,
|
smartcli,
|
||||||
smartdelay,
|
smartdelay,
|
||||||
smartfile,
|
smartfile,
|
||||||
|
smartfs,
|
||||||
smartgit,
|
smartgit,
|
||||||
smartinteract,
|
smartinteract,
|
||||||
smartlog,
|
smartlog,
|
||||||
@@ -34,6 +40,13 @@ export {
|
|||||||
smarttime,
|
smarttime,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Create a shared SmartFs instance for filesystem operations
|
||||||
|
const smartFsNodeProvider = new smartfs.SmartFsProviderNode();
|
||||||
|
export const fsInstance = new smartfs.SmartFs(smartFsNodeProvider);
|
||||||
|
|
||||||
|
// Create a shared SmartFileFactory for in-memory file operations
|
||||||
|
export const smartfileFactory = smartfile.SmartFileFactory.nodeFs();
|
||||||
|
|
||||||
// @git.zone scope
|
// @git.zone scope
|
||||||
import * as tspublish from '@git.zone/tspublish';
|
import * as tspublish from '@git.zone/tspublish';
|
||||||
|
|
||||||
@@ -41,6 +54,5 @@ export { tspublish };
|
|||||||
|
|
||||||
// third party scope
|
// third party scope
|
||||||
import * as typedoc from 'typedoc';
|
import * as typedoc from 'typedoc';
|
||||||
import * as gptTokenizer from 'gpt-tokenizer';
|
|
||||||
|
|
||||||
export { typedoc, gptTokenizer };
|
export { typedoc };
|
||||||
|
|||||||
Reference in New Issue
Block a user