Compare commits

...

37 Commits

Author SHA1 Message Date
3485392979 1.11.2
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-12-15 15:22:45 +00:00
89adae2cff update 2025-12-15 15:22:35 +00:00
3451ab7456 update 2025-12-15 15:14:16 +00:00
bcded1eafa update 2025-12-15 14:34:02 +00:00
9cae46e2fe update 2025-12-15 14:33:58 +00:00
65c1df30da 1.11.1
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-12-15 12:02:16 +00:00
e8f2add812 fix(dependencies): update 2025-12-15 12:02:13 +00:00
8fcc304ee3 v1.11.0
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-12-15 11:36:03 +00:00
69802b46b6 feat(commit): Integrate DualAgentOrchestrator for commit message generation and improve diff/context handling 2025-12-15 11:36:03 +00:00
e500455557 1.10.2
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-12-13 22:50:26 +00:00
4029691ccd 1.10.1
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-12-13 11:42:43 +00:00
3b1c84d7e8 fix(npmextra): update to new format 2025-12-13 11:42:39 +00:00
f8d0895aab v1.10.0
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-12-02 12:17:10 +00:00
d7ec2220a1 feat(diff-processor): Improve diff sampling and file prioritization: increase inclusion thresholds, expand sampled context, and boost priority for interface/type and entry-point files 2025-12-02 12:17:10 +00:00
c24ce31b1f 1.9.2
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-04 03:43:27 +00:00
fec2017cc6 fix(deps): Update dependencies and devDependencies to newer versions (bump multiple packages) 2025-11-04 03:43:27 +00:00
88fac91c79 1.9.1
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-04 02:28:55 +00:00
ce4da89da9 fix(iterative-context-builder): Rely on DiffProcessor for git diff pre-processing; remove raw char truncation, raise diff token safety, and improve logging 2025-11-04 02:28:55 +00:00
6524adea18 1.9.0
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-04 02:19:57 +00:00
4bf0c02618 feat(context): Add intelligent DiffProcessor to summarize and prioritize git diffs and integrate it into the commit context pipeline 2025-11-04 02:19:57 +00:00
f84a65217d 1.8.3
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-04 01:37:15 +00:00
3f22fc91ae fix(context): Prevent enormous git diffs and OOM during context building by adding exclusion patterns, truncation, and diagnostic logging 2025-11-04 01:37:15 +00:00
11e65b92ec 1.8.2
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-03 17:53:03 +00:00
0a3080518f 1.8.1
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-03 17:50:09 +00:00
d0a4ddbb4b fix(git diff): improve git diff 2025-11-03 17:49:35 +00:00
481339d3cb 1.8.0
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-03 13:37:16 +00:00
ebc3d760af feat(context): Wire OpenAI provider through task context factory and add git-diff support to iterative context builder 2025-11-03 13:37:16 +00:00
a6d678e36c 1.7.0
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-03 13:19:29 +00:00
8c3e16a4f2 feat(IterativeContextBuilder): Add iterative AI-driven context builder and integrate into task factory; add tests and iterative configuration 2025-11-03 13:19:29 +00:00
2276fb0c0c 1.6.1
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-03 11:04:21 +00:00
0a9d535df4 fix(context): Improve context building, caching and test robustness 2025-11-03 11:04:21 +00:00
d46fd1590e 1.6.0
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-11-02 23:07:59 +00:00
1d7317f063 feat(context): Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements 2025-11-02 23:07:59 +00:00
fe5121ec9c 1.5.2
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-09-07 07:54:04 +00:00
c084b20390 fix(package): Bump dependencies, refine test script and imports, and overhaul README and docs 2025-09-07 07:54:04 +00:00
6f024536a8 1.5.1
Some checks failed: Default (tags) security and test failing; release and metadata skipped
2025-08-16 11:20:39 +00:00
2405fb3370 fix(aidoc): Bump dependencies, add pnpm workspace config, and add AiDoc.stop() 2025-08-16 11:20:39 +00:00
24 changed files with 6953 additions and 7343 deletions

changelog.md

@@ -1,5 +1,116 @@
# Changelog
## 2025-12-15 - 1.11.0 - feat(commit)
Integrate DualAgentOrchestrator for commit message generation and improve diff/context handling
- Add @push.rocks/smartagent dependency and export it from plugins
- Use DualAgentOrchestrator to generate and guardian-validate commit messages
- Use DualAgentOrchestrator for changelog generation with guardian validation
- Switch commit flow to TaskContextFactory and DiffProcessor for token-efficient context
- Expose getOpenaiToken() and wire orchestrator with the project OpenAI token
- Enhance iterative context builder and context components to better manage token budgets and sampling
- Update npmextra.json with release config for @git.zone/cli and reference local smartagent package in package.json
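
The orchestrator flow this entry describes appears in the ts/aidocs_classes/commit.ts hunk later in this compare; a minimal lifecycle sketch, with `smartAiInstance` and both prompts as stand-ins:

```typescript
import * as smartagent from '@push.rocks/smartagent';

declare const smartAiInstance: any; // the project's SmartAi instance (stand-in)

const orchestrator = new smartagent.DualAgentOrchestrator({
  smartAiInstance,
  defaultProvider: 'openai',
  logPrefix: '[Commit]',
  onProgress: (event: any) => console.log(event.logLevel, event.logMessage),
  guardianPolicyPrompt:
    'APPROVE commit messages that match the diff and follow conventional commits; REJECT with feedback otherwise.',
});

await orchestrator.start();
const result = await orchestrator.run('Generate a commit message for this diff: ...');
await orchestrator.stop();

if (!result.success) {
  throw new Error(`Commit message generation failed: ${result.status}`);
}
// strip optional code fences before parsing, as the commit flow does
const commitObject = JSON.parse(
  result.result.replace(/`{3}json\s*/, '').replace(/`{3}\s*$/, ''),
);
```
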
## 2025-12-02 - 1.10.0 - feat(diff-processor)
Improve diff sampling and file prioritization: increase inclusion thresholds, expand sampled context, and boost priority for interface/type and entry-point files
- Raise small/medium file thresholds used by DiffProcessor (smallFileLines 50 -> 300, mediumFileLines 200 -> 800) so more source files are included fully or summarized rather than treated as large metadata-only files
- Increase sample window for medium files (sampleHeadLines/sampleTailLines 20 -> 75) to provide more context when summarizing diffs
- Boost importance scoring for interface/type files and entry points (adds +20 for interface/type definition files and +15 for index/mod entry-point files) to prioritize critical API surface in diff processing
- Keep other prioritization rules intact (source/test/config/docs/build heuristics), and align the aidoc commit DiffProcessor usage with the new defaults
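
These defaults match the options the commit flow now passes to DiffProcessor (see the ts/aidocs_classes/commit.ts hunk further down); a construction sketch:

```typescript
import { DiffProcessor } from './ts/classes.diffprocessor.js';

const diffProcessor = new DiffProcessor({
  maxDiffTokens: 100000, // token budget reserved for diffs
  smallFileLines: 300,   // raised from 50: files under this are included in full
  mediumFileLines: 800,  // raised from 200: files under this are summarized
  sampleHeadLines: 75,   // raised from 20: head sample for summarized files
  sampleTailLines: 75,   // raised from 20: tail sample for summarized files
});
```
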
## 2025-11-04 - 1.9.2 - fix(deps)
Update dependencies and devDependencies to newer versions (bump multiple packages)
- Bumped devDependencies: @git.zone/tsbuild 2.6.8 -> 2.7.1, @git.zone/tsrun 1.2.46 -> 1.6.2, @git.zone/tstest 2.3.6 -> 2.7.0
- Bumped runtime dependencies: @push.rocks/smartai 0.5.11 -> 0.8.0, @push.rocks/smartcli 4.0.11 -> 4.0.19, @push.rocks/smartgit 3.2.1 -> 3.3.1, @push.rocks/smartlog 3.1.9 -> 3.1.10, gpt-tokenizer 3.0.1 -> 3.2.0, typedoc 0.28.12 -> 0.28.14, typescript 5.9.2 -> 5.9.3
- No source code changes in this commit; dependency-only updates. Run the test suite and CI to verify compatibility.
## 2025-11-04 - 1.9.1 - fix(iterative-context-builder)
Rely on DiffProcessor for git diff pre-processing; remove raw char truncation, raise diff token safety, and improve logging
- Removed raw character-based truncation of additionalContext — diffs are expected to be pre-processed by DiffProcessor instead of blind substring truncation.
- Now validates pre-processed diff token count only and treats DiffProcessor as the primary sampler (DiffProcessor typically uses a ~100k token budget).
- Increased MAX_DIFF_TOKENS safety net to 200,000 to cover edge cases and avoid false positives; updated logs to reflect pre-processed diffs.
- Improved error messaging to indicate a likely DiffProcessor misconfiguration when pre-processed diffs exceed the safety limit.
- Updated informational logs to state that a pre-processed git diff was added to context.
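
A sketch of the safety net described here; the constant value comes from this entry, and the estimator mirrors the ~4 characters per token heuristic used elsewhere in this compare:

```typescript
const MAX_DIFF_TOKENS = 200_000; // safety net for pre-processed diffs

function assertPreprocessedDiffFits(processedDiff: string): void {
  // rough estimate: ~4 characters per token for English text
  const estimatedTokens = Math.ceil(processedDiff.length / 4);
  if (estimatedTokens > MAX_DIFF_TOKENS) {
    // exceeding the limit after DiffProcessor ran usually means misconfiguration
    throw new Error(
      `Pre-processed diff is ~${estimatedTokens.toLocaleString()} tokens ` +
      `(limit ${MAX_DIFF_TOKENS.toLocaleString()}); check the DiffProcessor configuration.`,
    );
  }
}
```
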
## 2025-11-04 - 1.9.0 - feat(context)
Add intelligent DiffProcessor to summarize and prioritize git diffs and integrate it into the commit context pipeline
- Add DiffProcessor (ts/context/diff-processor.ts) to intelligently process git diffs: include small files fully, summarize medium files (head/tail sampling), and mark very large files as metadata-only to stay within token budgets.
- Integrate DiffProcessor into commit workflow (ts/aidocs_classes/commit.ts): preprocess raw diffs, emit processed diff statistics, and pass a token-efficient diff section into the TaskContextFactory for commit context generation.
- Export DiffProcessor and its types through the context index and types (ts/context/index.ts, ts/context/types.ts) so other context components can reuse it.
- Add comprehensive tests for the DiffProcessor behavior and integration (test/test.diffprocessor.node.ts) covering small/medium/large diffs, added/deleted files, prioritization, token budgets, and formatting for context.
- Minor adjustments across context/task factories and builders to accept and propagate processed diff strings rather than raw diffs, reducing risk of token overflows during iterative context building.
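
The three-tier behavior is exercised by test/test.diffprocessor.node.ts (included below); a usage sketch of the pipeline:

```typescript
import { DiffProcessor } from './ts/classes.diffprocessor.js';

const diffStringArray: string[] = []; // one raw per-file `git diff` string per entry

const processor = new DiffProcessor(); // default thresholds
const result = processor.processDiffs(diffStringArray);
// result.fullDiffs: small files, included verbatim
// result.summarizedDiffs: medium files, head/tail sampled
// result.metadataOnly: very large files, stats only
// result.totalFiles / result.totalTokens: overall accounting
const diffSection = processor.formatForContext(result); // token-efficient context block
```
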
## 2025-11-04 - 1.8.3 - fix(context)
Prevent enormous git diffs and OOM during context building by adding exclusion patterns, truncation, and diagnostic logging
- Add comprehensive git diff exclusion globs (locks, build artifacts, maps, bundles, IDE folders, logs, caches) when collecting uncommitted diffs to avoid noisy/huge diffs
- Pass glob patterns directly to smartgit.getUncommittedDiff for efficient server-side matching
- Emit diagnostic statistics for diffs (files changed, total characters, estimated tokens, number of exclusion patterns) and warn on unusually large diffs
- Introduce pre-tokenization safety checks in iterative context builder: truncate raw diff text if it exceeds MAX_DIFF_CHARS and throw a clear error if token count still exceeds MAX_DIFF_TOKENS
- Format and log token counts using locale-aware formatting for clarity
- Improve robustness of commit context generation to reduce risk of OOM / model-limit overruns
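
Condensed from the ts/aidocs_classes/commit.ts hunk later in this compare (the full exclusion list is much longer):

```typescript
declare const gitRepo: { getUncommittedDiff(patterns: string[]): Promise<string[]> };

const excludePatterns = [
  'pnpm-lock.yaml', 'package-lock.json',   // lock files
  'dist/**', 'dist_*/**', 'build/**',      // build artifacts
  '**/*.min.js', '**/*.js.map',            // bundled/compiled output
  '.nogit/**', '**/*.log', 'coverage/**',  // logs and caches
];

// smartgit@3.3.0+ matches glob patterns internally
const diffStringArray = await gitRepo.getUncommittedDiff(excludePatterns);

// diagnostic statistics with locale-aware formatting
const totalChars = diffStringArray.join('\n\n').length;
const estimatedTokens = Math.ceil(totalChars / 4);
console.log(`Files changed: ${diffStringArray.length}`);
console.log(`Estimated tokens: ${estimatedTokens.toLocaleString()}`);
```
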
## 2025-11-03 - 1.8.0 - feat(context)
Wire OpenAI provider through task context factory and add git-diff support to iterative context builder
- Pass AiDoc.openaiInstance through TaskContextFactory into IterativeContextBuilder to reuse the same OpenAI provider and avoid reinitialization.
- IterativeContextBuilder now accepts an optional OpenAiProvider and an additionalContext string; when provided, git diffs (or other extra context) are prepended to the AI context and token counts are updated.
- createContextForCommit now forwards the git diff into the iterative builder so commit-specific context includes the diff.
- Updated aidocs_classes (commit, description, readme) to supply the existing openaiInstance when creating the TaskContextFactory.
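
A sketch of the wiring; `initialize` and `createContextForCommit` appear in the commit.ts hunk below, while the constructor parameter for the shared provider is an assumption based on this entry:

```typescript
import { TaskContextFactory } from './ts/context/index.js';

declare const openaiInstance: any; // AiDoc's existing OpenAI provider (stand-in)
declare const projectDir: string;
declare const gitDiffString: string;

// assumed signature: the factory forwards the provider to IterativeContextBuilder
const taskContextFactory = new TaskContextFactory(projectDir, openaiInstance);
await taskContextFactory.initialize();

// the git diff travels into the iterative builder as additionalContext
const contextResult = await taskContextFactory.createContextForCommit(gitDiffString);
console.log(`Context tokens: ${contextResult.tokenCount}`);
```
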
## 2025-11-03 - 1.7.0 - feat(IterativeContextBuilder)
Add iterative AI-driven context builder and integrate into task factory; add tests and iterative configuration
- Introduce IterativeContextBuilder: iterative, token-aware context construction that asks the AI which files to load and evaluates context sufficiency.
- Switch TaskContextFactory to use IterativeContextBuilder for readme, description and commit tasks (replaces earlier EnhancedContext flow for these tasks).
- Add iterative configuration options (maxIterations, firstPassFileLimit, subsequentPassFileLimit, temperature, model) in types and ConfigManager and merge support for user config.
- Update CLI (tokens and aidoc flows) to use the iterative context factory and improve task handling and messaging.
- Add test coverage: test/test.iterativecontextbuilder.node.ts to validate initialization, iterative builds, token budget respect and multiple task types.
- Enhance ContextCache, LazyFileLoader, ContextAnalyzer and ContextTrimmer to support the iterative pipeline and smarter prioritization/prompts.
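
The option names below come from this entry; the concrete values and the config object shape are assumptions for illustration:

```typescript
// hypothetical config object consumed by ConfigManager (values are examples)
const iterativeConfig = {
  maxIterations: 5,            // upper bound on load/evaluate rounds
  firstPassFileLimit: 10,      // files the AI may request in the first pass
  subsequentPassFileLimit: 5,  // files per follow-up pass
  temperature: 0.2,            // sampling temperature for the context AI
  model: 'o4-mini',            // model used for file selection and evaluation
};
```
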
## 2025-11-03 - 1.6.1 - fix(context)
Improve context building, caching and test robustness
- EnhancedContext: refactored smart context building to use the analyzer and TaskContextFactory by default; taskType now defaults to 'description' and task-specific modes are applied.
- ConfigManager: simplified analyzer configuration (removed enabled flag) and fixed getAnalyzerConfig fallback shape.
- ContextCache: more robust mtime handling and persistence; tests updated to use real file mtimes so cache validation works reliably.
- LazyFileLoader: adjusted token estimation tolerance and improved metadata caching behavior.
- ContextAnalyzer & trimming pipeline: improved prioritization and trimming integration to better enforce token budgets.
- Tests: relaxed strict timing/boolean checks and made assertions more tolerant (toEqual vs toBe) to reduce false negatives.
## 2025-11-02 - 1.6.0 - feat(context)
Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements
- Add ContextAnalyzer for dependency-based file scoring and prioritization (PageRank-like centrality, relevance, efficiency, recency)
- Add LazyFileLoader to scan metadata and load files in parallel with lightweight token estimates
- Add ContextCache for persistent file content/token caching with TTL and max-size eviction
- Enhance ContextTrimmer with tier-based trimming and configurable light/aggressive levels
- Integrate new components into EnhancedContext and TaskContextFactory to build task-aware, token-optimized contexts
- Extend ConfigManager and types to support cache, analyzer, prioritization weights and tier configs (npmextra.json driven)
- Add comprehensive unit tests for ContextAnalyzer, ContextCache and LazyFileLoader
- Update README with Smart Context Building docs, examples, configuration options and CI workflow snippet
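
Illustrative only; the class names come from this entry, but the constructor options and method names here are assumptions:

```typescript
declare class ContextCache {
  constructor(options: { ttlMs: number; maxSizeBytes: number });
}
declare class LazyFileLoader {
  constructor(projectDir: string);
  scanMetadata(glob: string): Promise<Array<{ path: string; estimatedTokens: number }>>;
}

// persistent content/token cache with TTL and max-size eviction
const cache = new ContextCache({ ttlMs: 24 * 60 * 60 * 1000, maxSizeBytes: 50 * 1024 * 1024 });

// metadata scan with lightweight token estimates, before loading file contents
const loader = new LazyFileLoader('/path/to/project');
const metadata = await loader.scanMetadata('ts/**/*.ts');
```
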
## 2025-09-07 - 1.5.2 - fix(package)
Bump dependencies, refine test script and imports, and overhaul README and docs
- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
- Updated test script to run tstest with verbose, logfile and increased timeout; adjusted testCli script invocation.
- Fixed test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
- Large README rewrite: reorganized and expanded content, added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
- Minor clarification added to commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).
## 2025-08-16 - 1.5.1 - fix(aidoc)
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()
- Bumped multiple dependencies and devDependencies in package.json (notable upgrades: @git.zone/tsbuild, @git.zone/tspublish, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartai, @push.rocks/smartfile, @push.rocks/smartgit, @push.rocks/smartlog, @push.rocks/smartpath, @push.rocks/smartshell, typedoc, typescript).
- Added pnpm-workspace.yaml with onlyBuiltDependencies (esbuild, mongodb-memory-server, puppeteer, sharp).
- Added AiDoc.stop() to properly stop the OpenAI provider (resource/client shutdown).
- Updated packageManager field in package.json to a newer pnpm version/hash.
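
The new stop() hook is exercised by test/test.aidoc.nonci.ts in this compare; a lifecycle sketch (the constructor argument shape is an assumption):

```typescript
declare class AiDoc {
  constructor(tokens: { OPENAI_TOKEN: string }); // assumed shape
  start(): Promise<void>;
  stop(): Promise<void>;
}

const aidoc = new AiDoc({ OPENAI_TOKEN: process.env.OPENAI_TOKEN! });
await aidoc.start();
// ... generate readme/description/commit objects ...
await aidoc.stop(); // shuts down the OpenAI provider and releases resources
```
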
## 2025-05-14 - 1.5.0 - feat(docs)
Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions

npmextra.json

@@ -31,5 +31,14 @@
},
"tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@git.zone/cli": {
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
}
}

package.json

@@ -1,6 +1,6 @@
{
"name": "@git.zone/tsdoc",
"version": "1.5.0",
"version": "1.11.2",
"private": false,
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
"type": "module",
@@ -13,37 +13,37 @@
"tsdoc": "cli.js"
},
"scripts": {
"test": "(tstest test/) && npm run testCli",
"test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
"testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
"build": "(tsbuild --web --allowimplicitany)",
"buildDocs": "tsdoc"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.3.2",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^1.0.90",
"@push.rocks/tapbundle": "^6.0.3",
"@types/node": "^22.15.17"
"@git.zone/tsbuild": "^4.0.2",
"@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.3",
"@types/node": "^25.0.2"
},
"dependencies": {
"@git.zone/tspublish": "^1.5.5",
"@push.rocks/early": "^4.0.3",
"@push.rocks/npmextra": "^5.0.23",
"@push.rocks/qenv": "^6.0.5",
"@push.rocks/smartai": "^0.5.4",
"@push.rocks/smartcli": "^4.0.11",
"@git.zone/tspublish": "^1.10.3",
"@push.rocks/early": "^4.0.4",
"@push.rocks/npmextra": "^5.3.3",
"@push.rocks/qenv": "^6.1.3",
"@push.rocks/smartagent": "file:../../push.rocks/smartagent",
"@push.rocks/smartai": "^0.8.0",
"@push.rocks/smartcli": "^4.0.19",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.0.20",
"@push.rocks/smartgit": "^3.1.0",
"@push.rocks/smartinteract": "^2.0.15",
"@push.rocks/smartlog": "^3.0.9",
"@push.rocks/smartfile": "^13.1.2",
"@push.rocks/smartfs": "^1.2.0",
"@push.rocks/smartgit": "^3.3.1",
"@push.rocks/smartinteract": "^2.0.16",
"@push.rocks/smartlog": "^3.1.10",
"@push.rocks/smartlog-destination-local": "^9.0.2",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartshell": "^3.0.5",
"@push.rocks/smarttime": "^4.0.6",
"gpt-tokenizer": "^2.9.0",
"typedoc": "^0.28.4",
"typescript": "^5.8.3"
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartshell": "^3.3.0",
"@push.rocks/smarttime": "^4.1.1",
"typedoc": "^0.28.15",
"typescript": "^5.9.3"
},
"files": [
"ts/**/*",
@@ -81,5 +81,5 @@
"url": "https://gitlab.com/gitzone/tsdoc/issues"
},
"homepage": "https://gitlab.com/gitzone/tsdoc#readme",
"packageManager": "pnpm@10.10.0+sha512.d615db246fe70f25dcfea6d8d73dee782ce23e2245e3c4f6f888249fb568149318637dca73c2c5c8ef2a4ca0d5657fb9567188bfab47f566d1ee6ce987815c39"
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
}

pnpm-lock.yaml (generated; 10,495 lines changed)

File diff suppressed because it is too large

readme.md (1,033 lines changed)

File diff suppressed because it is too large

test/test.aidoc.nonci.ts

@@ -1,4 +1,4 @@
import { tap, expect } from '@push.rocks/tapbundle';
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
let testQenv = new qenv.Qenv('./', '.nogit/');
@@ -33,7 +33,10 @@ tap.test('should build commit object', async () => {
expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
expect(commitObject).toHaveProperty('recommendedNextVersionScope');
expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
});
})
tap.test('should stop AIdocs', async () => {
await aidocs.stop();
});
tap.start();

test/test.diffprocessor.node.ts

@@ -0,0 +1,304 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import { DiffProcessor } from '../ts/classes.diffprocessor.js';
// Sample diff strings for testing
const createSmallDiff = (filepath: string, addedLines = 5, removedLines = 3): string => {
const lines: string[] = [];
lines.push(`--- a/${filepath}`);
lines.push(`+++ b/${filepath}`);
lines.push(`@@ -1,10 +1,12 @@`);
for (let i = 0; i < removedLines; i++) {
lines.push(`-removed line ${i + 1}`);
}
for (let i = 0; i < addedLines; i++) {
lines.push(`+added line ${i + 1}`);
}
lines.push(' unchanged line');
return lines.join('\n');
};
const createMediumDiff = (filepath: string): string => {
const lines: string[] = [];
lines.push(`--- a/${filepath}`);
lines.push(`+++ b/${filepath}`);
lines.push(`@@ -1,100 +1,150 @@`);
// 150 lines of changes
for (let i = 0; i < 75; i++) {
lines.push(`+added line ${i + 1}`);
}
for (let i = 0; i < 75; i++) {
lines.push(`-removed line ${i + 1}`);
}
return lines.join('\n');
};
const createLargeDiff = (filepath: string): string => {
const lines: string[] = [];
lines.push(`--- a/${filepath}`);
lines.push(`+++ b/${filepath}`);
lines.push(`@@ -1,1000 +1,1500 @@`);
// 2500 lines of changes
for (let i = 0; i < 1250; i++) {
lines.push(`+added line ${i + 1}`);
}
for (let i = 0; i < 1250; i++) {
lines.push(`-removed line ${i + 1}`);
}
return lines.join('\n');
};
const createDeletedFileDiff = (filepath: string): string => {
return `--- a/${filepath}
+++ /dev/null
@@ -1,5 +0,0 @@
-deleted line 1
-deleted line 2
-deleted line 3
-deleted line 4
-deleted line 5`;
};
const createAddedFileDiff = (filepath: string): string => {
return `--- /dev/null
+++ b/${filepath}
@@ -0,0 +1,5 @@
+added line 1
+added line 2
+added line 3
+added line 4
+added line 5`;
};
tap.test('DiffProcessor should parse small diff correctly', async () => {
const processor = new DiffProcessor();
const smallDiff = createSmallDiff('src/test.ts', 5, 3);
const result = processor.processDiffs([smallDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(1);
expect(result.summarizedDiffs.length).toEqual(0);
expect(result.metadataOnly.length).toEqual(0);
expect(result.totalTokens).toBeGreaterThan(0);
});
tap.test('DiffProcessor should summarize medium diff', async () => {
const processor = new DiffProcessor();
const mediumDiff = createMediumDiff('src/medium-file.ts');
const result = processor.processDiffs([mediumDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(0);
expect(result.summarizedDiffs.length).toEqual(1);
expect(result.metadataOnly.length).toEqual(0);
// Verify the summarized diff contains the sample
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('SUMMARIZED DIFFS');
expect(formatted).toInclude('lines omitted');
});
tap.test('DiffProcessor should handle large diff as metadata only', async () => {
const processor = new DiffProcessor();
const largeDiff = createLargeDiff('dist/bundle.js');
const result = processor.processDiffs([largeDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(0);
expect(result.summarizedDiffs.length).toEqual(0);
expect(result.metadataOnly.length).toEqual(1);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('METADATA ONLY');
expect(formatted).toInclude('dist/bundle.js');
});
tap.test('DiffProcessor should prioritize source files over build artifacts', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('dist/bundle.js'),
createSmallDiff('src/important.ts'),
createSmallDiff('build/output.js'),
createSmallDiff('src/core.ts'),
];
const result = processor.processDiffs(diffs);
expect(result.totalFiles).toEqual(4);
// Source files should be included fully first
const formatted = processor.formatForContext(result);
const srcImportantIndex = formatted.indexOf('src/important.ts');
const srcCoreIndex = formatted.indexOf('src/core.ts');
const distBundleIndex = formatted.indexOf('dist/bundle.js');
const buildOutputIndex = formatted.indexOf('build/output.js');
// Source files should appear before build artifacts
expect(srcImportantIndex).toBeLessThan(distBundleIndex);
expect(srcCoreIndex).toBeLessThan(buildOutputIndex);
});
tap.test('DiffProcessor should respect token budget', async () => {
const processor = new DiffProcessor({
maxDiffTokens: 500, // Very small budget to force metadata-only
});
// Create multiple large diffs that will exceed budget
const diffs = [
createLargeDiff('src/file1.ts'),
createLargeDiff('src/file2.ts'),
createLargeDiff('src/file3.ts'),
createLargeDiff('src/file4.ts'),
];
const result = processor.processDiffs(diffs);
expect(result.totalTokens).toBeLessThanOrEqual(500);
// With such a small budget and large files, most should be metadata only
expect(result.metadataOnly.length).toBeGreaterThanOrEqual(2);
});
tap.test('DiffProcessor should handle deleted files', async () => {
const processor = new DiffProcessor();
const deletedDiff = createDeletedFileDiff('src/old-file.ts');
const result = processor.processDiffs([deletedDiff]);
expect(result.totalFiles).toEqual(1);
// Small deleted file should be included fully
expect(result.fullDiffs.length).toEqual(1);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('src/old-file.ts');
// Verify the file appears in the output
expect(formatted).toInclude('FULL DIFFS');
});
tap.test('DiffProcessor should handle added files', async () => {
const processor = new DiffProcessor();
const addedDiff = createAddedFileDiff('src/new-file.ts');
const result = processor.processDiffs([addedDiff]);
expect(result.totalFiles).toEqual(1);
// Small added file should be included fully
expect(result.fullDiffs.length).toEqual(1);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('src/new-file.ts');
// Verify the file appears in the output
expect(formatted).toInclude('FULL DIFFS');
});
tap.test('DiffProcessor should handle mixed file sizes', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('src/small.ts'),
createMediumDiff('src/medium.ts'),
createLargeDiff('dist/large.js'),
];
const result = processor.processDiffs(diffs);
expect(result.totalFiles).toEqual(3);
expect(result.fullDiffs.length).toEqual(1); // small file
expect(result.summarizedDiffs.length).toEqual(1); // medium file
expect(result.metadataOnly.length).toEqual(1); // large file
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('FULL DIFFS (1 files)');
expect(formatted).toInclude('SUMMARIZED DIFFS (1 files)');
expect(formatted).toInclude('METADATA ONLY (1 files)');
});
tap.test('DiffProcessor should handle empty diff array', async () => {
const processor = new DiffProcessor();
const result = processor.processDiffs([]);
expect(result.totalFiles).toEqual(0);
expect(result.fullDiffs.length).toEqual(0);
expect(result.summarizedDiffs.length).toEqual(0);
expect(result.metadataOnly.length).toEqual(0);
expect(result.totalTokens).toEqual(0);
});
tap.test('DiffProcessor should generate comprehensive summary', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('src/file1.ts'),
createSmallDiff('src/file2.ts'),
createMediumDiff('src/file3.ts'),
createLargeDiff('dist/bundle.js'),
];
const result = processor.processDiffs(diffs);
const formatted = processor.formatForContext(result);
expect(formatted).toInclude('GIT DIFF SUMMARY');
expect(formatted).toInclude('Files changed: 4 total');
expect(formatted).toInclude('included in full');
expect(formatted).toInclude('summarized');
expect(formatted).toInclude('metadata only');
expect(formatted).toInclude('Estimated tokens:');
expect(formatted).toInclude('END OF GIT DIFF');
});
tap.test('DiffProcessor should handle custom options', async () => {
const processor = new DiffProcessor({
maxDiffTokens: 50000,
smallFileLines: 30,
mediumFileLines: 150,
sampleHeadLines: 10,
sampleTailLines: 10,
});
const mediumDiff = createMediumDiff('src/file.ts'); // 150 lines
const result = processor.processDiffs([mediumDiff]);
// With custom settings, this should be summarized (exactly at the mediumFileLines threshold)
expect(result.summarizedDiffs.length).toEqual(1);
});
tap.test('DiffProcessor should prioritize test files appropriately', async () => {
const processor = new DiffProcessor();
const diffs = [
createSmallDiff('src/core.ts'),
createSmallDiff('test/core.test.ts'),
createSmallDiff('config.json'),
];
const result = processor.processDiffs(diffs);
const formatted = processor.formatForContext(result);
// Source files should come before test files
const srcIndex = formatted.indexOf('src/core.ts');
const testIndex = formatted.indexOf('test/core.test.ts');
expect(srcIndex).toBeLessThan(testIndex);
});
tap.test('DiffProcessor should handle files with no changes gracefully', async () => {
const processor = new DiffProcessor();
const emptyDiff = `--- a/src/file.ts
+++ b/src/file.ts
@@ -1,1 +1,1 @@`;
const result = processor.processDiffs([emptyDiff]);
expect(result.totalFiles).toEqual(1);
expect(result.fullDiffs.length).toEqual(1); // Still included as a small file
});
export default tap.start();

Deleted test file

@@ -1,8 +0,0 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as tsdoc from '../ts/index.js';
tap.test('first test', async () => {
console.log('test');
});
tap.start();

ts/00_commitinfo_data.ts

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@git.zone/tsdoc',
version: '1.5.0',
version: '1.11.0',
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
}

ts/aidocs_classes/commit.ts

@@ -1,6 +1,8 @@
import * as plugins from '../plugins.js';
import { AiDoc } from '../classes.aidoc.js';
import { ProjectContext } from './projectcontext.js';
import { DiffProcessor } from '../classes.diffprocessor.js';
import { logger } from '../logging.js';
export interface INextCommitObject {
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
@@ -27,38 +29,126 @@ export class Commit {
smartgitInstance,
this.projectDir
);
const diffStringArray = await gitRepo.getUncommittedDiff([
// Define comprehensive exclusion patterns
// smartgit@3.3.0+ supports glob patterns natively
const excludePatterns = [
// Lock files
'pnpm-lock.yaml',
'package-lock.json',
]);
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
await taskContextFactory.initialize();
// Generate context specifically for commit task
const contextResult = await taskContextFactory.createContextForCommit(
diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'
);
// Get the optimized context string
let contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// Check for token overflow against model limits
const MODEL_TOKEN_LIMIT = 200000; // o4-mini
if (contextResult.tokenCount > MODEL_TOKEN_LIMIT * 0.9) {
console.log(`⚠️ Warning: Context size (${contextResult.tokenCount} tokens) is close to or exceeds model limit (${MODEL_TOKEN_LIMIT} tokens).`);
console.log(`The model may not be able to process all information effectively.`);
'npm-shrinkwrap.json',
'yarn.lock',
'deno.lock',
'bun.lockb',
// Build artifacts (main culprit for large diffs!)
'dist/**',
'dist_*/**', // dist_ts, dist_web, etc.
'build/**',
'.next/**',
'out/**',
'public/dist/**',
// Compiled/bundled files
'**/*.js.map',
'**/*.d.ts.map',
'**/*.min.js',
'**/*.bundle.js',
'**/*.chunk.js',
// IDE/Editor directories
'.claude/**',
'.cursor/**',
'.vscode/**',
'.idea/**',
'**/*.swp',
'**/*.swo',
// Logs and caches
'.nogit/**',
'**/*.log',
'.cache/**',
'.rpt2_cache/**',
'coverage/**',
'.nyc_output/**',
];
// Pass glob patterns directly to smartgit - it handles matching internally
const diffStringArray = await gitRepo.getUncommittedDiff(excludePatterns);
// Process diffs intelligently using DiffProcessor
let processedDiffString: string;
if (diffStringArray.length > 0) {
// Diagnostic logging for raw diff statistics
const totalChars = diffStringArray.join('\n\n').length;
const estimatedTokens = Math.ceil(totalChars / 4);
console.log(`📊 Raw git diff statistics:`);
console.log(` Files changed: ${diffStringArray.length}`);
console.log(` Total characters: ${totalChars.toLocaleString()}`);
console.log(` Estimated tokens: ${estimatedTokens.toLocaleString()}`);
console.log(` Exclusion patterns: ${excludePatterns.length}`);
// Use DiffProcessor to intelligently handle large diffs
const diffProcessor = new DiffProcessor({
maxDiffTokens: 100000, // Reserve 100k tokens for diffs
smallFileLines: 300, // Most source files are under 300 lines
mediumFileLines: 800, // Only very large files get head/tail treatment
sampleHeadLines: 75, // When sampling, show more context
sampleTailLines: 75, // When sampling, show more context
});
const processedDiff = diffProcessor.processDiffs(diffStringArray);
processedDiffString = diffProcessor.formatForContext(processedDiff);
console.log(`📝 Processed diff statistics:`);
console.log(` Full diffs: ${processedDiff.fullDiffs.length} files`);
console.log(` Summarized: ${processedDiff.summarizedDiffs.length} files`);
console.log(` Metadata only: ${processedDiff.metadataOnly.length} files`);
console.log(` Final tokens: ${processedDiff.totalTokens.toLocaleString()}`);
if (estimatedTokens > 50000) {
console.log(`✅ DiffProcessor reduced token usage: ${estimatedTokens.toLocaleString()}${processedDiff.totalTokens.toLocaleString()}`);
}
} else {
processedDiffString = 'No changes.';
}
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
// Use DualAgentOrchestrator for commit message generation
// Note: No filesystem tool needed - the diff already contains all change information
const commitOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
logPrefix: '[Commit]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate commit messages for semantic versioning compliance.
APPROVE if:
- Version level (fix/feat/BREAKING CHANGE) matches the scope of changes in the diff
- Commit message is clear, professional, and follows conventional commit conventions
- No personal information, licensing details, or AI mentions (Claude/Codex) included
- JSON structure is valid with all required fields
- Scope accurately reflects the changed modules/files
REJECT with specific feedback if:
- Version level doesn't match the scope of changes (e.g., "feat" for a typo fix should be "fix")
- Message is vague, unprofessional, or contains sensitive information
- JSON is malformed or missing required fields
`,
});
await commitOrchestrator.start();
const commitTaskPrompt = `
You create a commit message for a git commit.
The commit message should be based on the files in the project.
You should not include any licensing information.
You should not include any personal information.
Project directory: ${this.projectDir}
Analyze the git diff below to understand what changed and generate a commit message.
You should not include any licensing information or personal information.
Never mention CLAUDE code, or codex.
Important: Answer only in valid JSON.
@@ -67,41 +157,70 @@ Your answer should be parseable with JSON.parse() without modifying anything.
Here is the structure of the JSON you should return:
interface {
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level
recommendedNextVersionScope: string; // scope name like "core", "cli", or specific class names
recommendedNextVersionMessage: string; // the commit message (don't include fix/feat prefix)
recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
recommendedNextVersion: string; // the recommended next version of the project, x.x.x
recommendedNextVersion: string; // the recommended next version x.x.x
}
For the recommendedNextVersionDetails, please only add a detail entries to the array if it has an obvious value to the reader.
For recommendedNextVersionDetails, only add entries that have obvious value to the reader.
You are being given the files of the project. You should use them to create the commit message.
Also you are given a diff
Here is the git diff showing what changed:
`,
messageHistory: [],
userMessage: contextString,
});
${processedDiffString}
Generate the commit message based on these changes.
`;
const commitResult = await commitOrchestrator.run(commitTaskPrompt);
await commitOrchestrator.stop();
if (!commitResult.success) {
throw new Error(`Commit message generation failed: ${commitResult.status}`);
}
// console.log(result.message);
const resultObject: INextCommitObject = JSON.parse(
result.message.replace('```json', '').replace('```', '')
commitResult.result.replace('```json', '').replace('```', '')
);
const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
let previousChangelog: plugins.smartfile.SmartFile;
if (await plugins.smartfile.fs.fileExists(previousChangelogPath)) {
previousChangelog = await plugins.smartfile.SmartFile.fromFilePath(previousChangelogPath);
if (await plugins.fsInstance.file(previousChangelogPath).exists()) {
previousChangelog = await plugins.smartfileFactory.fromFilePath(previousChangelogPath);
}
if (!previousChangelog) {
// lets build the changelog based on that
const commitMessages = await gitRepo.getAllCommitMessages();
console.log(JSON.stringify(commitMessages, null, 2));
let result2 = await this.aiDocsRef.openaiInstance.chat({
messageHistory: [],
systemMessage: `
// Use DualAgentOrchestrator for changelog generation with Guardian validation
const changelogOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
logPrefix: '[Changelog]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate changelog generation.
APPROVE if:
- Changelog follows proper markdown format with ## headers for each version
- Entries are chronologically ordered (newest first)
- Version ranges for trivial commits are properly summarized
- No duplicate or empty entries
- Format matches: ## yyyy-mm-dd - x.x.x - scope
REJECT with feedback if:
- Markdown formatting is incorrect
- Entries are not meaningful or helpful
- Dates or versions are malformed
`,
});
await changelogOrchestrator.start();
const changelogTaskPrompt = `
You are building a changelog.md file for the project.
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.
@@ -115,17 +234,23 @@ A changelog entry should look like this:
You are given:
* the commit messages of the project
Only return the changelog file, so it can be written directly to changelog.md`,
userMessage: `
Only return the changelog file content, so it can be written directly to changelog.md.
Here are the commit messages:
${JSON.stringify(commitMessages, null, 2)}
`,
});
`;
previousChangelog = await plugins.smartfile.SmartFile.fromString(
const changelogResult = await changelogOrchestrator.run(changelogTaskPrompt);
await changelogOrchestrator.stop();
if (!changelogResult.success) {
throw new Error(`Changelog generation failed: ${changelogResult.status}`);
}
previousChangelog = plugins.smartfileFactory.fromString(
previousChangelogPath,
result2.message.replaceAll('```markdown', '').replaceAll('```', ''),
changelogResult.result.replaceAll('```markdown', '').replaceAll('```', ''),
'utf8'
);
}

ts/aidocs_classes/description.ts

@@ -1,6 +1,7 @@
import type { AiDoc } from '../classes.aidoc.js';
import * as plugins from '../plugins.js';
import { ProjectContext } from './projectcontext.js';
import { logger } from '../logging.js';
interface IDescriptionInterface {
description: string;
@@ -18,57 +19,97 @@ export class Description {
}
public async build() {
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
await taskContextFactory.initialize();
// Generate context specifically for description task
const contextResult = await taskContextFactory.createContextForDescription();
const contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// Use DualAgentOrchestrator with filesystem tool for agent-driven exploration
const descriptionOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
maxIterations: 15,
maxResultChars: 10000, // Limit tool output to prevent token explosion
maxHistoryMessages: 15, // Limit history window
logPrefix: '[Description]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate description generation tool calls and outputs.
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create a json adhering the following interface:
{
description: string; // a sensible short, one sentence description of the project
keywords: string[]; // an array of tags that describe the project
}
APPROVE tool calls for:
- Reading package.json, npmextra.json, or source files in the ts/ directory
- Listing directory contents to understand project structure
- Using tree to see project structure
The description should be based on what you understand from the project's files.
The keywords should be based on use cases you see from the files.
Don't be cheap about the way you think.
REJECT tool calls for:
- Reading files outside the project directory
- Writing, deleting, or modifying any files
- Any destructive operations
Important: Answer only in valid JSON.
You answer should be parseable with JSON.parse() without modifying anything.
For final output, APPROVE if:
- JSON is valid and parseable
- Description is a clear, concise one-sentence summary
- Keywords are relevant to the project's use cases
- Both description and keywords fields are present
Don't wrap the JSON in three ticks json!!!
`,
messageHistory: [],
userMessage: contextString,
REJECT final output if:
- JSON is malformed or wrapped in markdown code blocks
- Description is too long or vague
- Keywords are irrelevant or generic
`,
});
console.log(result.message);
// Register scoped filesystem tool for agent exploration
descriptionOrchestrator.registerScopedFilesystemTool(this.projectDir);
await descriptionOrchestrator.start();
const descriptionTaskPrompt = `
You create a project description and keywords for an npm package.
PROJECT DIRECTORY: ${this.projectDir}
Use the filesystem tool to explore the project and understand what it does:
1. First, use tree to see the project structure
2. Read package.json to understand the package name and current description
3. Read npmextra.json if it exists for additional metadata
4. Read key source files in ts/ directory to understand the implementation
Then generate a description and keywords based on your exploration.
Your FINAL response must be valid JSON adhering to this interface:
{
description: string; // a sensible short, one sentence description of the project
keywords: string[]; // an array of tags that describe the project based on use cases
}
Important: Answer only in valid JSON.
Your answer should be parseable with JSON.parse() without modifying anything.
Don't wrap the JSON in \`\`\`json\`\`\` - just return the raw JSON object.
`;
const descriptionResult = await descriptionOrchestrator.run(descriptionTaskPrompt);
await descriptionOrchestrator.stop();
if (!descriptionResult.success) {
throw new Error(`Description generation failed: ${descriptionResult.status}`);
}
console.log(descriptionResult.result);
const resultObject: IDescriptionInterface = JSON.parse(
result.message.replace('```json', '').replace('```', ''),
descriptionResult.result.replace('```json', '').replace('```', ''),
);
// Create a standard ProjectContext instance for file operations
// Use ProjectContext to get file handles for writing
const projectContext = new ProjectContext(this.projectDir);
const files = await projectContext.gatherFiles();
// Update npmextra.json
const npmextraJson = files.smartfilesNpmextraJSON;
const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());
npmextraJsonContent.gitzone.module.description = resultObject.description;
npmextraJsonContent.gitzone.module.keywords = resultObject.keywords;
npmextraJsonContent['@git.zone/cli'].module.description = resultObject.description;
npmextraJsonContent['@git.zone/cli'].module.keywords = resultObject.keywords;
npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
await npmextraJson.write();
// do the same with packageJson
// Update package.json
const packageJson = files.smartfilePackageJSON;
const packageJsonContent = JSON.parse(packageJson.contents.toString());
packageJsonContent.description = resultObject.description;
@@ -79,6 +120,6 @@ Don't wrap the JSON in three ticks json!!!
console.log(`\n======================\n`);
console.log(JSON.stringify(resultObject, null, 2));
console.log(`\n======================\n`);
return result.message;
return descriptionResult.result;
}
}

ts/aidocs_classes/projectcontext.ts

@@ -13,31 +13,29 @@ export class ProjectContext {
}
public async gatherFiles() {
const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilePackageJSON = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'package.json'),
this.projectDir,
);
const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilesReadme = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'readme.md'),
this.projectDir,
);
const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilesReadmeHints = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'readme.hints.md'),
this.projectDir,
);
const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
const smartfilesNpmextraJSON = await plugins.smartfileFactory.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json'),
this.projectDir,
);
const smartfilesMod = await plugins.smartfile.fs.fileTreeToObject(
const smartfilesMod = await plugins.smartfileFactory.virtualDirectoryFromPath(
this.projectDir,
'ts*/**/*.ts',
);
const smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
).then(vd => vd.filter(f => f.relative.startsWith('ts') && f.relative.endsWith('.ts')).listFiles());
const smartfilesTest = await plugins.smartfileFactory.virtualDirectoryFromPath(
this.projectDir,
'test/**/*.ts',
);
).then(vd => vd.filter(f => f.relative.startsWith('test/') && f.relative.endsWith('.ts')).listFiles());
return {
smartfilePackageJSON,
smartfilesReadme,
@@ -66,21 +64,14 @@ ${smartfile.contents.toString()}
}
/**
* Calculate the token count for a string using the GPT tokenizer
* @param text The text to count tokens for
* @param model The model to use for token counting (default: gpt-3.5-turbo)
* @returns The number of tokens in the text
* Estimate token count for a string
* Uses a rough estimate of 4 characters per token
* @param text The text to estimate tokens for
* @returns Estimated number of tokens
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
try {
// Use the gpt-tokenizer library to count tokens
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
console.error('Error counting tokens:', error);
// Provide a rough estimate (4 chars per token) if tokenization fails
return Math.ceil(text.length / 4);
}
public countTokens(text: string): number {
// Rough estimate: ~4 characters per token for English text
return Math.ceil(text.length / 4);
}
private async buildContext(dirArg: string) {

ts/aidocs_classes/readme.ts

@@ -17,72 +17,111 @@ export class Readme {
public async build() {
let finalReadmeString = ``;
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
await taskContextFactory.initialize();
// Generate context specifically for readme task
const contextResult = await taskContextFactory.createContextForReadme();
const contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// lets first check legal before introducung any cost
// First check legal info before introducing any cost
const projectContext = new ProjectContext(this.projectDir);
const npmExtraJson = JSON.parse(
(await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
);
const legalInfo = npmExtraJson?.tsdoc?.legal;
const legalInfo = npmExtraJson?.['@git.zone/tsdoc']?.legal;
if (!legalInfo) {
const error = new Error(`No legal information found in npmextra.json`);
console.log(error);
}
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.
// Use DualAgentOrchestrator with filesystem tool for agent-driven exploration
const readmeOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
maxIterations: 25,
maxResultChars: 15000, // Limit tool output to prevent token explosion
maxHistoryMessages: 20, // Limit history window
logPrefix: '[README]',
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate README generation tool calls and outputs.
The Readme should follow the following template:
APPROVE tool calls for:
- Reading any files within the project directory (package.json, ts/*.ts, readme.md, etc.)
- Using tree to see project structure
- Using glob to find source files
- Listing directory contents
REJECT tool calls for:
- Reading files outside the project directory
- Writing, deleting, or modifying any files
- Any destructive operations
For final README output, APPROVE if:
- README follows proper markdown format
- Contains Install and Usage sections
- Code examples are correct TypeScript/ESM syntax
- Documentation is comprehensive and helpful
REJECT final output if:
- README is incomplete or poorly formatted
- Contains licensing information (added separately)
- Uses CommonJS syntax instead of ESM
- Contains "in conclusion" or similar filler
`,
});
// Register scoped filesystem tool for agent exploration
readmeOrchestrator.registerScopedFilesystemTool(this.projectDir);
await readmeOrchestrator.start();
const readmeTaskPrompt = `
You create markdown READMEs for npm projects. You only output the markdown readme.
PROJECT DIRECTORY: ${this.projectDir}
Use the filesystem tool to explore the project and understand what it does:
1. First, use tree to see the project structure (maxDepth: 3)
2. Read package.json to understand the package name, description, and dependencies
3. Read the existing readme.md if it exists (use it as a base, improve and expand)
4. Read readme.hints.md if it exists (contains hints for documentation)
5. Read key source files in ts/ directory to understand the API and implementation
6. Focus on exported classes, interfaces, and functions
Then generate a comprehensive README following this template:
# Project Name
[
The name is the module name of package.json
The description is in the description field of package.json
]
[The name from package.json and description]
## Install
[
Write a short text on how to install the project
]
[Short text on how to install the project]
## Usage
[
[
Give code examples here.
Construct sensible scenarios for the user.
Make sure to show a complete set of features of the module.
Don't omit use cases.
It does not matter how much time you need.
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
Super important: Check for completenes.
Don't include any licensing information. This will be added in a later step.
Avoid "in conclusions".
Good to know:
* npmextra.json contains overall module information.
* readme.hints.md provides valuable hints about module ideas.
Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, expand and improve.
Check for completeness.
Don't include any licensing information. This will be added later.
Avoid "in conclusion" statements.
]
`,
messageHistory: [],
userMessage: contextString,
});
`;
finalReadmeString += result.message + '\n' + legalInfo;
const readmeResult = await readmeOrchestrator.run(readmeTaskPrompt);
await readmeOrchestrator.stop();
if (!readmeResult.success) {
throw new Error(`README generation failed: ${readmeResult.status}`);
}
// Clean up markdown formatting if wrapped in code blocks
let resultMessage = readmeResult.result
.replace(/^```markdown\n?/i, '')
.replace(/\n?```$/i, '');
finalReadmeString += resultMessage + '\n' + legalInfo;
console.log(`\n======================\n`);
console.log(resultMessage);
console.log(`\n======================\n`);
const readme = (await projectContext.gatherFiles()).smartfilesReadme;
@@ -93,60 +132,99 @@ The Readme should follow the following template:
const tsPublishInstance = new plugins.tspublish.TsPublish();
const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);
for (const subModule of Object.keys(subModules)) {
logger.log('info', `Building readme for ${subModule}`);
const subModuleContextString = await projectContext.update();
const subModulePath = plugins.path.join(paths.cwd, subModule);
const tspublishData = await plugins.fsInstance
.file(plugins.path.join(subModulePath, 'tspublish.json'))
.encoding('utf8')
.read();
// Create a new orchestrator with filesystem tool for each submodule
const subModuleOrchestrator = new plugins.smartagent.DualAgentOrchestrator({
smartAiInstance: this.aiDocsRef.smartAiInstance,
defaultProvider: 'openai',
maxIterations: 20,
maxResultChars: 12000,
maxHistoryMessages: 15,
logPrefix: `[README:${subModule}]`,
onProgress: (event) => logger.log(event.logLevel, event.logMessage),
guardianPolicyPrompt: `
You validate README generation for submodules.
APPROVE tool calls for:
- Reading any files within the submodule directory
- Using tree to see structure
- Using glob to find source files
REJECT tool calls for:
- Reading files outside the submodule directory
- Writing, deleting, or modifying any files
- Any destructive operations
APPROVE final README if comprehensive, well-formatted markdown with ESM TypeScript examples.
REJECT incomplete READMEs or those with licensing info.
`,
});
// Register scoped filesystem tool for the submodule directory
subModuleOrchestrator.registerScopedFilesystemTool(subModulePath);
await subModuleOrchestrator.start();
const subModulePrompt = `
You create markdown READMEs for npm projects. You only output the markdown readme.
SUB MODULE: ${subModule}
SUB MODULE DIRECTORY: ${subModulePath}
IMPORTANT: YOU ARE CREATING THE README FOR THIS SUB MODULE: ${subModule}
The Sub Module will be published with:
${JSON.stringify(tspublishData, null, 2)}
Use the filesystem tool to explore the submodule:
1. Use tree to see the submodule structure
2. Read package.json to understand the submodule
3. Read source files in ts/ directory to understand the implementation
Generate a README following the template:
# Project Name
[name and description from package.json]
## Install
[installation instructions]
## Usage
[
Code examples with complete features.
ESM TypeScript syntax only.
Write at least 4000 words.
No licensing information.
No "in conclusion".
]
Don't use \`\`\` at the beginning or end. Only for code blocks.
`;
const subModuleResult = await subModuleOrchestrator.run(subModulePrompt);
await subModuleOrchestrator.stop();
if (subModuleResult.success) {
const subModuleReadmeString = subModuleResult.result
.replace(/^```markdown\n?/i, '')
.replace(/\n?```$/i, '') + '\n' + legalInfo;
await plugins.fsInstance
.file(plugins.path.join(subModulePath, 'readme.md'))
.encoding('utf8')
.write(subModuleReadmeString);
logger.log('success', `Built readme for ${subModule}`);
} else {
logger.log('error', `Failed to build readme for ${subModule}: ${subModuleResult.status}`);
}
}
return resultMessage;
}
}
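Condensed, the new README pipeline boils down to a single orchestrator lifecycle. The sketch below is assembled from the hunks above; `smartAiInstance`, `projectDir`, and `taskPrompt` are stand-ins for the values the class wires in, and the option values shown are the ones used above, not requirements:

```typescript
import * as plugins from './plugins.js';

declare const smartAiInstance: plugins.smartai.SmartAi; // a started SmartAi, as held by AiDoc
declare const projectDir: string;
declare const taskPrompt: string;

const orchestrator = new plugins.smartagent.DualAgentOrchestrator({
  smartAiInstance,
  defaultProvider: 'openai',
  maxIterations: 20,           // cap on the explore/generate loop
  guardianPolicyPrompt: '...', // approve scoped reads, reject writes (see policy above)
  onProgress: (event) => console.log(event.logMessage),
});
orchestrator.registerScopedFilesystemTool(projectDir); // reads confined to the project
await orchestrator.start();
const result = await orchestrator.run(taskPrompt);
await orchestrator.stop();
if (!result.success) throw new Error(`README generation failed: ${result.status}`);
const markdown = result.result
  .replace(/^```markdown\n?/i, '') // strip a stray outer fence, as done above
  .replace(/\n?```$/i, '');
```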

View File

@@ -8,7 +8,7 @@ export class AiDoc {
public npmextraKV: plugins.npmextra.KeyValueStore;
public qenvInstance: plugins.qenv.Qenv;
public aidocInteract: plugins.smartinteract.SmartInteract;
public smartAiInstance: plugins.smartai.SmartAi;
argvArg: any;
@@ -36,9 +36,25 @@ export class AiDoc {
this.aidocInteract = new plugins.smartinteract.SmartInteract();
this.qenvInstance = new plugins.qenv.Qenv();
if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
// Migrate old KV store path to new path if needed
const homeDir = plugins.smartpath.get.home();
const oldKvPath = plugins.path.join(homeDir, '.npmextra/kv/tsdoc.json');
const newKvDir = plugins.path.join(homeDir, '.npmextra/kv/@git.zone');
const newKvPath = plugins.path.join(newKvDir, 'tsdoc.json');
if (
await plugins.fsInstance.file(oldKvPath).exists() &&
!(await plugins.fsInstance.file(newKvPath).exists())
) {
console.log('Migrating tsdoc KeyValueStore to @git.zone/tsdoc...');
await plugins.fsInstance.directory(newKvDir).recursive().create();
await plugins.fsInstance.file(oldKvPath).copy(newKvPath);
await plugins.fsInstance.file(oldKvPath).delete();
console.log('Migration complete: tsdoc.json -> @git.zone/tsdoc.json');
}
this.npmextraKV = new plugins.npmextra.KeyValueStore({
typeArg: 'userHomeDir',
identityArg: '@git.zone/tsdoc',
mandatoryKeys: ['OPENAI_TOKEN'],
});
@@ -64,15 +80,35 @@ export class AiDoc {
await this.npmextraKV.writeKey('OPENAI_TOKEN', this.openaiToken);
}
}
if (!this.openaiToken && this.npmextraKV) {
this.openaiToken = await this.npmextraKV.readKey('OPENAI_TOKEN');
}
// let's assume we have an OPENAI_TOKEN now
this.smartAiInstance = new plugins.smartai.SmartAi({
openaiToken: this.openaiToken,
});
await this.smartAiInstance.start();
}
public async stop() {
if (this.smartAiInstance) {
await this.smartAiInstance.stop();
}
// No explicit cleanup needed for npmextraKV or aidocInteract
// They don't keep event loop alive
}
/**
* Get the OpenAI provider for direct chat calls
* This is a convenience getter to access the provider from SmartAi
*/
public get openaiProvider(): plugins.smartai.OpenAiProvider {
return this.smartAiInstance.openaiProvider;
}
public getOpenaiToken(): string {
return this.openaiToken;
}
public async buildReadme(projectDirArg: string) {
@@ -118,13 +154,12 @@ export class AiDoc {
}
/**
* Estimate token count in a text string
* @param text The text to estimate tokens for
* @returns Estimated number of tokens
*/
public countTokens(text: string): number {
const projectContextInstance = new aiDocsClasses.ProjectContext('');
return projectContextInstance.countTokens(text);
}
}
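The reworked class now has an explicit lifecycle. A minimal consumer sketch, mirroring how ts/cli.ts below drives it and assuming an ESM context with top-level await:

```typescript
import { AiDoc } from './classes.aidoc.js';

const aidoc = new AiDoc();
await aidoc.start();                      // boots SmartAi, migrating the old KV store if present
const tokens = aidoc.countTokens('some context string'); // note: no model argument anymore
const provider = aidoc.openaiProvider;    // direct provider access for plain chat calls
await aidoc.buildReadme(process.cwd());
await aidoc.stop();                       // releases SmartAi so the process can exit
```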

353
ts/classes.diffprocessor.ts Normal file
View File

@@ -0,0 +1,353 @@
/**
* Intelligent git diff processor that handles large diffs by sampling and prioritization
* instead of blind truncation.
*/
export interface IDiffFileInfo {
filepath: string;
status: 'added' | 'modified' | 'deleted';
linesAdded: number;
linesRemoved: number;
totalLines: number;
estimatedTokens: number;
diffContent: string;
}
export interface IProcessedDiff {
summary: string; // Human-readable overview
fullDiffs: string[]; // Small files included fully
summarizedDiffs: string[]; // Medium files with head/tail
metadataOnly: string[]; // Large files, just stats
totalFiles: number;
totalTokens: number;
}
export interface IDiffProcessorOptions {
maxDiffTokens?: number; // Maximum tokens for entire diff section (default: 100000)
smallFileLines?: number; // Files <= this are included fully (default: 50)
mediumFileLines?: number; // Files <= this are summarized (default: 200)
sampleHeadLines?: number; // Lines to show at start of medium files (default: 20)
sampleTailLines?: number; // Lines to show at end of medium files (default: 20)
}
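// Illustration (assuming the defaults set in the constructor below): a
// 40-line diff falls under smallFileLines (50) and is included in full; a
// 150-line diff falls under mediumFileLines (200) and is sampled as 20 head
// + 20 tail lines; a 500-line diff is reduced to a one-line metadata entry.
// Tightening the budget, e.g. new DiffProcessor({ maxDiffTokens: 50000 }),
// only changes how many files fit before the rest fall back to metadata.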
export class DiffProcessor {
private options: Required<IDiffProcessorOptions>;
constructor(options: IDiffProcessorOptions = {}) {
this.options = {
maxDiffTokens: options.maxDiffTokens ?? 100000,
smallFileLines: options.smallFileLines ?? 50,
mediumFileLines: options.mediumFileLines ?? 200,
sampleHeadLines: options.sampleHeadLines ?? 20,
sampleTailLines: options.sampleTailLines ?? 20,
};
}
/**
* Process an array of git diffs into a structured, token-efficient format
*/
public processDiffs(diffStringArray: string[]): IProcessedDiff {
// Parse all diffs into file info objects
const fileInfos: IDiffFileInfo[] = diffStringArray
.map(diffString => this.parseDiffFile(diffString))
.filter(info => info !== null) as IDiffFileInfo[];
// Prioritize files (source files first, build artifacts last)
const prioritized = this.prioritizeFiles(fileInfos);
const result: IProcessedDiff = {
summary: '',
fullDiffs: [],
summarizedDiffs: [],
metadataOnly: [],
totalFiles: prioritized.length,
totalTokens: 0,
};
let tokensUsed = 0;
const tokenBudget = this.options.maxDiffTokens;
// Categorize and include files based on size and token budget
for (const fileInfo of prioritized) {
const remainingBudget = tokenBudget - tokensUsed;
if (remainingBudget <= 0) {
// Budget exhausted - rest are metadata only
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
continue;
}
if (fileInfo.totalLines <= this.options.smallFileLines) {
// Small file - include fully if budget allows
if (fileInfo.estimatedTokens <= remainingBudget) {
const statusPrefix = this.getFileStatusPrefix(fileInfo);
result.fullDiffs.push(`${statusPrefix}${fileInfo.diffContent}`);
tokensUsed += fileInfo.estimatedTokens;
} else {
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
}
} else if (fileInfo.totalLines <= this.options.mediumFileLines) {
// Medium file - try to include summary with head/tail
const summary = this.extractDiffSample(
fileInfo,
this.options.sampleHeadLines,
this.options.sampleTailLines
);
const summaryTokens = Math.ceil(summary.length / 4); // Rough estimate
if (summaryTokens <= remainingBudget) {
result.summarizedDiffs.push(summary);
tokensUsed += summaryTokens;
} else {
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
}
} else {
// Large file - metadata only
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
}
}
result.totalTokens = tokensUsed;
result.summary = this.generateSummary(result);
return result;
}
/**
* Format the processed diff for inclusion in context
*/
public formatForContext(processed: IProcessedDiff): string {
const sections: string[] = [];
// Summary section
sections.push('====== GIT DIFF SUMMARY ======');
sections.push(processed.summary);
sections.push('');
// Full diffs section
if (processed.fullDiffs.length > 0) {
sections.push(`====== FULL DIFFS (${processed.fullDiffs.length} files) ======`);
sections.push(processed.fullDiffs.join('\n\n'));
sections.push('');
}
// Summarized diffs section
if (processed.summarizedDiffs.length > 0) {
sections.push(`====== SUMMARIZED DIFFS (${processed.summarizedDiffs.length} files) ======`);
sections.push(processed.summarizedDiffs.join('\n\n'));
sections.push('');
}
// Metadata only section
if (processed.metadataOnly.length > 0) {
sections.push(`====== METADATA ONLY (${processed.metadataOnly.length} files) ======`);
sections.push(processed.metadataOnly.join('\n'));
sections.push('');
}
sections.push('====== END OF GIT DIFF ======');
return sections.join('\n');
}
/**
* Parse a single git diff string into file information
*/
private parseDiffFile(diffString: string): IDiffFileInfo | null {
if (!diffString || diffString.trim().length === 0) {
return null;
}
const lines = diffString.split('\n');
let filepath = '';
let status: 'added' | 'modified' | 'deleted' = 'modified';
let linesAdded = 0;
let linesRemoved = 0;
// Parse diff header to extract filepath and status
for (const line of lines) {
if (line.startsWith('--- a/')) {
filepath = line.substring(6);
} else if (line.startsWith('+++ b/')) {
const newPath = line.substring(6);
if (newPath === '/dev/null') {
status = 'deleted';
} else if (filepath === '/dev/null') {
status = 'added';
filepath = newPath;
} else {
filepath = newPath;
}
} else if (line.startsWith('+') && !line.startsWith('+++')) {
linesAdded++;
} else if (line.startsWith('-') && !line.startsWith('---')) {
linesRemoved++;
}
}
const totalLines = linesAdded + linesRemoved;
const estimatedTokens = Math.ceil(diffString.length / 4);
return {
filepath,
status,
linesAdded,
linesRemoved,
totalLines,
estimatedTokens,
diffContent: diffString,
};
}
/**
* Prioritize files by importance (source files before build artifacts)
*/
private prioritizeFiles(files: IDiffFileInfo[]): IDiffFileInfo[] {
return files.sort((a, b) => {
const scoreA = this.getFileImportanceScore(a.filepath);
const scoreB = this.getFileImportanceScore(b.filepath);
return scoreB - scoreA; // Higher score first
});
}
/**
* Calculate importance score for a file path
*/
private getFileImportanceScore(filepath: string): number {
// Source files - highest priority
if (filepath.match(/^(src|lib|app|components|pages|api)\//)) {
return 100;
}
// Test files - high priority
if (filepath.match(/\.(test|spec)\.(ts|js|tsx|jsx)$/) || filepath.startsWith('test/')) {
return 80;
}
// Configuration files - medium-high priority
if (filepath.match(/\.(json|yaml|yml|toml|config\.(ts|js))$/)) {
return 60;
}
// Documentation - medium priority
if (filepath.match(/\.(md|txt|rst)$/)) {
return 40;
}
// Build artifacts - low priority
if (filepath.match(/^(dist|build|out|\.next|public\/dist)\//)) {
return 10;
}
// Start with default priority
let score = 50;
// Boost interface/type files - they're usually small but critical
if (filepath.includes('interfaces/') || filepath.includes('.types.')) {
score += 20;
}
// Boost entry points
if (filepath.endsWith('index.ts') || filepath.endsWith('mod.ts')) {
score += 15;
}
return score;
}
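// For example: 'src/app.ts' returns 100 via the first early return, and
// 'dist/bundle.js' returns 10; 'ts/interfaces/index.ts' misses every early
// pattern, so it starts at the default 50, gains +20 for 'interfaces/' and
// +15 for ending in 'index.ts', scoring 85.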
/**
* Extract head and tail lines from a diff, omitting the middle
*/
private extractDiffSample(fileInfo: IDiffFileInfo, headLines: number, tailLines: number): string {
const lines = fileInfo.diffContent.split('\n');
const totalLines = lines.length;
if (totalLines <= headLines + tailLines) {
// File is small enough to include fully
return fileInfo.diffContent;
}
// Extract file metadata from diff header
const headerLines: string[] = [];
let bodyStartIndex = 0;
for (let i = 0; i < lines.length; i++) {
if (lines[i].startsWith('@@')) {
headerLines.push(...lines.slice(0, i + 1));
bodyStartIndex = i + 1;
break;
}
}
const bodyLines = lines.slice(bodyStartIndex);
const head = bodyLines.slice(0, headLines);
const tail = bodyLines.slice(-tailLines);
const omittedLines = bodyLines.length - headLines - tailLines;
const statusEmoji = fileInfo.status === 'added' ? '' :
fileInfo.status === 'deleted' ? '' : '📝';
const parts: string[] = [];
parts.push(`${statusEmoji} FILE: ${fileInfo.filepath}`);
parts.push(`CHANGES: +${fileInfo.linesAdded} lines, -${fileInfo.linesRemoved} lines (${fileInfo.totalLines} total)`);
parts.push('');
parts.push(...headerLines);
parts.push(...head);
parts.push('');
parts.push(`[... ${omittedLines} lines omitted - use Read tool to see full file ...]`);
parts.push('');
parts.push(...tail);
return parts.join('\n');
}
/**
* Get file status prefix with emoji
*/
private getFileStatusPrefix(fileInfo: IDiffFileInfo): string {
const statusEmoji = fileInfo.status === 'added' ? '' :
fileInfo.status === 'deleted' ? '' : '📝';
return `${statusEmoji} `;
}
/**
* Extract filepath from diff content
*/
private extractFilepathFromDiff(diffContent: string): string {
const lines = diffContent.split('\n');
for (const line of lines) {
if (line.startsWith('+++ b/')) {
return line.substring(6);
}
}
return 'unknown';
}
/**
* Format file info as metadata only
*/
private formatMetadataOnly(fileInfo: IDiffFileInfo): string {
const statusEmoji = fileInfo.status === 'added' ? '' :
fileInfo.status === 'deleted' ? '' : '📝';
return `${statusEmoji} ${fileInfo.filepath} (+${fileInfo.linesAdded}, -${fileInfo.linesRemoved})`;
}
/**
* Generate human-readable summary of processed diff
*/
private generateSummary(result: IProcessedDiff): string {
const parts: string[] = [];
parts.push(`Files changed: ${result.totalFiles} total`);
parts.push(`- ${result.fullDiffs.length} included in full`);
parts.push(`- ${result.summarizedDiffs.length} summarized (head/tail shown)`);
parts.push(`- ${result.metadataOnly.length} metadata only`);
parts.push(`Estimated tokens: ~${result.totalTokens.toLocaleString()}`);
if (result.metadataOnly.length > 0) {
parts.push('');
parts.push('NOTE: Some files excluded to stay within token budget.');
parts.push('Use Read tool with specific file paths to see full content.');
}
return parts.join('\n');
}
}
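Taken together, the class is a pure transformation with two public entry points. A minimal usage sketch, assuming the input is one unified-diff chunk per changed file, as the parser expects:

```typescript
import { DiffProcessor } from './classes.diffprocessor.js';

declare const perFileDiffStrings: string[]; // one `git diff` chunk per file (assumed input)

const processor = new DiffProcessor({ maxDiffTokens: 50000 }); // tighter than the 100k default
const processed = processor.processDiffs(perFileDiffStrings);
console.log(processed.summary);                              // file counts per tier plus token estimate
const contextBlock = processor.formatForContext(processed);  // ready to splice into the commit prompt
```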

View File

@@ -33,19 +33,19 @@ export class TypeDoc {
include: [],
};
let startDirectory = '';
if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts')).exists()) {
data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
startDirectory = 'ts';
}
if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts_web')).exists()) {
data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
if (!startDirectory) {
startDirectory = 'ts_web';
}
}
await plugins.fsInstance.file(paths.tsconfigFile).encoding('utf8').write(JSON.stringify(data));
let targetDir = paths.publicDir;
if (options?.publicSubdir) {
targetDir = plugins.path.join(targetDir, options.publicSubdir);
@@ -53,6 +53,6 @@ export class TypeDoc {
await this.smartshellInstance.exec(
`typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
);
await plugins.fsInstance.file(paths.tsconfigFile).delete();
}
}
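The same smartfile-to-SmartFs migration recurs throughout this change. Every fluent call below appears verbatim in some hunk of this diff; the paths are illustrative:

```typescript
import * as plugins from './plugins.js';

// Existence checks, writes, reads, copies, and deletes all hang off one instance:
if (await plugins.fsInstance.directory('./ts').exists()) { /* include ts/ */ }
await plugins.fsInstance.file('tsconfig.docs.json').encoding('utf8').write('{}');
const raw = await plugins.fsInstance.file('tspublish.json').encoding('utf8').read();
await plugins.fsInstance.directory('.npmextra/kv/@git.zone').recursive().create();
await plugins.fsInstance.file('old.json').copy('new.json');
await plugins.fsInstance.directory('public').recursive().delete();
```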

153
ts/cli.ts
View File

@@ -4,7 +4,6 @@ import { logger } from './logging.js';
import { TypeDoc } from './classes.typedoc.js';
import { AiDoc } from './classes.aidoc.js';
export const run = async () => {
const tsdocCli = new plugins.smartcli.Smartcli();
@@ -31,18 +30,7 @@ export const run = async () => {
tsdocCli.addCommand('aidoc').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
// Get context token count if requested
if (argvArg.tokens || argvArg.showTokens) {
logger.log('info', `Calculating context token count...`);
const tokenCount = await aidocInstance.getProjectContextTokenCount(paths.cwd);
logger.log('ok', `Total context token count: ${tokenCount}`);
if (argvArg.tokensOnly) {
return; // Exit early if we only want token count
}
}
logger.log('info', `Generating new readme...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildReadme(paths.cwd);
@@ -51,125 +39,40 @@ export const run = async () => {
await aidocInstance.buildDescription(paths.cwd);
});
tsdocCli.addCommand('readme').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
logger.log('info', `Generating new readme...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildReadme(paths.cwd);
});
tsdocCli.addCommand('description').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
logger.log('info', `Generating new description and keywords...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildDescription(paths.cwd);
});
tsdocCli.addCommand('commit').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
logger.log('info', `Generating commit message...`);
logger.log('info', `This may take some time...`);
const commitObject = await aidocInstance.buildNextCommitObject(paths.cwd);
logger.log('ok', `Commit message generated:`);
console.log(JSON.stringify(commitObject, null, 2));
});
tsdocCli.addCommand('test').subscribe((argvArg) => {
tsdocCli.triggerCommand('typedoc', argvArg);
process.on('exit', async () => {
await plugins.fsInstance.directory(paths.publicDir).recursive().delete();
});
});

View File

@@ -1,209 +0,0 @@
import * as plugins from '../plugins.js';
import type { IContextConfig, ITrimConfig, ITaskConfig, TaskType, ContextMode } from './types.js';
/**
* Manages configuration for context building
*/
export class ConfigManager {
private static instance: ConfigManager;
private config: IContextConfig;
private projectDir: string = '';
/**
* Get the singleton instance of ConfigManager
*/
public static getInstance(): ConfigManager {
if (!ConfigManager.instance) {
ConfigManager.instance = new ConfigManager();
}
return ConfigManager.instance;
}
/**
* Private constructor for singleton pattern
*/
private constructor() {
this.config = this.getDefaultConfig();
}
/**
* Initialize the config manager with a project directory
* @param projectDir The project directory
*/
public async initialize(projectDir: string): Promise<void> {
this.projectDir = projectDir;
await this.loadConfig();
}
/**
* Get the default configuration
*/
private getDefaultConfig(): IContextConfig {
return {
maxTokens: 190000, // Default for o4-mini with some buffer
defaultMode: 'trimmed',
taskSpecificSettings: {
readme: {
mode: 'trimmed',
includePaths: ['ts/', 'src/'],
excludePaths: ['test/', 'node_modules/']
},
commit: {
mode: 'trimmed',
focusOnChangedFiles: true
},
description: {
mode: 'trimmed',
includePackageInfo: true
}
},
trimming: {
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true
}
};
}
/**
* Load configuration from npmextra.json
*/
private async loadConfig(): Promise<void> {
try {
if (!this.projectDir) {
return;
}
// Create KeyValueStore for this project
// We'll just use smartfile directly instead of KeyValueStore
// Read the npmextra.json file
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json')
);
const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());
// Check for tsdoc context configuration
if (npmextraContent?.tsdoc?.context) {
// Merge with default config
this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
}
} catch (error) {
console.error('Error loading context configuration:', error);
}
}
/**
* Merge configurations, with userConfig taking precedence
* @param defaultConfig The default configuration
* @param userConfig The user configuration
*/
private mergeConfigs(defaultConfig: IContextConfig, userConfig: Partial<IContextConfig>): IContextConfig {
const result: IContextConfig = { ...defaultConfig };
// Merge top-level properties
if (userConfig.maxTokens !== undefined) result.maxTokens = userConfig.maxTokens;
if (userConfig.defaultMode !== undefined) result.defaultMode = userConfig.defaultMode;
// Merge task-specific settings
if (userConfig.taskSpecificSettings) {
result.taskSpecificSettings = result.taskSpecificSettings || {};
// For each task type, merge settings
(['readme', 'commit', 'description'] as TaskType[]).forEach(taskType => {
if (userConfig.taskSpecificSettings?.[taskType]) {
result.taskSpecificSettings![taskType] = {
...result.taskSpecificSettings![taskType],
...userConfig.taskSpecificSettings[taskType]
};
}
});
}
// Merge trimming configuration
if (userConfig.trimming) {
result.trimming = {
...result.trimming,
...userConfig.trimming
};
}
return result;
}
/**
* Get the complete configuration
*/
public getConfig(): IContextConfig {
return this.config;
}
/**
* Get the trimming configuration
*/
public getTrimConfig(): ITrimConfig {
return this.config.trimming || {};
}
/**
* Get configuration for a specific task
* @param taskType The type of task
*/
public getTaskConfig(taskType: TaskType): ITaskConfig {
// Get task-specific config or empty object
const taskConfig = this.config.taskSpecificSettings?.[taskType] || {};
// If mode is not specified, use default mode
if (!taskConfig.mode) {
taskConfig.mode = this.config.defaultMode;
}
return taskConfig;
}
/**
* Get the maximum tokens allowed for context
*/
public getMaxTokens(): number {
return this.config.maxTokens || 190000;
}
/**
* Update the configuration
* @param config The new configuration
*/
public async updateConfig(config: Partial<IContextConfig>): Promise<void> {
// Merge with existing config
this.config = this.mergeConfigs(this.config, config);
try {
if (!this.projectDir) {
return;
}
// Read the existing npmextra.json file
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
let npmextraContent = {};
if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
}
// Update the tsdoc context configuration
const typedContent = npmextraContent as any;
if (!typedContent.tsdoc) typedContent.tsdoc = {};
typedContent.tsdoc.context = this.config;
// Write back to npmextra.json
const updatedContent = JSON.stringify(npmextraContent, null, 2);
await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
} catch (error) {
console.error('Error updating context configuration:', error);
}
}
}

View File

@@ -1,246 +0,0 @@
import * as plugins from '../plugins.js';
import type { ITrimConfig, ContextMode } from './types.js';
/**
* Class responsible for trimming file contents to reduce token usage
* while preserving important information for context
*/
export class ContextTrimmer {
private config: ITrimConfig;
/**
* Create a new ContextTrimmer with the given configuration
* @param config The trimming configuration
*/
constructor(config?: ITrimConfig) {
this.config = {
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true,
...config
};
}
/**
* Trim a file's contents based on the configuration
* @param filePath The path to the file
* @param content The file's contents
* @param mode The context mode to use
* @returns The trimmed file contents
*/
public trimFile(filePath: string, content: string, mode: ContextMode = 'trimmed'): string {
// If mode is 'full', return the original content
if (mode === 'full') {
return content;
}
// Process based on file type
if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
return this.trimTypeScriptFile(content);
} else if (filePath.endsWith('.md')) {
return this.trimMarkdownFile(content);
} else if (filePath.endsWith('.json')) {
return this.trimJsonFile(content);
}
// Default to returning the original content for unknown file types
return content;
}
/**
* Trim a TypeScript file to reduce token usage
* @param content The TypeScript file contents
* @returns The trimmed file contents
*/
private trimTypeScriptFile(content: string): string {
let result = content;
// Step 1: Preserve JSDoc comments if configured
const jsDocComments: string[] = [];
if (this.config.preserveJSDoc) {
const jsDocRegex = /\/\*\*[\s\S]*?\*\//g;
const matches = result.match(jsDocRegex) || [];
jsDocComments.push(...matches);
}
// Step 2: Remove comments if configured
if (this.config.removeComments) {
// Remove single-line comments
result = result.replace(/\/\/.*$/gm, '');
// Remove multi-line comments (except JSDoc if preserveJSDoc is true)
if (!this.config.preserveJSDoc) {
result = result.replace(/\/\*[\s\S]*?\*\//g, '');
} else {
// Only remove non-JSDoc comments
result = result.replace(/\/\*(?!\*)[\s\S]*?\*\//g, '');
}
}
// Step 3: Remove function implementations if configured
if (this.config.removeImplementations) {
// Match function and method bodies
result = result.replace(
/(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, funcType, body, end) => {
// Keep function signature and opening brace, replace body with comment
return `${start} /* implementation removed */ ${end}`;
}
);
// Match arrow function bodies
result = result.replace(
/(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
(match, start, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
// Match method declarations
result = result.replace(
/(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
(match, start, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
// Match class methods
result = result.replace(
/(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, modifier, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
} else if (this.config.maxFunctionLines && this.config.maxFunctionLines > 0) {
// If not removing implementations completely, limit the number of lines
// Match function and method bodies
result = result.replace(
/(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, funcType, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match arrow function bodies
result = result.replace(
/(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
(match, start, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match method declarations
result = result.replace(
/(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
(match, start, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match class methods
result = result.replace(
/(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, modifier, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
}
// Step 4: Remove blank lines if configured
if (this.config.removeBlankLines) {
result = result.replace(/^\s*[\r\n]/gm, '');
}
// Step 5: Restore preserved JSDoc comments
if (this.config.preserveJSDoc && jsDocComments.length > 0) {
// This is a placeholder; we already preserved JSDoc comments in the regex steps
}
return result;
}
/**
* Limit a function body to a maximum number of lines
* @param start The function signature and opening brace
* @param body The function body
* @param end The closing brace
* @returns The limited function body
*/
private limitFunctionBody(start: string, body: string, end: string): string {
const lines = body.split('\n');
if (lines.length > this.config.maxFunctionLines!) {
const limitedBody = lines.slice(0, this.config.maxFunctionLines!).join('\n');
return `${start}${limitedBody}\n // ... (${lines.length - this.config.maxFunctionLines!} lines trimmed)\n${end}`;
}
return `${start}${body}${end}`;
}
/**
* Trim a Markdown file to reduce token usage
* @param content The Markdown file contents
* @returns The trimmed file contents
*/
private trimMarkdownFile(content: string): string {
// For markdown files, we generally want to keep most content
// but we can remove lengthy code blocks if needed
return content;
}
/**
* Trim a JSON file to reduce token usage
* @param content The JSON file contents
* @returns The trimmed file contents
*/
private trimJsonFile(content: string): string {
try {
// Parse the JSON
const json = JSON.parse(content);
// For package.json, keep only essential information
if ('name' in json && 'version' in json && 'dependencies' in json) {
const essentialKeys = [
'name', 'version', 'description', 'author', 'license',
'main', 'types', 'exports', 'type'
];
const trimmedJson: any = {};
essentialKeys.forEach(key => {
if (key in json) {
trimmedJson[key] = json[key];
}
});
// Add dependency information without versions
if ('dependencies' in json) {
trimmedJson.dependencies = Object.keys(json.dependencies).reduce((acc, dep) => {
acc[dep] = '*'; // Replace version with wildcard
return acc;
}, {} as Record<string, string>);
}
// Return the trimmed JSON
return JSON.stringify(trimmedJson, null, 2);
}
// For other JSON files, leave as is
return content;
} catch (error) {
// If there's an error parsing the JSON, return the original content
return content;
}
}
/**
* Update the trimmer configuration
* @param config The new configuration to apply
*/
public updateConfig(config: ITrimConfig): void {
this.config = {
...this.config,
...config
};
}
}
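The net effect of the deleted trimmer, per the regexes above, was signature-preserving body removal. A before/after sketch of the removed behavior, for reference:

```typescript
import { ContextTrimmer } from './context-trimmer.js'; // the deleted module above

const trimmer = new ContextTrimmer(); // defaults: removeImplementations = true
const trimmed = trimmer.trimFile(
  'example.ts',
  'function add(a: number, b: number) { return a + b; }',
);
// trimmed === 'function add(a: number, b: number) { /* implementation removed */ }'
```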

View File

@@ -1,343 +0,0 @@
import * as plugins from '../plugins.js';
import type { ContextMode, IContextResult, IFileInfo, TaskType } from './types.js';
import { ContextTrimmer } from './context-trimmer.js';
import { ConfigManager } from './config-manager.js';
/**
* Enhanced ProjectContext that supports context optimization strategies
*/
export class EnhancedContext {
private projectDir: string;
private trimmer: ContextTrimmer;
private configManager: ConfigManager;
private contextMode: ContextMode = 'trimmed';
private tokenBudget: number = 190000; // Default for o4-mini
private contextResult: IContextResult = {
context: '',
tokenCount: 0,
includedFiles: [],
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0
};
/**
* Create a new EnhancedContext
* @param projectDirArg The project directory
*/
constructor(projectDirArg: string) {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
}
/**
* Initialize the context builder
*/
public async initialize(): Promise<void> {
await this.configManager.initialize(this.projectDir);
this.tokenBudget = this.configManager.getMaxTokens();
this.trimmer.updateConfig(this.configManager.getTrimConfig());
}
/**
* Set the context mode
* @param mode The context mode to use
*/
public setContextMode(mode: ContextMode): void {
this.contextMode = mode;
}
/**
* Set the token budget
* @param maxTokens The maximum tokens to use
*/
public setTokenBudget(maxTokens: number): void {
this.tokenBudget = maxTokens;
}
/**
* Gather files from the project
* @param includePaths Optional paths to include
* @param excludePaths Optional paths to exclude
*/
public async gatherFiles(includePaths?: string[], excludePaths?: string[]): Promise<Record<string, plugins.smartfile.SmartFile | plugins.smartfile.SmartFile[]>> {
const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'package.json'),
this.projectDir,
);
const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'readme.md'),
this.projectDir,
);
const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'readme.hints.md'),
this.projectDir,
);
const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json'),
this.projectDir,
);
// Use provided include paths or default to all TypeScript files
const includeGlobs = includePaths?.map(path => `${path}/**/*.ts`) || ['ts*/**/*.ts'];
// Get TypeScript files
const smartfilesModPromises = includeGlobs.map(glob =>
plugins.smartfile.fs.fileTreeToObject(this.projectDir, glob)
);
const smartfilesModArrays = await Promise.all(smartfilesModPromises);
// Flatten the arrays
const smartfilesMod: plugins.smartfile.SmartFile[] = [];
smartfilesModArrays.forEach(array => {
smartfilesMod.push(...array);
});
// Get test files if not excluded
let smartfilesTest: plugins.smartfile.SmartFile[] = [];
if (!excludePaths?.includes('test/')) {
smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
this.projectDir,
'test/**/*.ts',
);
}
return {
smartfilePackageJSON,
smartfilesReadme,
smartfilesReadmeHints,
smartfilesNpmextraJSON,
smartfilesMod,
smartfilesTest,
};
}
/**
* Convert files to context string
* @param files The files to convert
* @param mode The context mode to use
*/
public async convertFilesToContext(
files: plugins.smartfile.SmartFile[],
mode: ContextMode = this.contextMode
): Promise<string> {
// Reset context result
this.contextResult = {
context: '',
tokenCount: 0,
includedFiles: [],
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0
};
let totalTokenCount = 0;
let totalOriginalTokens = 0;
// Sort files by importance (for now just a simple alphabetical sort)
// Later this could be enhanced with more sophisticated prioritization
const sortedFiles = [...files].sort((a, b) => a.relative.localeCompare(b.relative));
const processedFiles: string[] = [];
for (const smartfile of sortedFiles) {
// Calculate original token count
const originalContent = smartfile.contents.toString();
const originalTokenCount = this.countTokens(originalContent);
totalOriginalTokens += originalTokenCount;
// Apply trimming based on mode
let processedContent = originalContent;
if (mode !== 'full') {
processedContent = this.trimmer.trimFile(
smartfile.relative,
originalContent,
mode
);
}
// Calculate new token count
const processedTokenCount = this.countTokens(processedContent);
// Check if we have budget for this file
if (totalTokenCount + processedTokenCount > this.tokenBudget) {
// We don't have budget for this file
this.contextResult.excludedFiles.push({
path: smartfile.path,
contents: originalContent,
relativePath: smartfile.relative,
tokenCount: originalTokenCount
});
continue;
}
// Format the file for context
const formattedContent = `
====== START OF FILE ${smartfile.relative} ======
${processedContent}
====== END OF FILE ${smartfile.relative} ======
`;
processedFiles.push(formattedContent);
totalTokenCount += processedTokenCount;
// Track file in appropriate list
const fileInfo: IFileInfo = {
path: smartfile.path,
contents: processedContent,
relativePath: smartfile.relative,
tokenCount: processedTokenCount
};
if (mode === 'full' || processedContent === originalContent) {
this.contextResult.includedFiles.push(fileInfo);
} else {
this.contextResult.trimmedFiles.push(fileInfo);
this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
}
}
// Join all processed files
const context = processedFiles.join('\n');
// Update context result
this.contextResult.context = context;
this.contextResult.tokenCount = totalTokenCount;
return context;
}
/**
* Build context for the project
* @param taskType Optional task type for task-specific context
*/
public async buildContext(taskType?: TaskType): Promise<IContextResult> {
// Initialize if needed
if (this.tokenBudget === 0) {
await this.initialize();
}
// Get task-specific configuration if a task type is provided
if (taskType) {
const taskConfig = this.configManager.getTaskConfig(taskType);
if (taskConfig.mode) {
this.setContextMode(taskConfig.mode);
}
}
// Gather files
const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
const files = await this.gatherFiles(
taskConfig?.includePaths,
taskConfig?.excludePaths
);
// Convert files to context
// Create an array of all files to process
const allFiles: plugins.smartfile.SmartFile[] = [];
// Add individual files
if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);
// Add arrays of files
if (files.smartfilesMod) {
if (Array.isArray(files.smartfilesMod)) {
allFiles.push(...files.smartfilesMod);
} else {
allFiles.push(files.smartfilesMod);
}
}
if (files.smartfilesTest) {
if (Array.isArray(files.smartfilesTest)) {
allFiles.push(...files.smartfilesTest);
} else {
allFiles.push(files.smartfilesTest);
}
}
const context = await this.convertFilesToContext(allFiles);
return this.contextResult;
}
/**
* Update the context with git diff information for commit tasks
* @param gitDiff The git diff to include
*/
public updateWithGitDiff(gitDiff: string): IContextResult {
// If we don't have a context yet, return empty result
if (!this.contextResult.context) {
return this.contextResult;
}
// Add git diff to context
const diffSection = `
====== GIT DIFF ======
${gitDiff}
====== END GIT DIFF ======
`;
const diffTokenCount = this.countTokens(diffSection);
// Update context and token count
this.contextResult.context += diffSection;
this.contextResult.tokenCount += diffTokenCount;
return this.contextResult;
}
/**
* Count tokens in a string
* @param text The text to count tokens for
* @param model The model to use for token counting
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
try {
// Use the gpt-tokenizer library to count tokens
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
console.error('Error counting tokens:', error);
// Provide a rough estimate if tokenization fails
return Math.ceil(text.length / 4);
}
}
/**
* Get the context result
*/
public getContextResult(): IContextResult {
return this.contextResult;
}
/**
* Get the token count for the current context
*/
public getTokenCount(): number {
return this.contextResult.tokenCount;
}
/**
* Get both the context string and its token count
*/
public getContextWithTokenCount(): { context: string; tokenCount: number } {
return {
context: this.contextResult.context,
tokenCount: this.contextResult.tokenCount
};
}
}

View File

@@ -1,32 +0,0 @@
import { EnhancedContext } from './enhanced-context.js';
import { TaskContextFactory } from './task-context-factory.js';
import { ConfigManager } from './config-manager.js';
import { ContextTrimmer } from './context-trimmer.js';
import type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType
} from './types.js';
export {
// Classes
EnhancedContext,
TaskContextFactory,
ConfigManager,
ContextTrimmer,
};
// Types
export type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType
};

View File

@@ -1,138 +0,0 @@
import * as plugins from '../plugins.js';
import { EnhancedContext } from './enhanced-context.js';
import { ConfigManager } from './config-manager.js';
import type { IContextResult, TaskType } from './types.js';
/**
* Factory class for creating task-specific context
*/
export class TaskContextFactory {
private projectDir: string;
private configManager: ConfigManager;
/**
* Create a new TaskContextFactory
* @param projectDirArg The project directory
*/
constructor(projectDirArg: string) {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
}
/**
* Initialize the factory
*/
public async initialize(): Promise<void> {
await this.configManager.initialize(this.projectDir);
}
/**
* Create context for README generation
*/
public async createContextForReadme(): Promise<IContextResult> {
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
// Get README-specific configuration
const taskConfig = this.configManager.getTaskConfig('readme');
if (taskConfig.mode) {
contextBuilder.setContextMode(taskConfig.mode);
}
// Build the context for README task
return await contextBuilder.buildContext('readme');
}
/**
* Create context for description generation
*/
public async createContextForDescription(): Promise<IContextResult> {
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
// Get description-specific configuration
const taskConfig = this.configManager.getTaskConfig('description');
if (taskConfig.mode) {
contextBuilder.setContextMode(taskConfig.mode);
}
// Build the context for description task
return await contextBuilder.buildContext('description');
}
/**
* Create context for commit message generation
* @param gitDiff Optional git diff to include
*/
public async createContextForCommit(gitDiff?: string): Promise<IContextResult> {
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
// Get commit-specific configuration
const taskConfig = this.configManager.getTaskConfig('commit');
if (taskConfig.mode) {
contextBuilder.setContextMode(taskConfig.mode);
}
// Build the context for commit task
const contextResult = await contextBuilder.buildContext('commit');
// If git diff is provided, add it to the context
if (gitDiff) {
contextBuilder.updateWithGitDiff(gitDiff);
}
return contextBuilder.getContextResult();
}
/**
* Create context for any task type
* @param taskType The task type to create context for
* @param additionalContent Optional additional content to include
*/
public async createContextForTask(
taskType: TaskType,
additionalContent?: string
): Promise<IContextResult> {
switch (taskType) {
case 'readme':
return this.createContextForReadme();
case 'description':
return this.createContextForDescription();
case 'commit':
return this.createContextForCommit(additionalContent);
default:
// Generic context for unknown task types
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
return await contextBuilder.buildContext();
}
}
/**
* Get token stats for all task types
*/
public async getTokenStats(): Promise<Record<TaskType, {
tokenCount: number;
savings: number;
includedFiles: number;
trimmedFiles: number;
excludedFiles: number;
}>> {
const taskTypes: TaskType[] = ['readme', 'description', 'commit'];
const stats: Record<TaskType, any> = {} as any;
for (const taskType of taskTypes) {
const result = await this.createContextForTask(taskType);
stats[taskType] = {
tokenCount: result.tokenCount,
savings: result.tokenSavings,
includedFiles: result.includedFiles.length,
trimmedFiles: result.trimmedFiles.length,
excludedFiles: result.excludedFiles.length
};
}
return stats;
}
}

View File

@@ -1,95 +0,0 @@
/**
* Context processing mode to control how context is built
*/
export type ContextMode = 'full' | 'trimmed' | 'summarized';
/**
* Configuration for context trimming
*/
export interface ITrimConfig {
/** Whether to remove function implementations */
removeImplementations?: boolean;
/** Whether to preserve interface definitions */
preserveInterfaces?: boolean;
/** Whether to preserve type definitions */
preserveTypeDefs?: boolean;
/** Whether to preserve JSDoc comments */
preserveJSDoc?: boolean;
/** Maximum lines to keep for function bodies (if not removing completely) */
maxFunctionLines?: number;
/** Whether to remove normal comments (non-JSDoc) */
removeComments?: boolean;
/** Whether to remove blank lines */
removeBlankLines?: boolean;
}
/**
* Task types that require different context optimization
*/
export type TaskType = 'readme' | 'commit' | 'description';
/**
* Configuration for different tasks
*/
export interface ITaskConfig {
/** The context mode to use for this task */
mode?: ContextMode;
/** File paths to include for this task */
includePaths?: string[];
/** File paths to exclude for this task */
excludePaths?: string[];
/** For commit tasks, whether to focus on changed files */
focusOnChangedFiles?: boolean;
/** For description tasks, whether to include package info */
includePackageInfo?: boolean;
}
/**
* Complete context configuration
*/
export interface IContextConfig {
/** Maximum tokens to use for context */
maxTokens?: number;
/** Default context mode */
defaultMode?: ContextMode;
/** Task-specific settings */
taskSpecificSettings?: {
[key in TaskType]?: ITaskConfig;
};
/** Trimming configuration */
trimming?: ITrimConfig;
}
/**
* Basic file information interface
*/
export interface IFileInfo {
/** The file path */
path: string;
/** The file contents */
contents: string;
/** The file's relative path from the project root */
relativePath: string;
/** The estimated token count of the file */
tokenCount?: number;
/** The file's importance score (higher is more important) */
importanceScore?: number;
}
/**
* Result of context building
*/
export interface IContextResult {
/** The generated context string */
context: string;
/** The total token count of the context */
tokenCount: number;
/** Files included in the context */
includedFiles: IFileInfo[];
/** Files that were trimmed */
trimmedFiles: IFileInfo[];
/** Files that were excluded */
excludedFiles: IFileInfo[];
/** Token savings from trimming */
tokenSavings: number;
}
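For reference, the configuration surface removed here was read from the `tsdoc.context` key of npmextra.json. A sketch of a value it accepted, reconstructed from the deleted IContextConfig, ITaskConfig, and ITrimConfig definitions above:

```typescript
import type { IContextConfig } from './types.js'; // the deleted module above

const legacyContextConfig: IContextConfig = {
  maxTokens: 190000,       // o4-mini window minus a buffer (the old default)
  defaultMode: 'trimmed',
  taskSpecificSettings: {
    readme: { mode: 'trimmed', includePaths: ['ts/'], excludePaths: ['test/'] },
    commit: { mode: 'trimmed', focusOnChangedFiles: true },
  },
  trimming: { removeImplementations: true, maxFunctionLines: 5 },
};
```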

View File

@@ -6,10 +6,12 @@ export { path };
// pushrocks scope
import * as npmextra from '@push.rocks/npmextra';
import * as qenv from '@push.rocks/qenv';
import * as smartagent from '@push.rocks/smartagent';
import * as smartai from '@push.rocks/smartai';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartfs from '@push.rocks/smartfs';
import * as smartgit from '@push.rocks/smartgit';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartlog from '@push.rocks/smartlog';
@@ -21,10 +23,12 @@ import * as smarttime from '@push.rocks/smarttime';
export {
npmextra,
qenv,
smartagent,
smartai,
smartcli,
smartdelay,
smartfile,
smartfs,
smartgit,
smartinteract,
smartlog,
@@ -34,6 +38,13 @@ export {
smarttime,
};
// Create a shared SmartFs instance for filesystem operations
const smartFsNodeProvider = new smartfs.SmartFsProviderNode();
export const fsInstance = new smartfs.SmartFs(smartFsNodeProvider);
// Create a shared SmartFileFactory for in-memory file operations
export const smartfileFactory = smartfile.SmartFileFactory.nodeFs();
// @git.zone scope
import * as tspublish from '@git.zone/tspublish';
@@ -41,6 +52,5 @@ export { tspublish };
// third party scope
import * as typedoc from 'typedoc';
export { typedoc };