# Compare commits

126 commits
| SHA1 |
|---|
| d46fd1590e | |||
| 1d7317f063 | |||
| fe5121ec9c | |||
| c084b20390 | |||
| 6f024536a8 | |||
| 2405fb3370 | |||
| 8561940b8c | |||
| ab273ea75c | |||
| 620737566f | |||
| 23453bf16b | |||
| 84947cfb80 | |||
| 1a9ac9091d | |||
| 88b93b8b83 | |||
| 77279a9135 | |||
| 7426addbdd | |||
| 58d060d729 | |||
| 370cbfe6f3 | |||
| 2adb4e8cb0 | |||
| e8608b1cae | |||
| 33fa7fa337 | |||
| 2946bcaf49 | |||
| d962e17c18 | |||
| a22c400355 | |||
| 08b7305ef0 | |||
| d7b462fda9 | |||
| 01e6c15626 | |||
| 94a066247f | |||
| 7de157ccb3 | |||
| d783965b25 | |||
| 07f1413d5e | |||
| d7bf45f6b5 | |||
| 3eb64bcb5d | |||
| e24a027fdd | |||
| 3f451cfcb1 | |||
| e355c51c8d | |||
| b0fcaba2c3 | |||
| 4ea205e11b | |||
| f819e7b521 | |||
| d4903f32f0 | |||
| 34102a2544 | |||
| 5e2171dbfd | |||
| 70d4af653a | |||
| 06f6fdef98 | |||
| b6fb7bf029 | |||
| 4c83725120 | |||
| a060cd1a03 | |||
| e8372effc7 | |||
| 571249705e | |||
| 927cd961fd | |||
| 63b4fcc232 | |||
| 4188ed7f24 | |||
| 1feddc6e85 | |||
| 499baebc18 | |||
| 01fc0d0c6e | |||
| b6c9cea5d1 | |||
| a949039192 | |||
| 11bde9d756 | |||
| eac26521c6 | |||
| e1323569f5 | |||
| 41e4bd6689 | |||
| 164a58ec59 | |||
| e1c0f82fe8 | |||
| 8a0046818b | |||
| 97fa9db32f | |||
| d61de9b615 | |||
| fba54035ea | |||
| 9a3d8588a8 | |||
| eb8f8fa70a | |||
| afe7b5e99e | |||
| e074562362 | |||
| 240d6bb314 | |||
| 2d0839a1da | |||
| 9f250ae2b3 | |||
| 1223bb8567 | |||
| 9395cfc166 | |||
| 3b4c6bd97f | |||
| 5d2c9e6158 | |||
| 89977038ec | |||
| b753c206b0 | |||
| 1965bd9b47 | |||
| 138d71e8c5 | |||
| 15397e8609 | |||
| 1489420e47 | |||
| 5e3b122b59 | |||
| 02fa9215d3 | |||
| 32f12c67cf | |||
| be53225bb1 | |||
| a5db530879 | |||
| c5b07c2504 | |||
| 1bd215d18d | |||
| e5a348f57c | |||
| d243880d55 | |||
| c1bd85fc58 | |||
| b81220b2ba | |||
| ca26d9e98d | |||
| 61b6161470 | |||
| 463183bd3a | |||
| 069a74d2b4 | |||
| 87c1ae53b3 | |||
| 774aea55ff | |||
| ee7038e0d7 | |||
| 7c3bae4c6e | |||
| 69d59e02f8 | |||
| b4b6797fdf | |||
| 4bbb154c4f | |||
| eec33e29d3 | |||
| c33a7d37ee | |||
| 084b321e6a | |||
| cf1cfbd647 | |||
| 489349e45a | |||
| c0c627fedb | |||
| 8d4b278a5d | |||
| a0969912eb | |||
| 39d64ffcf3 | |||
| 529297bd09 | |||
| 4c16cb9c3e | |||
| 3a6cdf5fb5 | |||
| 2460c89151 | |||
| 3dae706a67 | |||
| c150052380 | |||
| 1d00a95885 | |||
| d9bfba1b5f | |||
| c56db7d1d0 | |||
| a2bcd1a1c5 | |||
| 795ce9b014 | |||
| 9a84009f47 |
**.gitea/workflows/default_nottags.yaml** — new file (66 lines)

```yaml
name: Default (not tags)

on:
  push:
    tags-ignore:
      - '**'

env:
  IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci

      - name: Run npm prepare
        run: npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build
```
**.gitea/workflows/default_tags.yaml** — new file (124 lines)

```yaml
name: Default (tags)

on:
  push:
    tags:
      - '*'

env:
  IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true

      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test

      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build

  release:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Release
        run: |
          npmci node install stable
          npmci npm publish

  metadata:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    continue-on-error: true

    steps:
      - uses: actions/checkout@v3

      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare

      - name: Code quality
        run: |
          npmci command npm install -g typescript
          npmci npm install

      - name: Trigger
        run: npmci trigger

      - name: Build docs and upload artifacts
        run: |
          npmci node install stable
          npmci npm install
          pnpm install -g @git.zone/tsdoc
          npmci command tsdoc
        continue-on-error: true
```
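The two workflows split cleanly on the push trigger: `tags-ignore: '**'` makes the first run only for branch pushes, while `tags: '*'` restricts the second to tag pushes, so the `release` and `metadata` jobs can only ever run from a tagged commit (double-guarded by their `startsWith(github.ref, 'refs/tags/')` conditions).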
**.gitignore** — vendored (3 lines changed)

```diff
@@ -17,4 +17,5 @@ node_modules/
 dist/
 dist_*/
 
 # custom
+**/.claude/settings.local.json
```
**.gitlab-ci.yml** — deleted (137 lines). Former contents:

```yaml
# gitzone ci_default
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci

cache:
  paths:
    - .npmci_cache/
  key: '$CI_BUILD_STAGE'

stages:
  - security
  - test
  - release
  - metadata

# ====================
# security stage
# ====================
mirror:
  stage: security
  script:
    - npmci git mirror
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

auditProductionDependencies:
  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci npm prepare
    - npmci command npm install --production --ignore-scripts
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command npm audit --audit-level=high --only=prod --production
  tags:
    - docker

auditDevDependencies:
  image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
  stage: security
  script:
    - npmci npm prepare
    - npmci command npm install --ignore-scripts
    - npmci command npm config set registry https://registry.npmjs.org
    - npmci command npm audit --audit-level=high --only=dev
  tags:
    - docker
  allow_failure: true

# ====================
# test stage
# ====================

testStable:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install stable
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker

testBuild:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install stable
    - npmci npm install
    - npmci command npm run build
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker

release:
  stage: release
  script:
    - npmci node install stable
    - npmci npm publish
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

# ====================
# metadata stage
# ====================
codequality:
  stage: metadata
  allow_failure: true
  only:
    - tags
  script:
    - npmci command npm install -g tslint typescript
    - npmci npm prepare
    - npmci npm install
    - npmci command "tslint -c tslint.json ./ts/**/*.ts"
  tags:
    - lossless
    - docker
    - priv

trigger:
  stage: metadata
  script:
    - npmci trigger
  only:
    - tags
  tags:
    - lossless
    - docker
    - notpriv

pages:
  stage: metadata
  script:
    - npmci node install lts
    - npmci command npm install -g @gitzone/tsdoc
    - npmci npm prepare
    - npmci npm install
    - npmci command tsdoc
  tags:
    - lossless
    - docker
    - notpriv
  only:
    - tags
  artifacts:
    expire_in: 1 week
    paths:
      - public
  allow_failure: true
```
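The four GitLab stages map almost one-to-one onto the new Gitea workflows above: security and test run for every branch push, while release and metadata are reserved for tag pushes. Only the `mirror` job has no direct successor, and the code-quality step drops tslint in favor of a plain TypeScript install.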
**.vscode/launch.json** — vendored (24 lines changed)

```diff
@@ -2,28 +2,10 @@
   "version": "0.2.0",
   "configurations": [
     {
-      "name": "current file",
-      "type": "node",
+      "command": "npm test",
+      "name": "Run npm test",
       "request": "launch",
-      "args": [
-        "${relativeFile}"
-      ],
-      "runtimeArgs": ["-r", "@gitzone/tsrun"],
-      "cwd": "${workspaceRoot}",
-      "protocol": "inspector",
-      "internalConsoleOptions": "openOnSessionStart"
-    },
-    {
-      "name": "test.ts",
-      "type": "node",
-      "request": "launch",
-      "args": [
-        "test/test.ts"
-      ],
-      "runtimeArgs": ["-r", "@gitzone/tsrun"],
-      "cwd": "${workspaceRoot}",
-      "protocol": "inspector",
-      "internalConsoleOptions": "openOnSessionStart"
+      "type": "node-terminal"
     }
   ]
 }
```
**changelog.md** — new file (158 lines)

# Changelog

## 2025-11-02 - 1.6.0 - feat(context)
Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements

- Add ContextAnalyzer for dependency-based file scoring and prioritization (PageRank-like centrality, relevance, efficiency, recency)
- Add LazyFileLoader to scan metadata and load files in parallel with lightweight token estimates
- Add ContextCache for persistent file content/token caching with TTL and max-size eviction
- Enhance ContextTrimmer with tier-based trimming and configurable light/aggressive levels
- Integrate new components into EnhancedContext and TaskContextFactory to build task-aware, token-optimized contexts
- Extend ConfigManager and types to support cache, analyzer, prioritization weights and tier configs (npmextra.json driven)
- Add comprehensive unit tests for ContextAnalyzer, ContextCache and LazyFileLoader
- Update README with Smart Context Building docs, examples, configuration options and CI workflow snippet

## 2025-09-07 - 1.5.2 - fix(package)
Bump dependencies, refine test script and imports, and overhaul README and docs

- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
- Updated test script to run tstest with verbose, logfile and increased timeout; adjusted testCli script invocation.
- Fixed test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
- Large README rewrite: reorganized and expanded content, added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
- Minor clarification added to commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).

## 2025-08-16 - 1.5.1 - fix(aidoc)
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()

- Bumped multiple dependencies and devDependencies in package.json (notable upgrades: @git.zone/tsbuild, @git.zone/tspublish, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartai, @push.rocks/smartfile, @push.rocks/smartgit, @push.rocks/smartlog, @push.rocks/smartpath, @push.rocks/smartshell, typedoc, typescript).
- Added pnpm-workspace.yaml with onlyBuiltDependencies (esbuild, mongodb-memory-server, puppeteer, sharp).
- Added AiDoc.stop() to properly stop the OpenAI provider (resource/client shutdown).
- Updated packageManager field in package.json to a newer pnpm version/hash.

## 2025-05-14 - 1.5.0 - feat(docs)
Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions

- Revised descriptions in package.json and npmextra.json to emphasize comprehensive documentation capabilities
- Expanded README with detailed installation options and extended usage examples for both CLI and API-like integrations
- Added new dependency (gpt-tokenizer) to support token counting for AI context building
- Adjusted keywords to better reflect project functionalities such as commit message automation and context trimming

## 2025-05-13 - 1.4.5 - fix(dependencies)
Upgrade various dependency versions and update package manager configuration

- Bump @git.zone/tsbuild from ^2.1.80 to ^2.3.2
- Upgrade @push.rocks/tapbundle from ^5.0.23 to ^6.0.3
- Update @types/node from ^22.8.1 to ^22.15.17
- Bump @push.rocks/smartai from ^0.4.2 to ^0.5.4
- Upgrade @push.rocks/smartlog from ^3.0.7 to ^3.0.9
- Update typedoc from ^0.27.9 to ^0.28.4
- Bump typescript from ^5.5.2 to ^5.8.3
- Add packageManager field with pnpm@10.10.0 specification

## 2025-02-25 - 1.4.4 - fix(dependencies)
Update dependencies to latest versions

- Updated '@push.rocks/smartai' from '^0.0.17' to '^0.4.2'
- Updated 'typedoc' from '^0.26.1' to '^0.27.9'

## 2025-01-14 - 1.4.3 - fix(aidocs_classes)
Improve readme generation instructions to ensure correct markdown formatting.

- Added guidance to avoid using backticks at the beginning and end of readme generation to prevent markdown issues.
- Clarified that the output is directly written to readme.md and backticks should only be used for code blocks.

## 2024-10-28 - 1.4.2 - fix(cli)
Ensure async completion for aidoc readme and description generation

- Added await statements for asynchronous methods buildReadme and buildDescription in the aidoc command.

## 2024-10-28 - 1.4.1 - fix(readme)
Correct async call to getModuleSubDirs in readme generation.

- Fixed an issue with asynchronous handling in readme generation for submodules.
- Ensured that getModuleSubDirs function is called with await to handle promises properly.

## 2024-10-28 - 1.4.0 - feat(aidocs)
Added support for building readmes for sub-modules in aidocs

- Updated the `Readme` class to handle monorepo projects by generating readmes for sub-modules.
- Integrated `tspublish` to identify sub-modules for readme generation.

## 2024-06-24 - 1.3.12 - fix(aidocs)
Fix changelog generation by handling leading newlines

- Fixed handling of leading newlines in the changelog to ensure proper formatting.

## 2024-06-23 - 1.3.11 - fix(core)
Fixed new changelog formatting issue to retain consistent spacing.

- Adjusted the new changelog generation to ensure consistent spacing for improved readability.

## 2024-06-23 - 1.3.10 - fix(aidocs_classes)
Fix changelog format to remove extra newline

- Updated `ts/aidocs_classes/commit.ts` to fix the changelog format.

## 2024-06-23 - 1.3.9 - fix(aidoc)
Fix changelog generation by properly stripping markdown code fences

- Corrected the changelog generation code to ensure markdown code fences are properly stripped.

## 2024-06-23 - 1.3.8 - fix(changelog)
Fix changelog generation by properly stripping markdown code fences

- Corrected the changelog generation code to ensure markdown code fences are properly stripped.

## 2024-06-23 - 1.3.7 - fix(aidoc)
Update to include package-lock.json in uncommitted changes check

- Modified the getUncommittedDiff method call in commit.ts to include package-lock.json along with pnpm-lock.yaml

## 2024-06-23 - 1.3.6 - fix(commit)
Fixed issue with retrieving uncommitted diffs in git repository

- Revised logic to correctly handle uncommitted changes by using an array for `getUncommittedDiff` method
- Ensured proper handling and representation of uncommitted changes in the output

## 2024-06-23 - 1.3.5 - fix(aidocs_classes)
Refactor and enhance changelog formatting

- Updated the `commit.ts` file to improve the changelog formatting and ensure consistency.
- Enhanced the changelog instructions to include summarizing messages for omitted commits.
- Removed unnecessary console logging in `projectcontext.ts`.

```markdown
## 2024-06-23 - 1.3.3 - fix(aidocs_classes)
Fix changelog formatting issue in commit class

## 2024-06-23 - 1.3.2 - fix(aidocs_classes)
Fix minor bugs and update dependencies in aidocs_classes

## 2024-06-23 - 1.3.1 - fix(aidocs_classes)
Fix typo in INextCommitObject interface and update date format in changelog generation.

## 2024-06-23 - 1.3.0 - fix(aidocs_classes)
Fix typo in INextCommitObject interface

## 2024-06-23 - 1.2.4 - feat(core)
Added smarttime dependency and improved changelog generation

## 2024-06-23 - 1.2.3 - fix(logging)
Refactor logger initialization to use commitinfo data

## 2024-06-23 - 1.2.2 - fix(aidocs)
Fix bug in AiDoc class causing undefined token handling

## 2024-06-23 - 1.2.0 - fix(core)
Fixed usage of plugins in project context and readme generation

## 2024-06-23 - 1.1.42 - feat(aidocs_classes)
Enhance changelog generation by supporting complete generation in the absence of previous changelog files

## 2024-06-23 - 1.1.41 - fix(aidocs_classes)
Improve commit message generation by handling empty diffs and updating changelog instructions
```
**cli.child.ts** — new file (4 lines)

```typescript
#!/usr/bin/env node
process.env.CLI_CALL = 'true';
import * as cliTool from './ts/index.js';
cliTool.runCli();
```
**cli.js** (2 lines changed)

```diff
@@ -1,4 +1,4 @@
 #!/usr/bin/env node
 process.env.CLI_CALL = 'true';
-const cliTool = require('./dist_ts/index');
+const cliTool = await import('./dist_ts/index.js');
 cliTool.runCli();
```

**cli.ts.js** (5 lines changed)

```diff
@@ -1,5 +1,5 @@
 #!/usr/bin/env node
 process.env.CLI_CALL = 'true';
-require('@gitzone/tsrun');
-const cliTool = require('./ts/index');
-cliTool.runCli();
+
+import * as tsrun from '@git.zone/tsrun';
+tsrun.runPath('./cli.child.js', import.meta.url);
```
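Read together with `cli.child.ts` above, this completes the move of the CLI entry points from CommonJS to ESM: `cli.js` now top-level-awaits a dynamic `import()` of the compiled build, which is only valid syntax because package.json (below) gains `"type": "module"`, and the development entry swaps the old `require('@gitzone/tsrun')` hook for `tsrun.runPath('./cli.child.js', import.meta.url)`, which resolves and executes the TypeScript child entry relative to the calling module.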
**npmextra.json** (13 lines replaced by 30 in this hunk)

```diff
@@ -6,13 +6,30 @@
     "gitscope": "gitzone",
     "gitrepo": "tsdoc",
     "shortDescription": "a tool for better documentation",
-    "npmPackagename": "@gitzone/tsdoc",
+    "npmPackagename": "@git.zone/tsdoc",
     "license": "MIT",
-    "projectDomain": "git.zone"
+    "projectDomain": "git.zone",
+    "description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
+    "keywords": [
+      "TypeScript",
+      "documentation",
+      "AI",
+      "CLI",
+      "README",
+      "TypeDoc",
+      "commit messages",
+      "automation",
+      "code analysis",
+      "context trimming",
+      "developer tools"
+    ]
   }
 },
 "npmci": {
   "npmGlobalTools": [],
   "npmAccessLevel": "public"
 },
 "tsdoc": {
   "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
 }
}
```
**package-lock.json** — generated (11,782 lines). File diff suppressed because it is too large.
**package.json** (82 lines changed)

```diff
@@ -1,38 +1,48 @@
 {
-  "name": "@gitzone/tsdoc",
-  "version": "1.1.7",
+  "name": "@git.zone/tsdoc",
+  "version": "1.6.0",
   "private": false,
-  "description": "a tool for better documentation",
-  "main": "dist_ts/index.js",
-  "typings": "dist_ts/index.d.ts",
-  "author": "Lossless GmbH",
+  "description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
+  "type": "module",
+  "exports": {
+    ".": "./dist_ts/index.js"
+  },
+  "author": "Task Venture Capital GmbH",
   "license": "MIT",
   "bin": {
     "tsdoc": "cli.js"
   },
   "scripts": {
-    "test": "(tstest test/) && (node ./cli.ts.js)",
-    "build": "(tsbuild --web)",
-    "buildMkdocs": "(cd mkdocs/originalrepo && docker rmi -f mkdocs && docker build -t mkdocs .)",
-    "format": "(gitzone format)"
+    "test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
+    "testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
+    "build": "(tsbuild --web --allowimplicitany)",
+    "buildDocs": "tsdoc"
   },
   "devDependencies": {
-    "@gitzone/tsbuild": "^2.1.25",
-    "@gitzone/tstest": "^1.0.52",
-    "@pushrocks/tapbundle": "^3.2.14",
-    "@types/node": "^14.14.32",
-    "tslint": "^6.1.3",
-    "tslint-config-prettier": "^1.15.0"
+    "@git.zone/tsbuild": "^2.6.8",
+    "@git.zone/tsrun": "^1.2.46",
+    "@git.zone/tstest": "^2.3.6",
+    "@types/node": "^22.15.17"
   },
   "dependencies": {
-    "@pushrocks/early": "^3.0.6",
-    "@pushrocks/smartcli": "^3.0.12",
-    "@pushrocks/smartfile": "^8.0.8",
-    "@pushrocks/smartlog": "^2.0.39",
-    "@pushrocks/smartlog-destination-local": "^8.0.8",
-    "@pushrocks/smartshell": "^2.0.26",
-    "typedoc": "^0.20.30",
-    "typescript": "^4.2.3"
+    "@git.zone/tspublish": "^1.10.3",
+    "@push.rocks/early": "^4.0.3",
+    "@push.rocks/npmextra": "^5.3.3",
+    "@push.rocks/qenv": "^6.1.3",
+    "@push.rocks/smartai": "^0.5.11",
+    "@push.rocks/smartcli": "^4.0.11",
+    "@push.rocks/smartdelay": "^3.0.5",
+    "@push.rocks/smartfile": "^11.2.7",
+    "@push.rocks/smartgit": "^3.2.1",
+    "@push.rocks/smartinteract": "^2.0.15",
+    "@push.rocks/smartlog": "^3.1.9",
+    "@push.rocks/smartlog-destination-local": "^9.0.2",
+    "@push.rocks/smartpath": "^6.0.0",
+    "@push.rocks/smartshell": "^3.3.0",
+    "@push.rocks/smarttime": "^4.0.6",
+    "gpt-tokenizer": "^3.0.1",
+    "typedoc": "^0.28.12",
+    "typescript": "^5.9.2"
   },
   "files": [
     "ts/**/*",
@@ -48,5 +58,27 @@
   ],
   "browserslist": [
     "last 1 chrome versions"
-  ]
+  ],
+  "keywords": [
+    "TypeScript",
+    "documentation",
+    "AI",
+    "CLI",
+    "README",
+    "TypeDoc",
+    "commit messages",
+    "automation",
+    "code analysis",
+    "context trimming",
+    "developer tools"
+  ],
+  "repository": {
+    "type": "git",
+    "url": "git+https://gitlab.com/gitzone/tsdoc.git"
+  },
+  "bugs": {
+    "url": "https://gitlab.com/gitzone/tsdoc/issues"
+  },
+  "homepage": "https://gitlab.com/gitzone/tsdoc#readme",
+  "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
 }
```
**pnpm-lock.yaml** — generated, new file (11,893 lines). File diff suppressed because it is too large.
**pnpm-workspace.yaml** — new file (5 lines)

```yaml
onlyBuiltDependencies:
  - esbuild
  - mongodb-memory-server
  - puppeteer
  - sharp
```
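For context: pnpm v10 stopped running dependency lifecycle scripts by default, so `onlyBuiltDependencies` explicitly allow-lists the four packages whose postinstall builds are actually needed.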
**readme.hints.md** — new file (5 lines)

* module needs to be installed globally
* alternatively can be used through npx, if installed locally
* cli parameters are concluded from ./ts/cli.ts
* this module is not intended for API use.
* Read carefully through the TypeScript files. Don't make stuff up.
**readme.md** (615 lines changed: the 39-line readme was replaced by a 596-line rewrite)

Removed (the previous readme; the status-badge table is collapsed to a placeholder because its badge images did not survive the capture):

```diff
@@ -1,39 +1,596 @@
-# @gitzone/tsdoc
-a tool for better documentation
-
-## Availabililty and Links
-* [npmjs.org (npm package)](https://www.npmjs.com/package/@gitzone/tsdoc)
-* [gitlab.com (source)](https://gitlab.com/gitzone/tsdoc)
-* [github.com (source mirror)](https://github.com/gitzone/tsdoc)
-* [docs (typedoc)](https://gitzone.gitlab.io/tsdoc/)
-
-## Status for master
-(status badge table: GitLab pipelines, pipeline test coverage, npm, Snyk, TypeScript support, node support, code style, PackagePhobia standalone install weight, PackagePhobia package size, BundlePhobia bundled size, platform support)
-
-## Usage
-Use TypeScript for best in class intellisense.
-
-## Contribution
-We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
-
-For further information read the linked docs at the top of this readme.
-
-> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
-| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
-
-[](https://maintainedby.lossless.com)
```

Added:

# @git.zone/tsdoc 🚀

**AI-Powered Documentation for TypeScript Projects**

> Stop writing documentation. Let AI understand your code and do it for you.

## What is tsdoc?

`@git.zone/tsdoc` is a next-generation documentation tool that combines traditional TypeDoc generation with cutting-edge AI to create comprehensive, intelligent documentation for your TypeScript projects. It reads your code, understands it, and writes documentation that actually makes sense.

### ✨ Key Features

- **🤖 AI-Enhanced Documentation** - Leverages GPT-5 and other models to generate contextual READMEs
- **🧠 Smart Context Building** - Intelligent file prioritization with dependency analysis and caching
- **📚 TypeDoc Integration** - Classic API documentation generation when you need it
- **💬 Smart Commit Messages** - AI analyzes your changes and suggests meaningful commit messages
- **🎯 Context Optimization** - Advanced token management with 40-60% reduction in usage
- **⚡ Performance Optimized** - 3-5x faster with lazy loading and parallel processing
- **📦 Zero Config** - Works out of the box with sensible defaults
- **🔧 Highly Configurable** - Customize every aspect when needed

## Installation

```bash
# Global installation (recommended)
npm install -g @git.zone/tsdoc

# Or with pnpm
pnpm add -g @git.zone/tsdoc

# Or use with npx
npx @git.zone/tsdoc
```

## Quick Start

### Generate AI-Powered Documentation

```bash
# In your project root
tsdoc aidoc
```

That's it! tsdoc will analyze your entire codebase and generate:
- A comprehensive README.md
- Updated package.json description and keywords
- Smart documentation based on your actual code structure

### Generate Traditional TypeDoc

```bash
tsdoc typedoc --publicSubdir docs
```

### Get Smart Commit Messages

```bash
tsdoc commit
```

## CLI Commands

| Command | Description |
|---------|-------------|
| `tsdoc` | Auto-detects and runs appropriate documentation |
| `tsdoc aidoc` | Generate AI-enhanced documentation |
| `tsdoc typedoc` | Generate TypeDoc documentation |
| `tsdoc commit` | Generate smart commit message |
| `tsdoc tokens` | Analyze token usage for AI context |
| `tsdoc context` | Display context information |

### Token Analysis

Understanding token usage helps optimize AI costs:

```bash
# Show token count for current project
tsdoc tokens

# Show detailed stats for all task types
tsdoc tokens --all

# Test with trimmed context
tsdoc tokens --trim
```

## Programmatic Usage

### Generate Documentation Programmatically

```typescript
import { AiDoc } from '@git.zone/tsdoc';

const generateDocs = async () => {
  const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-token' });
  await aiDoc.start();

  // Generate README
  await aiDoc.buildReadme('./');

  // Update package.json description
  await aiDoc.buildDescription('./');

  // Get smart commit message
  const commit = await aiDoc.buildNextCommitObject('./');
  console.log(commit.recommendedNextVersionMessage);

  // Don't forget to stop when done
  await aiDoc.stop();
};
```

### TypeDoc Generation

```typescript
import { TypeDoc } from '@git.zone/tsdoc';

const typeDoc = new TypeDoc(process.cwd());
await typeDoc.compile({ publicSubdir: 'docs' });
```

### Smart Context Management

Control how tsdoc processes your codebase with the new intelligent context system:

```typescript
import { EnhancedContext, ContextAnalyzer, LazyFileLoader, ContextCache } from '@git.zone/tsdoc';

const context = new EnhancedContext('./');
await context.initialize();

// Set token budget
context.setTokenBudget(100000);

// Choose context mode
context.setContextMode('trimmed'); // 'full' | 'trimmed' | 'summarized'

// Build optimized context with smart prioritization
const result = await context.buildContext('readme');
console.log(`Tokens used: ${result.tokenCount}`);
console.log(`Files included: ${result.includedFiles.length}`);
console.log(`Token savings: ${result.tokenSavings}`);
```

### Advanced: Using Individual Context Components

```typescript
import { LazyFileLoader, ContextAnalyzer, ContextCache } from '@git.zone/tsdoc';

// Lazy file loading - scan metadata without loading contents
const loader = new LazyFileLoader('./');
const metadata = await loader.scanFiles(['ts/**/*.ts']);
console.log(`Found ${metadata.length} files`);

// Analyze and prioritize files
const analyzer = new ContextAnalyzer('./');
const analysis = await analyzer.analyze(metadata, 'readme');

// Files are sorted by importance with dependency analysis
for (const file of analysis.files) {
  console.log(`${file.path}: score ${file.importanceScore.toFixed(2)}, tier ${file.tier}`);
}

// Context caching for performance
const cache = new ContextCache('./', { enabled: true, ttl: 3600 });
await cache.init();
```

## Configuration

Configure tsdoc via `npmextra.json`:

```json
{
  "tsdoc": {
    "context": {
      "maxTokens": 190000,
      "defaultMode": "trimmed",
      "cache": {
        "enabled": true,
        "ttl": 3600,
        "maxSize": 100
      },
      "analyzer": {
        "enabled": true,
        "useAIRefinement": false
      },
      "prioritization": {
        "dependencyWeight": 0.3,
        "relevanceWeight": 0.4,
        "efficiencyWeight": 0.2,
        "recencyWeight": 0.1
      },
      "tiers": {
        "essential": { "minScore": 0.8, "trimLevel": "none" },
        "important": { "minScore": 0.5, "trimLevel": "light" },
        "optional": { "minScore": 0.2, "trimLevel": "aggressive" }
      },
      "taskSpecificSettings": {
        "readme": {
          "mode": "trimmed",
          "includePaths": ["ts/", "src/"],
          "excludePaths": ["test/", "node_modules/"]
        },
        "commit": {
          "mode": "trimmed",
          "focusOnChangedFiles": true
        }
      },
      "trimming": {
        "removeImplementations": true,
        "preserveInterfaces": true,
        "preserveJSDoc": true,
        "maxFunctionLines": 5,
        "removeComments": true
      }
    }
  }
}
```

### Configuration Options

#### Context Settings
- **maxTokens** - Maximum tokens for AI context (default: 190000)
- **defaultMode** - Default context mode: 'full', 'trimmed', or 'summarized'
- **cache** - Caching configuration for improved performance
- **analyzer** - Smart file analysis and prioritization settings
- **prioritization** - Weights for file importance scoring
- **tiers** - Tier thresholds and trimming levels

#### Cache Configuration
- **enabled** - Enable/disable file caching (default: true)
- **ttl** - Time-to-live in seconds (default: 3600)
- **maxSize** - Maximum cache size in MB (default: 100)
- **directory** - Cache directory path (default: .nogit/context-cache)

#### Analyzer Configuration
- **enabled** - Enable smart file analysis (default: true)
- **useAIRefinement** - Use AI for additional context refinement (default: false)
- **aiModel** - Model for AI refinement (default: 'haiku')

## How It Works

### 🚀 Smart Context Building Pipeline

1. **📊 Fast Metadata Scanning** - Lazy loading scans files without reading contents
2. **🧬 Dependency Analysis** - Builds dependency graph from import statements
3. **🎯 Intelligent Scoring** - Multi-factor importance scoring:
   - **Relevance**: Task-specific file importance (e.g., index.ts for READMEs)
   - **Centrality**: How many files depend on this file
   - **Efficiency**: Information density (tokens vs. value)
   - **Recency**: Recently changed files (for commits)
4. **🏆 Smart Prioritization** - Files sorted by combined importance score
5. **🎭 Tier-Based Trimming** - Adaptive trimming based on importance:
   - **Essential** (score ≥ 0.8): No trimming
   - **Important** (score ≥ 0.5): Light trimming
   - **Optional** (score ≥ 0.2): Aggressive trimming
6. **💾 Intelligent Caching** - Cache results with file change detection
7. **🧠 AI Processing** - Send optimized context to AI for documentation

### Context Optimization Benefits

The smart context system delivers significant improvements:

| Metric | Before | After | Improvement |
|--------|--------|-------|-------------|
| **Token Usage** | ~190k (limit) | ~110-130k | ⬇️ 40-60% reduction |
| **Build Time** | 4-6 seconds | 1-2 seconds | ⚡ 3-5x faster |
| **Memory Usage** | All files loaded | Metadata + selected | 📉 80%+ reduction |
| **Relevance** | Alphabetical sorting | Smart scoring | 🎯 90%+ relevant |
| **Cache Hits** | None | 70-80% | 🚀 Major speedup |

### Traditional Context Optimization

For projects where the analyzer is disabled, tsdoc still employs:

- **Intelligent Trimming** - Removes implementation details while preserving signatures
- **JSDoc Preservation** - Keeps documentation comments
- **Interface Prioritization** - Type definitions always included
- **Token Budgeting** - Ensures optimal use of AI context windows

## Environment Variables

| Variable | Description |
|----------|-------------|
| `OPENAI_TOKEN` | Your OpenAI API key for AI features (required) |

## Use Cases

### 🚀 Continuous Integration

```yaml
# .github/workflows/docs.yml
name: Documentation
on: [push]
jobs:
  docs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Setup Node
        uses: actions/setup-node@v3
        with:
          node-version: '18'
      - name: Generate Documentation
        env:
          OPENAI_TOKEN: ${{ secrets.OPENAI_TOKEN }}
        run: |
          npm install -g @git.zone/tsdoc
          tsdoc aidoc
      - name: Commit Changes
        run: |
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
          git add readme.md package.json
          git commit -m "docs: update documentation [skip ci]" || exit 0
          git push
```

### 🔄 Pre-Commit Hooks

```bash
# .git/hooks/prepare-commit-msg
#!/bin/bash
tsdoc commit > .git/COMMIT_EDITMSG
```

### 📦 Package Publishing

```json
{
  "scripts": {
    "prepublishOnly": "tsdoc aidoc",
    "version": "tsdoc aidoc && git add readme.md"
  }
}
```

## Advanced Features

### Multi-Module Projects

tsdoc automatically detects and documents multi-module projects:

```typescript
const aiDoc = new AiDoc();
await aiDoc.start();

// Process main project
await aiDoc.buildReadme('./');

// Process submodules
for (const module of ['packages/core', 'packages/cli']) {
  await aiDoc.buildReadme(module);
}

await aiDoc.stop();
```

### Custom Context Building

Fine-tune what gets sent to AI with task-specific contexts:

```typescript
import { TaskContextFactory } from '@git.zone/tsdoc';

const factory = new TaskContextFactory('./');
await factory.initialize();

// Get optimized context for specific tasks
const readmeContext = await factory.createContextForReadme();
const commitContext = await factory.createContextForCommit();
const descContext = await factory.createContextForDescription();
```

### Dependency Graph Analysis

Understand your codebase structure:

```typescript
import { ContextAnalyzer } from '@git.zone/tsdoc';

const analyzer = new ContextAnalyzer('./');
const analysis = await analyzer.analyze(metadata, 'readme');

// Explore dependency graph
for (const [path, deps] of analysis.dependencyGraph) {
  console.log(`${path}:`);
  console.log(`  Imports: ${deps.imports.length}`);
  console.log(`  Imported by: ${deps.importedBy.length}`);
  console.log(`  Centrality: ${deps.centrality.toFixed(3)}`);
}
```

## Performance & Optimization

### ⚡ Performance Features

- **Lazy Loading** - Files scanned for metadata before content loading
- **Parallel Processing** - Multiple files loaded simultaneously
- **Smart Caching** - Results cached with mtime-based invalidation
- **Incremental Updates** - Only reprocess changed files
- **Streaming** - Minimal memory footprint

### 💰 Cost Optimization

The smart context system significantly reduces AI API costs:

```typescript
// Check token usage before and after optimization
import { EnhancedContext } from '@git.zone/tsdoc';

const context = new EnhancedContext('./');
await context.initialize();

// Build with analyzer enabled
const result = await context.buildContext('readme');
console.log(`Tokens: ${result.tokenCount}`);
console.log(`Savings: ${result.tokenSavings} (${(result.tokenSavings/result.tokenCount*100).toFixed(1)}%)`);
```

### 📊 Token Analysis

Monitor and optimize your token usage:

```bash
# Analyze current token usage
tsdoc tokens

# Compare modes
tsdoc tokens --mode full     # No optimization
tsdoc tokens --mode trimmed  # Standard optimization
tsdoc tokens --analyze       # With smart prioritization
```

## Requirements

- **Node.js** >= 18.0.0
- **TypeScript** project
- **OpenAI API key** (for AI features)

## Troubleshooting

### Token Limit Exceeded

If you hit token limits, try:

```bash
# Enable smart analyzer (default)
tsdoc aidoc

# Use aggressive trimming
tsdoc aidoc --trim

# Check token usage details
tsdoc tokens --all --analyze
```

Or configure stricter limits:

```json
{
  "tsdoc": {
    "context": {
      "maxTokens": 100000,
      "tiers": {
        "essential": { "minScore": 0.9, "trimLevel": "none" },
        "important": { "minScore": 0.7, "trimLevel": "aggressive" },
        "optional": { "minScore": 0.5, "trimLevel": "aggressive" }
      }
    }
  }
}
```

### Missing API Key

Set your OpenAI key:

```bash
export OPENAI_TOKEN="your-key-here"
tsdoc aidoc
```

### Slow Performance

Enable caching and adjust settings:

```json
{
  "tsdoc": {
    "context": {
      "cache": {
        "enabled": true,
        "ttl": 7200,
        "maxSize": 200
      },
      "analyzer": {
        "enabled": true
      }
    }
  }
}
```

### Cache Issues

Clear the cache if needed:

```bash
rm -rf .nogit/context-cache
```

## Why tsdoc?

### 🎯 Actually Understands Your Code
Not just parsing, but real comprehension through AI. The smart context system ensures AI sees the most relevant parts of your codebase.

### ⏱️ Saves Hours
Generate complete, accurate documentation in seconds. The intelligent caching system makes subsequent runs even faster.

### 🔄 Always Up-to-Date
Regenerate documentation with every change. Smart dependency analysis ensures nothing important is missed.

### 🎨 Beautiful Output
Clean, professional documentation every time. AI understands your code's purpose and explains it clearly.

### 🛠️ Developer-Friendly
Built by developers, for developers. Sensible defaults, powerful configuration, and extensive programmatic API.

### 💰 Cost-Effective
Smart context optimization reduces AI API costs by 40-60% without sacrificing quality.

## Architecture

### Core Components

```
@git.zone/tsdoc
├── AiDoc                  # Main AI documentation orchestrator
├── TypeDoc                # Traditional TypeDoc integration
├── Context System         # Smart context building
│   ├── EnhancedContext    # Main context builder
│   ├── LazyFileLoader     # Efficient file loading
│   ├── ContextCache       # Performance caching
│   ├── ContextAnalyzer    # Intelligent file analysis
│   ├── ContextTrimmer     # Adaptive code trimming
│   ├── ConfigManager      # Configuration management
│   └── TaskContextFactory # Task-specific contexts
└── CLI                    # Command-line interface
```

### Data Flow

```
Project Files
     ↓
LazyFileLoader (metadata scan)
     ↓
ContextAnalyzer (scoring & prioritization)
     ↓
ContextCache (check cache)
     ↓
File Loading (parallel, on-demand)
     ↓
ContextTrimmer (tier-based)
     ↓
Token Budget (enforcement)
     ↓
AI Model (GPT-5)
     ↓
Generated Documentation
```

## Contributing

We appreciate your interest! However, we are not accepting external contributions at this time. If you find bugs or have feature requests, please open an issue.

## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.

### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
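As a reading aid for the prioritization weights and tier thresholds documented in the readme above, here is a minimal sketch of how the combined importance score and tier assignment could be computed; the function and field names are illustrative assumptions, not the actual `ContextAnalyzer` internals:

```typescript
// Illustrative only: combines the four documented weights into one score
// and maps the result onto the documented tier thresholds.
interface FileSignals {
  dependencyCentrality: number; // 0..1, how many other files import this one
  taskRelevance: number;        // 0..1, e.g. index.ts ranks high for readme
  tokenEfficiency: number;      // 0..1, information density (value per token)
  recency: number;              // 0..1, recent git changes (for commit tasks)
}

// Defaults from the npmextra.json "prioritization" block shown above.
const weights = { dependency: 0.3, relevance: 0.4, efficiency: 0.2, recency: 0.1 };

function importanceScore(s: FileSignals): number {
  return (
    weights.dependency * s.dependencyCentrality +
    weights.relevance * s.taskRelevance +
    weights.efficiency * s.tokenEfficiency +
    weights.recency * s.recency
  );
}

function tierFor(score: number): 'essential' | 'important' | 'optional' | 'excluded' {
  if (score >= 0.8) return 'essential'; // no trimming
  if (score >= 0.5) return 'important'; // light trimming
  if (score >= 0.2) return 'optional';  // aggressive trimming
  return 'excluded';
}
```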
314
readme.plan.md
Normal file
314
readme.plan.md
Normal file
@@ -0,0 +1,314 @@
|
||||
# TSDocs Context Optimization Plan
|
||||
|
||||
## Problem Statement
|
||||
|
||||
For large TypeScript projects, the context generated for AI-based documentation creation becomes too large, potentially exceeding even o4-mini's 200K token limit. This affects the ability to effectively generate:
|
||||
|
||||
- Project documentation (README.md)
|
||||
- API descriptions and keywords
|
||||
- Commit messages and changelogs
|
||||
|
||||
Current implementation simply includes all TypeScript files and key project files, but lacks intelligent selection, prioritization, or content reduction mechanisms.
|
||||
|
||||
## Analysis of Approaches
|
||||
|
||||
### 1. Smart Content Selection
|
||||
|
||||
**Description:** Intelligently select only files that are necessary for the specific task being performed, using heuristic rules.
|
||||
|
||||
**Advantages:**
|
||||
- Simple to implement
|
||||
- Predictable behavior
|
||||
- Can be fine-tuned for different operations
|
||||
|
||||
**Disadvantages:**
|
||||
- Requires manual tuning of rules
|
||||
- May miss important context in complex projects
|
||||
- Static approach lacks adaptability
|
||||
|
||||
**Implementation Complexity:** Medium
|
||||
|
||||
### 2. File Prioritization
|
||||
|
||||
**Description:** Rank files by relevance using git history, file size, import/export analysis, and relationship to the current task.
|
||||
|
||||
**Advantages:**
|
||||
- Adaptively includes the most relevant files first
|
||||
- Maintains context for frequently changed or central files
|
||||
- Can leverage git history for additional signals
|
||||
|
||||
**Disadvantages:**
|
||||
- Complexity in determining accurate relevance scores
|
||||
- Requires analyzing project structure
|
||||
- May require scanning imports/exports for dependency analysis
|
||||
|
||||
**Implementation Complexity:** High
|
||||
|
||||
### 3. Chunking Strategy
|
||||
|
||||
**Description:** Process the project in logical segments, generating intermediate results that are then combined to create the final output.
|
||||
|
||||
**Advantages:**
|
||||
- Can handle projects of any size
|
||||
- Focused context for each specific part
|
||||
- May improve quality by focusing on specific areas deeply
|
||||
|
||||
**Disadvantages:**
|
||||
- Complex orchestration of multiple AI calls
|
||||
- Challenge in maintaining consistency across chunks
|
||||
- May increase time and cost for processing
|
||||
|
||||
**Implementation Complexity:** High
|
||||
|
||||
### 4. Dynamic Context Trimming
|
||||
|
||||
**Description:** Automatically reduce context by removing non-essential code while preserving structure. Techniques include:
|
||||
- Removing implementation details but keeping interfaces and type definitions
|
||||
- Truncating large functions while keeping signatures
|
||||
- Removing comments and whitespace (except JSDoc)
|
||||
- Keeping only imports/exports for context files
|
||||
|
||||
**Advantages:**
|
||||
- Preserves full project structure
|
||||
- Flexible token usage based on importance
|
||||
- Good balance between completeness and token efficiency
|
||||
|
||||
**Disadvantages:**
|
||||
- Potential to remove important implementation details
|
||||
- Risk of missing context needed for specific tasks
|
||||
- Complex rules for what to trim vs keep
|
||||
|
||||
**Implementation Complexity:** Medium
|
||||
|
||||
### 5. Embeddings-Based Retrieval
|
||||
|
||||
**Description:** Create vector embeddings of project files and retrieve only the most relevant ones for a specific task using semantic similarity.
|
||||
|
||||
**Advantages:**
|
||||
- Highly adaptive to different types of requests
|
||||
- Leverages semantic understanding of content
|
||||
- Can scale to extremely large projects
|
||||
|
||||
**Disadvantages:**
|
||||
- Requires setting up and managing embeddings database
|
||||
- Added complexity of running vector similarity searches
|
||||
- Higher resource requirements for maintaining embeddings
|
||||
|
||||
**Implementation Complexity:** Very High
|
||||
|
||||
### 6. Task-Specific Contexts
|
||||
|
||||
**Description:** Create separate optimized contexts for different tasks (readme, commit messages, etc.) with distinct file selection and processing strategies.
|
||||
|
||||
**Advantages:**
|
||||
- Highly optimized for each specific task
|
||||
- Efficient token usage for each operation
|
||||
- Improved quality through task-focused contexts
|
||||
|
||||
**Disadvantages:**
|
||||
- Maintenance of multiple context building strategies
|
||||
- More complex configuration
|
||||
- Potential duplication in implementation
|
||||
|
||||
**Implementation Complexity:** Medium
|
||||
|
||||
### 7. Recursive Summarization
|
||||
|
||||
**Description:** Summarize larger files first, then include these summaries in the final context along with smaller files included in full.
|
||||
|
||||
**Advantages:**
|
||||
- Can handle arbitrary project sizes
|
||||
- Preserves essential information from all files
|
||||
- Balanced approach to token usage
|
||||
|
||||
**Disadvantages:**
|
||||
- Quality loss from summarization
|
||||
- Increased processing time from multiple AI calls
|
||||
- Complex orchestration logic
|
||||
|
||||
**Implementation Complexity:** High

## Implementation Strategy

We propose a phased implementation approach, starting with the most impactful and straightforward approaches, then building toward more complex solutions as needed:

### Phase 1: Foundation (1-2 weeks)

1. **Implement Dynamic Context Trimming**
   - Create a `ContextProcessor` class that takes SmartFile objects and applies trimming rules
   - Implement configurable trimming rules (remove implementations, keep signatures)
   - Add a configuration option to control trimming aggressiveness
   - Support preserving JSDoc comments while removing other comments

2. **Enhance Token Monitoring**
   - Track token usage per file to identify problematic files
   - Implement token budgeting to stay within limits
   - Add detailed token reporting for optimization
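
A sketch of how a token budget could be enforced (hypothetical names; files are assumed to be pre-sorted by importance, and the tokenizer would come from whatever counting library the project already uses):

```typescript
// Sketch of per-file token budgeting. Files are included in priority
// order until the budget runs out; usage is recorded per file so the
// detailed reporting described above falls out of the same pass.
interface ILoadedFile {
  relativePath: string;
  contents: string;
}

function applyTokenBudget(
  files: ILoadedFile[],
  countTokens: (text: string) => number,
  maxTokens: number
): { included: ILoadedFile[]; report: Map<string, number> } {
  const included: ILoadedFile[] = [];
  const report = new Map<string, number>();
  let used = 0;
  for (const file of files) {
    const cost = countTokens(file.contents);
    report.set(file.relativePath, cost); // track usage per file
    if (used + cost > maxTokens) {
      continue; // over budget: skip, but keep trying smaller files
    }
    used += cost;
    included.push(file);
  }
  return { included, report };
}
```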

### Phase 2: Smart Selection (2-3 weeks)

3. **Implement Task-Specific Contexts**
   - Create specialized context builders for readme, commit messages, and descriptions
   - Customize file selection rules for each task
   - Add configuration options for task-specific settings

4. **Add Smart Content Selection**
   - Implement heuristic rules for file importance
   - Create configuration for inclusion/exclusion patterns
   - Add ability to focus on specific directories or modules

### Phase 3: Advanced Techniques (3-4 weeks)

5. **Implement File Prioritization**
   - Add git history analysis to identify frequently changed files
   - Implement dependency analysis to identify central files
   - Create a scoring system for file relevance
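
A sketch of how such a scoring system might combine these signals (the weights and factor names are illustrative, not the final design):

```typescript
// Illustrative relevance scoring: a weighted sum of normalized factors.
interface IFileSignals {
  changeFrequency: number; // 0..1, from git history analysis
  centrality: number;      // 0..1, share of files importing this one
  isEntryPoint: boolean;   // e.g. an index.ts
}

function scoreFile(signals: IFileSignals): number {
  const entryBonus = signals.isEntryPoint ? 0.2 : 0;
  // Clamp to [0, 1] so downstream tier thresholds stay meaningful.
  return Math.min(
    1,
    0.4 * signals.changeFrequency + 0.4 * signals.centrality + entryBonus
  );
}
```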

6. **Add Optional Recursive Summarization**
   - Implement file summarization for large files
   - Create a hybrid approach that mixes full files and summaries
   - Add configuration to control summarization thresholds

### Phase 4: Research-Based Approaches (Future Consideration)

7. **Research and Evaluate Embeddings-Based Retrieval**
   - Prototype embeddings creation for TypeScript files
   - Evaluate performance and accuracy
   - Implement if benefits justify the complexity

8. **Explore Chunking Strategies**
   - Research effective chunking approaches for documentation
   - Prototype and evaluate performance
   - Implement if benefits justify the complexity

## Technical Design

### Core Components

1. **ContextBuilder** - Enhanced version of the current ProjectContext

```typescript
interface IContextBuilder {
  buildContext(): Promise<string>;
  getTokenCount(): number;
  setContextMode(mode: 'normal' | 'trimmed' | 'summarized'): void;
  setTokenBudget(maxTokens: number): void;
  setPrioritizationStrategy(strategy: IPrioritizationStrategy): void;
}
```

2. **FileProcessor** - Handles per-file processing and trimming

```typescript
interface IFileProcessor {
  processFile(file: SmartFile): Promise<string>;
  setProcessingMode(mode: 'full' | 'trim' | 'summarize'): void;
  getTokenCount(): number;
}
```

3. **PrioritizationStrategy** - Ranks files by importance

```typescript
interface IPrioritizationStrategy {
  rankFiles(files: SmartFile[], context: string): Promise<SmartFile[]>;
  setImportanceMetrics(metrics: IImportanceMetrics): void;
}
```

4. **TaskContextFactory** - Creates optimized contexts for specific tasks

```typescript
interface ITaskContextFactory {
  createContextForReadme(projectDir: string): Promise<string>;
  createContextForCommit(projectDir: string, diff: string): Promise<string>;
  createContextForDescription(projectDir: string): Promise<string>;
}
```
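
To illustrate how these components might fit together, here is a hypothetical wiring sketch; none of this code exists yet, and the names simply follow the interfaces above:

```typescript
// Hypothetical composition of the proposed components (sketch only).
async function buildTrimmedContext(
  builder: IContextBuilder,
  strategy: IPrioritizationStrategy
): Promise<string> {
  builder.setTokenBudget(190000); // leave headroom under the 200K model limit
  builder.setContextMode('trimmed'); // drop implementations, keep signatures
  builder.setPrioritizationStrategy(strategy); // rank files before inclusion
  const context = await builder.buildContext();
  console.log(`Context built with ${builder.getTokenCount()} tokens`);
  return context;
}
```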

### Configuration Options

The system will support configuration via a new section in `npmextra.json`:

```json
{
  "tsdoc": {
    "context": {
      "maxTokens": 190000,
      "defaultMode": "dynamic",
      "taskSpecificSettings": {
        "readme": {
          "mode": "full",
          "includePaths": ["src/", "lib/"],
          "excludePaths": ["test/", "examples/"]
        },
        "commit": {
          "mode": "trimmed",
          "focusOnChangedFiles": true
        },
        "description": {
          "mode": "summarized",
          "includePackageInfo": true
        }
      },
      "trimming": {
        "removeImplementations": true,
        "preserveInterfaces": true,
        "preserveTypeDefs": true,
        "preserveJSDoc": true,
        "maxFunctionLines": 5
      }
    }
  }
}
```
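
For illustration, a minimal sketch of reading this block, assuming a plain JSON read of `npmextra.json`; the real implementation would presumably go through the existing npmextra tooling:

```typescript
import * as fs from 'fs';
import * as path from 'path';

interface IContextConfig {
  maxTokens?: number;
  defaultMode?: 'full' | 'trimmed' | 'summarized' | 'dynamic';
  taskSpecificSettings?: Record<string, { mode?: string }>;
}

// Read the proposed tsdoc.context block, tolerating a missing file or keys.
function readContextConfig(projectDir: string): IContextConfig {
  const npmextraPath = path.join(projectDir, 'npmextra.json');
  if (!fs.existsSync(npmextraPath)) {
    return {};
  }
  const parsed = JSON.parse(fs.readFileSync(npmextraPath, 'utf8'));
  // Fall back to an empty object so missing keys are harmless.
  return parsed?.tsdoc?.context ?? {};
}
```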

## Cost-Benefit Analysis

### Cost Considerations

1. **Development costs**
   - Initial implementation of foundational components (~30-40 hours)
   - Testing and validation across different project sizes (~10-15 hours)
   - Documentation and configuration examples (~5 hours)

2. **Operational costs**
   - Potentially increased processing time for context preparation
   - Additional API calls for summarization or embeddings approaches
   - Monitoring and maintenance of the system

### Benefits

1. **Scalability**
   - Support for projects of any size, including those beyond o4-mini's 200K token limit
   - Future-proof design that can adapt to different models and token limits

2. **Quality improvements**
   - More focused contexts lead to better AI outputs
   - Task-specific optimization improves relevance
   - Consistent performance regardless of project size

3. **User experience**
   - Predictable behavior for all project sizes
   - Transparent token usage reporting
   - Configuration options for different usage patterns

## First Deliverable

For immediate improvements, we recommend implementing Dynamic Context Trimming and Task-Specific Contexts first, as these offer the best balance of impact and implementation complexity.

### Implementation Plan for Dynamic Context Trimming

1. Create a basic `ContextTrimmer` class that processes TypeScript files:
   - Remove function bodies but keep signatures
   - Preserve interface and type definitions
   - Keep imports and exports
   - Preserve JSDoc comments

2. Integrate with the existing ProjectContext class:
   - Add a trimming mode option
   - Apply trimming during the context building process
   - Track and report token savings

3. Modify the CLI to support trimming options:
   - Add a `--trim` flag to enable trimming
   - Add a `--trim-level` option for controlling aggressiveness
   - Show token usage with and without trimming

This approach could reduce token usage by 40-70% while preserving the essential structure of the codebase, making it suitable for large projects while maintaining high-quality AI outputs.
39
test/test.aidoc.nonci.ts
Normal file
@@ -0,0 +1,39 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
let testQenv = new qenv.Qenv('./', '.nogit/');

import * as tsdocs from '../ts/index.js';

let aidocs: tsdocs.AiDoc;

tap.test('should create an AIdocs class', async () => {
  aidocs = new tsdocs.AiDoc({
    OPENAI_TOKEN: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
  });
  expect(aidocs).toBeInstanceOf(tsdocs.AiDoc);
});

tap.test('should start AIdocs', async () => {
  await aidocs.start();
});

tap.skip.test('should build a readme', async () => {
  await aidocs.buildReadme('./');
});

tap.skip.test('should build a description', async () => {
  await aidocs.buildDescription('./');
});

tap.test('should build commit object', async () => {
  const commitObject = await aidocs.buildNextCommitObject('./');
  console.log(commitObject);
  expect(commitObject).not.toBeUndefined();
  expect(commitObject).toHaveProperty('recommendedNextVersion');
  expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
  expect(commitObject).toHaveProperty('recommendedNextVersionScope');
  expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
});

tap.start();
464
test/test.contextanalyzer.node.ts
Normal file
@@ -0,0 +1,464 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { ContextAnalyzer } from '../ts/context/context-analyzer.js';
import type { IFileMetadata } from '../ts/context/types.js';

const testProjectRoot = process.cwd();

tap.test('ContextAnalyzer should create instance with default weights', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);
  expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});

tap.test('ContextAnalyzer should create instance with custom weights', async () => {
  const analyzer = new ContextAnalyzer(
    testProjectRoot,
    {
      dependencyWeight: 0.5,
      relevanceWeight: 0.3,
      efficiencyWeight: 0.1,
      recencyWeight: 0.1
    }
  );
  expect(analyzer).toBeInstanceOf(ContextAnalyzer);
});

tap.test('ContextAnalyzer.analyze should return analysis result with files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.taskType).toEqual('readme');
  expect(result.files.length).toEqual(2);
  expect(result.totalFiles).toEqual(2);
  expect(result.analysisDuration).toBeGreaterThan(0);
  expect(result.dependencyGraph).toBeDefined();
});

tap.test('ContextAnalyzer.analyze should assign importance scores to files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.files[0].importanceScore).toBeGreaterThanOrEqual(0);
  expect(result.files[0].importanceScore).toBeLessThanOrEqual(1);
});

tap.test('ContextAnalyzer.analyze should sort files by importance score', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
      relativePath: 'test/test.basic.node.ts',
      size: 2000,
      mtime: Date.now(),
      estimatedTokens: 500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // Files should be sorted by importance (highest first)
  for (let i = 0; i < result.files.length - 1; i++) {
    expect(result.files[i].importanceScore).toBeGreaterThanOrEqual(
      result.files[i + 1].importanceScore
    );
  }
});

tap.test('ContextAnalyzer.analyze should assign tiers based on scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  const file = result.files[0];
  expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});

tap.test('ContextAnalyzer should prioritize index.ts files for README task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'ts/some-helper.ts'),
      relativePath: 'ts/some-helper.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // index.ts should have higher relevance score
  const indexFile = result.files.find(f => f.path.includes('index.ts'));
  const helperFile = result.files.find(f => f.path.includes('some-helper.ts'));

  if (indexFile && helperFile) {
    expect(indexFile.relevanceScore).toBeGreaterThan(helperFile.relevanceScore);
  }
});

tap.test('ContextAnalyzer should deprioritize test files for README task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: path.join(testProjectRoot, 'test/test.basic.node.ts'),
      relativePath: 'test/test.basic.node.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // Source file should have higher relevance than test file
  const sourceFile = result.files.find(f => f.path.includes('ts/context/types.ts'));
  const testFile = result.files.find(f => f.path.includes('test/test.basic.node.ts'));

  if (sourceFile && testFile) {
    expect(sourceFile.relevanceScore).toBeGreaterThan(testFile.relevanceScore);
  }
});

tap.test('ContextAnalyzer should prioritize changed files for commit task', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const changedFile = path.join(testProjectRoot, 'ts/context/types.ts');
  const unchangedFile = path.join(testProjectRoot, 'ts/index.ts');

  const metadata: IFileMetadata[] = [
    {
      path: changedFile,
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    },
    {
      path: unchangedFile,
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'commit', [changedFile]);

  const changed = result.files.find(f => f.path === changedFile);
  const unchanged = result.files.find(f => f.path === unchangedFile);

  if (changed && unchanged) {
    // Changed file should have recency score of 1.0
    expect(changed.recencyScore).toEqual(1.0);
    // Unchanged file should have recency score of 0
    expect(unchanged.recencyScore).toEqual(0);
  }
});

tap.test('ContextAnalyzer should calculate efficiency scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000, // Optimal size
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/very-large-file.ts'),
      relativePath: 'ts/very-large-file.ts',
      size: 50000, // Too large
      mtime: Date.now(),
      estimatedTokens: 12500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // Optimal size file should have better efficiency score
  const optimalFile = result.files.find(f => f.path.includes('types.ts'));
  const largeFile = result.files.find(f => f.path.includes('very-large-file.ts'));

  if (optimalFile && largeFile) {
    expect(optimalFile.efficiencyScore).toBeGreaterThan(largeFile.efficiencyScore);
  }
});

tap.test('ContextAnalyzer should build dependency graph', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    },
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.dependencyGraph.size).toBeGreaterThan(0);

  // Check that each file has dependency info
  for (const meta of metadata) {
    const deps = result.dependencyGraph.get(meta.path);
    expect(deps).toBeDefined();
    expect(deps!.path).toEqual(meta.path);
    expect(deps!.imports).toBeDefined();
    expect(deps!.importedBy).toBeDefined();
    expect(deps!.centrality).toBeGreaterThanOrEqual(0);
  }
});

tap.test('ContextAnalyzer should calculate centrality scores', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: path.join(testProjectRoot, 'ts/context/enhanced-context.ts'),
      relativePath: 'ts/context/enhanced-context.ts',
      size: 10000,
      mtime: Date.now(),
      estimatedTokens: 2500
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // All centrality scores should be between 0 and 1
  for (const [, deps] of result.dependencyGraph) {
    expect(deps.centrality).toBeGreaterThanOrEqual(0);
    expect(deps.centrality).toBeLessThanOrEqual(1);
  }
});

tap.test('ContextAnalyzer should assign higher centrality to highly imported files', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  // types.ts is likely imported by many files
  const typesPath = path.join(testProjectRoot, 'ts/context/types.ts');
  // A test file is likely imported by fewer files
  const testPath = path.join(testProjectRoot, 'test/test.basic.node.ts');

  const metadata: IFileMetadata[] = [
    {
      path: typesPath,
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    },
    {
      path: testPath,
      relativePath: 'test/test.basic.node.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  const typesDeps = result.dependencyGraph.get(typesPath);
  const testDeps = result.dependencyGraph.get(testPath);

  if (typesDeps && testDeps) {
    // types.ts should generally have higher centrality due to being imported more
    expect(typesDeps.centrality).toBeGreaterThanOrEqual(0);
    expect(testDeps.centrality).toBeGreaterThanOrEqual(0);
  }
});

tap.test('ContextAnalyzer should provide reason for scoring', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/index.ts'),
      relativePath: 'ts/index.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  expect(result.files[0].reason).toBeDefined();
  expect(result.files[0].reason!.length).toBeGreaterThan(0);
});

tap.test('ContextAnalyzer should handle empty metadata array', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const result = await analyzer.analyze([], 'readme');

  expect(result.files.length).toEqual(0);
  expect(result.totalFiles).toEqual(0);
  expect(result.dependencyGraph.size).toEqual(0);
});

tap.test('ContextAnalyzer should respect custom tier configuration', async () => {
  const analyzer = new ContextAnalyzer(
    testProjectRoot,
    {},
    {
      essential: { minScore: 0.9, trimLevel: 'none' },
      important: { minScore: 0.7, trimLevel: 'light' },
      optional: { minScore: 0.5, trimLevel: 'aggressive' }
    }
  );

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 3000,
      mtime: Date.now(),
      estimatedTokens: 750
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  // Should use custom tier thresholds
  const file = result.files[0];
  expect(['essential', 'important', 'optional', 'excluded']).toContain(file.tier);
});

tap.test('ContextAnalyzer should calculate combined importance score from all factors', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot, {
    dependencyWeight: 0.25,
    relevanceWeight: 0.25,
    efficiencyWeight: 0.25,
    recencyWeight: 0.25
  });

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'ts/context/types.ts'),
      relativePath: 'ts/context/types.ts',
      size: 5000,
      mtime: Date.now(),
      estimatedTokens: 1250
    }
  ];

  const result = await analyzer.analyze(metadata, 'readme');

  const file = result.files[0];

  // Importance score should be weighted sum of all factors
  // With equal weights (0.25 each), importance should be average of all scores
  const expectedImportance =
    (file.relevanceScore * 0.25) +
    (file.centralityScore * 0.25) +
    (file.efficiencyScore * 0.25) +
    (file.recencyScore * 0.25);

  expect(file.importanceScore).toBeCloseTo(expectedImportance, 2);
});

tap.test('ContextAnalyzer should complete analysis within reasonable time', async () => {
  const analyzer = new ContextAnalyzer(testProjectRoot);

  const metadata: IFileMetadata[] = Array.from({ length: 10 }, (_, i) => ({
    path: path.join(testProjectRoot, `ts/file${i}.ts`),
    relativePath: `ts/file${i}.ts`,
    size: 3000,
    mtime: Date.now(),
    estimatedTokens: 750
  }));

  const startTime = Date.now();
  const result = await analyzer.analyze(metadata, 'readme');
  const endTime = Date.now();

  const duration = endTime - startTime;

  expect(result.analysisDuration).toBeGreaterThan(0);
  expect(duration).toBeLessThan(10000); // Should complete within 10 seconds
});

export default tap.start();
456
test/test.contextcache.node.ts
Normal file
@@ -0,0 +1,456 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import * as fs from 'fs';
import { ContextCache } from '../ts/context/context-cache.js';
import type { ICacheEntry } from '../ts/context/types.js';

const testProjectRoot = process.cwd();
const testCacheDir = path.join(testProjectRoot, '.nogit', 'test-cache');

// Helper to clean up test cache directory
async function cleanupTestCache() {
  try {
    await fs.promises.rm(testCacheDir, { recursive: true, force: true });
  } catch (error) {
    // Ignore if directory doesn't exist
  }
}

tap.test('ContextCache should create instance with default config', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });

  expect(cache).toBeInstanceOf(ContextCache);

  await cleanupTestCache();
});

tap.test('ContextCache.init should create cache directory', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });

  await cache.init();

  // Check that cache directory was created
  const exists = await fs.promises.access(testCacheDir).then(() => true).catch(() => false);
  expect(exists).toBe(true);

  await cleanupTestCache();
});

tap.test('ContextCache.set should store cache entry', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'package.json');
  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const retrieved = await cache.get(testPath);
  expect(retrieved).toBeDefined();
  expect(retrieved!.contents).toEqual('test content');
  expect(retrieved!.tokenCount).toEqual(100);

  await cleanupTestCache();
});

tap.test('ContextCache.get should return null for non-existent entry', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const retrieved = await cache.get('/non/existent/path.ts');
  expect(retrieved).toBeNull();

  await cleanupTestCache();
});

tap.test('ContextCache.get should invalidate expired entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true,
    ttl: 1 // 1 second TTL
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'test-file.ts');
  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now() - 2000 // Cached 2 seconds ago (expired)
  };

  await cache.set(entry);

  // Wait a bit to ensure expiration logic runs
  await new Promise(resolve => setTimeout(resolve, 100));

  const retrieved = await cache.get(testPath);
  expect(retrieved).toBeNull(); // Should be expired

  await cleanupTestCache();
});

tap.test('ContextCache.get should invalidate entries when file mtime changes', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'package.json');
  const stats = await fs.promises.stat(testPath);
  const oldMtime = Math.floor(stats.mtimeMs);

  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: oldMtime - 1000, // Old mtime (file has changed)
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const retrieved = await cache.get(testPath);
  expect(retrieved).toBeNull(); // Should be invalidated due to mtime mismatch

  await cleanupTestCache();
});

tap.test('ContextCache.has should check if file is cached and valid', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const testPath = path.join(testProjectRoot, 'package.json');
  const stats = await fs.promises.stat(testPath);

  const entry: ICacheEntry = {
    path: testPath,
    contents: 'test content',
    tokenCount: 100,
    mtime: Math.floor(stats.mtimeMs),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const hasIt = await cache.has(testPath);
  expect(hasIt).toBe(true);

  const doesNotHaveIt = await cache.has('/non/existent/path.ts');
  expect(doesNotHaveIt).toBe(false);

  await cleanupTestCache();
});

tap.test('ContextCache.setMany should store multiple entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entries: ICacheEntry[] = [
    {
      path: '/test/file1.ts',
      contents: 'content 1',
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now()
    },
    {
      path: '/test/file2.ts',
      contents: 'content 2',
      tokenCount: 200,
      mtime: Date.now(),
      cachedAt: Date.now()
    }
  ];

  await cache.setMany(entries);

  const stats = cache.getStats();
  expect(stats.entries).toBeGreaterThanOrEqual(2);

  await cleanupTestCache();
});

tap.test('ContextCache.getStats should return cache statistics', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entry: ICacheEntry = {
    path: '/test/file.ts',
    contents: 'test content with some length',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const stats = cache.getStats();

  expect(stats.entries).toEqual(1);
  expect(stats.totalSize).toBeGreaterThan(0);
  expect(stats.oldestEntry).toBeDefined();
  expect(stats.newestEntry).toBeDefined();

  await cleanupTestCache();
});

tap.test('ContextCache.clear should clear all entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entry: ICacheEntry = {
    path: '/test/file.ts',
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);
  expect(cache.getStats().entries).toEqual(1);

  await cache.clear();
  expect(cache.getStats().entries).toEqual(0);

  await cleanupTestCache();
});

tap.test('ContextCache.clearPaths should clear specific entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const entries: ICacheEntry[] = [
    {
      path: '/test/file1.ts',
      contents: 'content 1',
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now()
    },
    {
      path: '/test/file2.ts',
      contents: 'content 2',
      tokenCount: 200,
      mtime: Date.now(),
      cachedAt: Date.now()
    }
  ];

  await cache.setMany(entries);
  expect(cache.getStats().entries).toEqual(2);

  await cache.clearPaths(['/test/file1.ts']);
  expect(cache.getStats().entries).toEqual(1);

  await cleanupTestCache();
});

tap.test('ContextCache should enforce max size by evicting oldest entries', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true,
    maxSize: 0.001 // Very small: 0.001 MB = 1KB
  });
  await cache.init();

  // Add entries that exceed the max size
  const largeContent = 'x'.repeat(500); // 500 bytes

  const entries: ICacheEntry[] = [
    {
      path: '/test/file1.ts',
      contents: largeContent,
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now() - 3000 // Oldest
    },
    {
      path: '/test/file2.ts',
      contents: largeContent,
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now() - 2000
    },
    {
      path: '/test/file3.ts',
      contents: largeContent,
      tokenCount: 100,
      mtime: Date.now(),
      cachedAt: Date.now() - 1000 // Newest
    }
  ];

  await cache.setMany(entries);

  const stats = cache.getStats();
  // Should have evicted oldest entries to stay under size limit
  expect(stats.totalSize).toBeLessThanOrEqual(1024); // 1KB

  await cleanupTestCache();
});

tap.test('ContextCache should not cache when disabled', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: false
  });
  await cache.init();

  const entry: ICacheEntry = {
    path: '/test/file.ts',
    contents: 'test content',
    tokenCount: 100,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache.set(entry);

  const retrieved = await cache.get('/test/file.ts');
  expect(retrieved).toBeNull();

  await cleanupTestCache();
});

tap.test('ContextCache should persist to disk and reload', async () => {
  await cleanupTestCache();

  // Create first cache instance and add entry
  const cache1 = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache1.init();

  const entry: ICacheEntry = {
    path: '/test/persistent-file.ts',
    contents: 'persistent content',
    tokenCount: 150,
    mtime: Date.now(),
    cachedAt: Date.now()
  };

  await cache1.set(entry);

  // Wait for persist
  await new Promise(resolve => setTimeout(resolve, 500));

  // Create second cache instance (should reload from disk)
  const cache2 = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache2.init();

  const stats = cache2.getStats();
  expect(stats.entries).toBeGreaterThan(0);

  await cleanupTestCache();
});

tap.test('ContextCache should handle invalid cache index gracefully', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });

  // Create cache dir manually
  await fs.promises.mkdir(testCacheDir, { recursive: true });

  // Write invalid JSON to cache index
  const cacheIndexPath = path.join(testCacheDir, 'index.json');
  await fs.promises.writeFile(cacheIndexPath, 'invalid json {', 'utf-8');

  // Should not throw, should just start with empty cache
  await cache.init();

  const stats = cache.getStats();
  expect(stats.entries).toEqual(0);

  await cleanupTestCache();
});

tap.test('ContextCache should return proper stats for empty cache', async () => {
  await cleanupTestCache();

  const cache = new ContextCache(testProjectRoot, {
    directory: testCacheDir,
    enabled: true
  });
  await cache.init();

  const stats = cache.getStats();

  expect(stats.entries).toEqual(0);
  expect(stats.totalSize).toEqual(0);
  expect(stats.oldestEntry).toBeNull();
  expect(stats.newestEntry).toBeNull();

  await cleanupTestCache();
});

export default tap.start();
242
test/test.lazyfileloader.node.ts
Normal file
@@ -0,0 +1,242 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as path from 'path';
import { LazyFileLoader } from '../ts/context/lazy-file-loader.js';
import type { IFileMetadata } from '../ts/context/types.js';

const testProjectRoot = process.cwd();

tap.test('LazyFileLoader should create instance with project root', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  expect(loader).toBeInstanceOf(LazyFileLoader);
});

tap.test('LazyFileLoader.getMetadata should return file metadata without loading contents', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const metadata = await loader.getMetadata(packageJsonPath);

  expect(metadata.path).toEqual(packageJsonPath);
  expect(metadata.relativePath).toEqual('package.json');
  expect(metadata.size).toBeGreaterThan(0);
  expect(metadata.mtime).toBeGreaterThan(0);
  expect(metadata.estimatedTokens).toBeGreaterThan(0);
  // Rough estimate: size / 4
  expect(metadata.estimatedTokens).toBeCloseTo(metadata.size / 4, 10);
});

tap.test('LazyFileLoader.getMetadata should cache metadata for same file', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const metadata1 = await loader.getMetadata(packageJsonPath);
  const metadata2 = await loader.getMetadata(packageJsonPath);

  // Should return identical metadata from cache
  expect(metadata1.mtime).toEqual(metadata2.mtime);
  expect(metadata1.size).toEqual(metadata2.size);
  expect(metadata1.estimatedTokens).toEqual(metadata2.estimatedTokens);
});

tap.test('LazyFileLoader.scanFiles should scan TypeScript files', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata = await loader.scanFiles(['ts/context/types.ts']);

  expect(metadata.length).toBeGreaterThan(0);
  const typesFile = metadata.find(m => m.relativePath.includes('types.ts'));
  expect(typesFile).toBeDefined();
  expect(typesFile!.size).toBeGreaterThan(0);
  expect(typesFile!.estimatedTokens).toBeGreaterThan(0);
});

tap.test('LazyFileLoader.scanFiles should handle multiple globs', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata = await loader.scanFiles([
    'package.json',
    'readme.md'
  ]);

  expect(metadata.length).toBeGreaterThanOrEqual(2);
  const hasPackageJson = metadata.some(m => m.relativePath === 'package.json');
  const hasReadme = metadata.some(m => m.relativePath.toLowerCase() === 'readme.md');
  expect(hasPackageJson).toBe(true);
  expect(hasReadme).toBe(true);
});

tap.test('LazyFileLoader.loadFile should load file with actual token count', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);

  expect(fileInfo.path).toEqual(packageJsonPath);
  expect(fileInfo.contents).toBeDefined();
  expect(fileInfo.contents.length).toBeGreaterThan(0);
  expect(fileInfo.tokenCount).toBeGreaterThan(0);
  expect(fileInfo.relativePath).toEqual('package.json');
});

tap.test('LazyFileLoader.loadFiles should load multiple files in parallel', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'package.json'),
      relativePath: 'package.json',
      size: 100,
      mtime: Date.now(),
      estimatedTokens: 25
    },
    {
      path: path.join(testProjectRoot, 'readme.md'),
      relativePath: 'readme.md',
      size: 200,
      mtime: Date.now(),
      estimatedTokens: 50
    }
  ];

  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const startTime = Date.now();
  const files = await loader.loadFiles(metadata, tokenizer);
  const endTime = Date.now();

  expect(files.length).toEqual(2);
  expect(files[0].contents).toBeDefined();
  expect(files[1].contents).toBeDefined();

  // Should be fast (parallel loading)
  expect(endTime - startTime).toBeLessThan(5000); // 5 seconds max
});

tap.test('LazyFileLoader.updateImportanceScores should update cached metadata', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  // Get initial metadata
  await loader.getMetadata(packageJsonPath);

  // Update importance scores
  const scores = new Map<string, number>();
  scores.set(packageJsonPath, 0.95);
  loader.updateImportanceScores(scores);

  // Check cached metadata has updated score
  const cached = loader.getCachedMetadata();
  const packageJsonMeta = cached.find(m => m.path === packageJsonPath);

  expect(packageJsonMeta).toBeDefined();
  expect(packageJsonMeta!.importanceScore).toEqual(0.95);
});

tap.test('LazyFileLoader.getTotalEstimatedTokens should sum all cached metadata tokens', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  // Scan some files
  await loader.scanFiles(['package.json', 'readme.md']);

  const totalTokens = loader.getTotalEstimatedTokens();

  expect(totalTokens).toBeGreaterThan(0);
});

tap.test('LazyFileLoader.clearCache should clear metadata cache', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  // Scan files to populate cache
  await loader.scanFiles(['package.json']);
  expect(loader.getCachedMetadata().length).toBeGreaterThan(0);

  // Clear cache
  loader.clearCache();

  expect(loader.getCachedMetadata().length).toEqual(0);
});

tap.test('LazyFileLoader.getCachedMetadata should return all cached entries', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  // Scan files
  await loader.scanFiles(['package.json', 'readme.md']);

  const cached = loader.getCachedMetadata();

  expect(cached.length).toBeGreaterThanOrEqual(2);
  expect(cached.every(m => m.path && m.size && m.estimatedTokens)).toBe(true);
});

tap.test('LazyFileLoader should handle non-existent files gracefully', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const nonExistentPath = path.join(testProjectRoot, 'this-file-does-not-exist.ts');

  try {
    await loader.getMetadata(nonExistentPath);
    expect(false).toBe(true); // Should not reach here
  } catch (error) {
    expect(error).toBeDefined();
  }
});

tap.test('LazyFileLoader.loadFiles should filter out failed file loads', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata: IFileMetadata[] = [
    {
      path: path.join(testProjectRoot, 'package.json'),
      relativePath: 'package.json',
      size: 100,
      mtime: Date.now(),
      estimatedTokens: 25
    },
    {
      path: path.join(testProjectRoot, 'non-existent-file.txt'),
      relativePath: 'non-existent-file.txt',
      size: 100,
      mtime: Date.now(),
      estimatedTokens: 25
    }
  ];

  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const files = await loader.loadFiles(metadata, tokenizer);

  // Should only include the successfully loaded file
  expect(files.length).toEqual(1);
  expect(files[0].relativePath).toEqual('package.json');
});

tap.test('LazyFileLoader should handle glob patterns for TypeScript source files', async () => {
  const loader = new LazyFileLoader(testProjectRoot);

  const metadata = await loader.scanFiles(['ts/context/*.ts']);

  expect(metadata.length).toBeGreaterThan(0);

  // Should find multiple context files
  const hasEnhancedContext = metadata.some(m => m.relativePath.includes('enhanced-context.ts'));
  const hasTypes = metadata.some(m => m.relativePath.includes('types.ts'));

  expect(hasEnhancedContext).toBe(true);
  expect(hasTypes).toBe(true);
});

tap.test('LazyFileLoader should estimate tokens reasonably accurately', async () => {
  const loader = new LazyFileLoader(testProjectRoot);
  const packageJsonPath = path.join(testProjectRoot, 'package.json');

  const metadata = await loader.getMetadata(packageJsonPath);
  const tokenizer = (content: string) => Math.ceil(content.length / 4);
  const fileInfo = await loader.loadFile(packageJsonPath, tokenizer);

  // Estimated tokens should be close to actual (within reasonable range)
  const difference = Math.abs(metadata.estimatedTokens - fileInfo.tokenCount);
  const percentDiff = (difference / fileInfo.tokenCount) * 100;

  // Should be within 20% accuracy (since it's just an estimate)
  expect(percentDiff).toBeLessThan(20);
});

export default tap.start();
@@ -1,5 +1,5 @@
-import { expect, tap } from '@pushrocks/tapbundle';
-import * as tsdoc from '../ts/index';
+import { expect, tap } from '@push.rocks/tapbundle';
+import * as tsdoc from '../ts/index.js';
 
 tap.test('first test', async () => {
   console.log('test');
8
ts/00_commitinfo_data.ts
Normal file
@@ -0,0 +1,8 @@
/**
 * autocreated commitinfo by @push.rocks/commitinfo
 */
export const commitinfo = {
  name: '@git.zone/tsdoc',
  version: '1.6.0',
  description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
}
146
ts/aidocs_classes/commit.ts
Normal file
@@ -0,0 +1,146 @@
import * as plugins from '../plugins.js';
import { AiDoc } from '../classes.aidoc.js';
import { ProjectContext } from './projectcontext.js';

export interface INextCommitObject {
  recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
  recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
  recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
  recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
  recommendedNextVersion: string; // the recommended next version of the project, x.x.x
  changelog?: string; // the changelog for the next version
}

export class Commit {
  private aiDocsRef: AiDoc;
  private projectDir: string;

  constructor(aiDocsRef: AiDoc, projectDirArg: string) {
    this.aiDocsRef = aiDocsRef;
    this.projectDir = projectDirArg;
  }

  public async buildNextCommitObject(): Promise<INextCommitObject> {
    const smartgitInstance = new plugins.smartgit.Smartgit();
    await smartgitInstance.init();
    const gitRepo = await plugins.smartgit.GitRepo.fromOpeningRepoDir(
      smartgitInstance,
      this.projectDir
    );
    const diffStringArray = await gitRepo.getUncommittedDiff([
      'pnpm-lock.yaml',
      'package-lock.json',
    ]);
    // Use the new TaskContextFactory for optimized context
    const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
    await taskContextFactory.initialize();

    // Generate context specifically for commit task
    const contextResult = await taskContextFactory.createContextForCommit(
      diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'
    );

    // Get the optimized context string
    let contextString = contextResult.context;

    // Log token usage statistics
    console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);

    // Check for token overflow against model limits
    const MODEL_TOKEN_LIMIT = 200000; // o4-mini
    if (contextResult.tokenCount > MODEL_TOKEN_LIMIT * 0.9) {
      console.log(`⚠️ Warning: Context size (${contextResult.tokenCount} tokens) is close to or exceeds model limit (${MODEL_TOKEN_LIMIT} tokens).`);
      console.log(`The model may not be able to process all information effectively.`);
    }

    let result = await this.aiDocsRef.openaiInstance.chat({
      systemMessage: `
You create a commit message for a git commit.
The commit message should be based on the files in the project.
You should not include any licensing information.
You should not include any personal information.

Important: Answer only in valid JSON.

Your answer should be parseable with JSON.parse() without modifying anything.

Here is the structure of the JSON you should return:

interface {
  recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
  recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
  recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
  recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
  recommendedNextVersion: string; // the recommended next version of the project, x.x.x
}

For the recommendedNextVersionDetails, please only add detail entries to the array if they have obvious value to the reader.

You are being given the files of the project. You should use them to create the commit message.
Also you are given a diff.
Never mention CLAUDE code, or codex.
`,
      messageHistory: [],
      userMessage: contextString,
    });

    // console.log(result.message);
    const resultObject: INextCommitObject = JSON.parse(
      result.message.replace('```json', '').replace('```', '')
    );

    const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
    let previousChangelog: plugins.smartfile.SmartFile;
    if (await plugins.smartfile.fs.fileExists(previousChangelogPath)) {
      previousChangelog = await plugins.smartfile.SmartFile.fromFilePath(previousChangelogPath);
    }

    if (!previousChangelog) {
      // lets build the changelog based on the full commit history
      const commitMessages = await gitRepo.getAllCommitMessages();
      console.log(JSON.stringify(commitMessages, null, 2));
      let result2 = await this.aiDocsRef.openaiInstance.chat({
        messageHistory: [],
        systemMessage: `
You are building a changelog.md file for the project.
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.

A changelog entry should look like this:

## yyyy-mm-dd - x.x.x - scope here
main description here

- detailed bullet points follow

You are given:
* the commit messages of the project

Only return the changelog file, so it can be written directly to changelog.md`,
        userMessage: `
Here are the commit messages:

${JSON.stringify(commitMessages, null, 2)}
`,
      });

      previousChangelog = await plugins.smartfile.SmartFile.fromString(
        previousChangelogPath,
        result2.message.replaceAll('```markdown', '').replaceAll('```', ''),
        'utf8'
      );
    }

    let oldChangelog = previousChangelog.contents.toString().replace('# Changelog\n\n', '');
    if (oldChangelog.startsWith('\n')) {
      oldChangelog = oldChangelog.replace('\n', '');
    }
    let newDateString = new plugins.smarttime.ExtendedDate().exportToHyphedSortableDate();
    let newChangelog = `# Changelog\n\n${`## ${newDateString} - {{nextVersion}} - {{nextVersionScope}}
{{nextVersionMessage}}

{{nextVersionDetails}}`}\n\n${oldChangelog}`;
    resultObject.changelog = newChangelog;

    return resultObject;
  }
}
84
ts/aidocs_classes/description.ts
Normal file
@@ -0,0 +1,84 @@
|
||||
import type { AiDoc } from '../classes.aidoc.js';
import * as plugins from '../plugins.js';
import { ProjectContext } from './projectcontext.js';

interface IDescriptionInterface {
  description: string;
  keywords: string[];
}

export class Description {
  // INSTANCE
  private aiDocsRef: AiDoc;
  private projectDir: string;

  constructor(aiDocsRef: AiDoc, projectDirArg: string) {
    this.aiDocsRef = aiDocsRef;
    this.projectDir = projectDirArg;
  }

  public async build() {
    // Use the new TaskContextFactory for optimized context
    const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
    await taskContextFactory.initialize();

    // Generate context specifically for the description task
    const contextResult = await taskContextFactory.createContextForDescription();
    const contextString = contextResult.context;

    // Log token usage statistics
    console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);

    let result = await this.aiDocsRef.openaiInstance.chat({
      systemMessage: `
You create a JSON object adhering to the following interface:
{
  description: string; // a sensible short, one sentence description of the project
  keywords: string[]; // an array of tags that describe the project
}

The description should be based on what you understand from the project's files.
The keywords should be based on use cases you see from the files.
Don't be cheap about the way you think.

Important: Answer only in valid JSON.
Your answer should be parseable with JSON.parse() without modifying anything.

Don't wrap the JSON in three ticks json!!!
`,
      messageHistory: [],
      userMessage: contextString,
    });

    console.log(result.message);
    const resultObject: IDescriptionInterface = JSON.parse(
      result.message.replace('```json', '').replace('```', ''),
    );

    // Create a standard ProjectContext instance for file operations
    const projectContext = new ProjectContext(this.projectDir);
    const files = await projectContext.gatherFiles();

    const npmextraJson = files.smartfilesNpmextraJSON;
    const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());

    npmextraJsonContent.gitzone.module.description = resultObject.description;
    npmextraJsonContent.gitzone.module.keywords = resultObject.keywords;

    npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
    await npmextraJson.write();

    // do the same with packageJson
    const packageJson = files.smartfilePackageJSON;
    const packageJsonContent = JSON.parse(packageJson.contents.toString());
    packageJsonContent.description = resultObject.description;
    packageJsonContent.keywords = resultObject.keywords;
    packageJson.contents = Buffer.from(JSON.stringify(packageJsonContent, null, 2));
    await packageJson.write();

    console.log(`\n======================\n`);
    console.log(JSON.stringify(resultObject, null, 2));
    console.log(`\n======================\n`);
    return result.message;
  }
}
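A usage sketch for the Description class above (not part of the diff; every call shown exists in this file or in classes.aidoc.ts below):

// Regenerate description + keywords for the current project:
// const aiDoc = new AiDoc();
// await aiDoc.start();                              // resolves the OpenAI token
// await new Description(aiDoc, process.cwd()).build();
// // -> rewrites npmextra.json (gitzone.module.*) and package.json in place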
4
ts/aidocs_classes/index.ts
Normal file
@@ -0,0 +1,4 @@
export * from './commit.js';
export * from './description.js';
export * from './projectcontext.js';
export * from './readme.js';
127
ts/aidocs_classes/projectcontext.ts
Normal file
@@ -0,0 +1,127 @@
import * as plugins from '../plugins.js';

export class ProjectContext {
  public static async fromDir(dirArg: string) {}

  // INSTANCE
  public projectDir: string;
  private tokenCount: number = 0;
  private contextString: string = '';

  constructor(projectDirArg: string) {
    this.projectDir = projectDirArg;
  }

  public async gatherFiles() {
    const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'package.json'),
      this.projectDir,
    );
    const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.md'),
      this.projectDir,
    );

    const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.hints.md'),
      this.projectDir,
    );
    const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'npmextra.json'),
      this.projectDir,
    );
    const smartfilesMod = await plugins.smartfile.fs.fileTreeToObject(
      this.projectDir,
      'ts*/**/*.ts',
    );
    const smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
      this.projectDir,
      'test/**/*.ts',
    );
    return {
      smartfilePackageJSON,
      smartfilesReadme,
      smartfilesReadmeHints,
      smartfilesNpmextraJSON,
      smartfilesMod,
      smartfilesTest,
    };
  }

  public async convertFilesToContext(filesArg: plugins.smartfile.SmartFile[]) {
    filesArg.map((fileArg) => {
      // console.log(`  -> ${fileArg.relative}`);
    });
    return filesArg
      .map((smartfile) => {
        return `
====== START OF FILE ${smartfile.relative} ======

${smartfile.contents.toString()}

====== END OF FILE ${smartfile.relative} ======
`;
      })
      .join('\n');
  }

  /**
   * Calculate the token count for a string using the GPT tokenizer
   * @param text The text to count tokens for
   * @param model The model to use for token counting (default: gpt-3.5-turbo)
   * @returns The number of tokens in the text
   */
  public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
    try {
      // Use the gpt-tokenizer library to count tokens
      const tokens = plugins.gptTokenizer.encode(text);
      return tokens.length;
    } catch (error) {
      console.error('Error counting tokens:', error);
      // Provide a rough estimate (4 chars per token) if tokenization fails
      return Math.ceil(text.length / 4);
    }
  }

  private async buildContext(dirArg: string) {
    const files = await this.gatherFiles();
    let context = await this.convertFilesToContext([
      files.smartfilePackageJSON,
      files.smartfilesReadme,
      files.smartfilesReadmeHints,
      files.smartfilesNpmextraJSON,
      ...files.smartfilesMod,
      ...files.smartfilesTest,
    ]);
    // Count tokens in the context
    this.contextString = context;
    this.tokenCount = this.countTokens(context);

    // console.log(context);
    return context;
  }

  /**
   * Get the token count for the current context
   * @returns The number of tokens in the context
   */
  public getTokenCount(): number {
    return this.tokenCount;
  }

  /**
   * Get both the context string and its token count
   * @returns An object containing the context string and token count
   */
  public getContextWithTokenCount(): { context: string; tokenCount: number } {
    return {
      context: this.contextString,
      tokenCount: this.tokenCount
    };
  }

  public async update() {
    const result = await this.buildContext(this.projectDir);
    return result;
  }
}
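A small usage sketch for ProjectContext on its own (assuming the package layout the globs above expect):

// const ctx = new ProjectContext(process.cwd());
// const contextString = await ctx.update();   // gathers files and builds the context string
// console.log(ctx.getTokenCount());           // via gpt-tokenizer, with a 4-chars/token fallback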
152
ts/aidocs_classes/readme.ts
Normal file
@@ -0,0 +1,152 @@
import type { AiDoc } from '../classes.aidoc.js';
import * as plugins from '../plugins.js';
import * as paths from '../paths.js';
import { ProjectContext } from './projectcontext.js';
import { logger } from '../logging.js';

export class Readme {
  // INSTANCE
  private aiDocsRef: AiDoc;
  private projectDir: string;

  constructor(aiDocsRef: AiDoc, projectDirArg: string) {
    this.aiDocsRef = aiDocsRef;
    this.projectDir = projectDirArg;
  }

  public async build() {
    let finalReadmeString = ``;

    // Use the new TaskContextFactory for optimized context
    const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
    await taskContextFactory.initialize();

    // Generate context specifically for the readme task
    const contextResult = await taskContextFactory.createContextForReadme();
    const contextString = contextResult.context;

    // Log token usage statistics
    console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);

    // lets first check legal before introducing any cost
    const projectContext = new ProjectContext(this.projectDir);
    const npmExtraJson = JSON.parse(
      (await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
    );
    const legalInfo = npmExtraJson?.tsdoc?.legal;
    if (!legalInfo) {
      const error = new Error(`No legal information found in npmextra.json`);
      console.log(error);
    }

    let result = await this.aiDocsRef.openaiInstance.chat({
      systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.

The Readme should follow the following template:

# Project Name
[
  The name is the module name of package.json
  The description is in the description field of package.json
]

## Install
[
  Write a short text on how to install the project
]

## Usage
[
  Give code examples here.
  Construct sensible scenarios for the user.
  Make sure to show a complete set of features of the module.
  Don't omit use cases.
  It does not matter how much time you need.
  ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
  DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
  If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
  Super important: Check for completeness.
  Don't include any licensing information. This will be added in a later step.
  Avoid "in conclusions".

  Good to know:
  * npmextra.json contains overall module information.
  * readme.hints.md provides valuable hints about module ideas.
]
`,
      messageHistory: [],
      userMessage: contextString,
    });

    finalReadmeString += result.message + '\n' + legalInfo;

    console.log(`\n======================\n`);
    console.log(result.message);
    console.log(`\n======================\n`);

    const readme = (await projectContext.gatherFiles()).smartfilesReadme;
    readme.contents = Buffer.from(finalReadmeString);
    await readme.write();

    // lets care about monorepo aspects
    const tsPublishInstance = new plugins.tspublish.TsPublish();
    const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
    logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);
    for (const subModule of Object.keys(subModules)) {
      logger.log('info', `Building readme for ${subModule}`);
      const subModuleContextString = await projectContext.update();
      let result = await this.aiDocsRef.openaiInstance.chat({
        systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.

IMPORTANT: YOU ARE NOW CREATING THE README FOR THE FOLLOWING SUB MODULE: ${subModule} !!!!!!!!!!!
The Sub Module will be published with the following data:
${JSON.stringify(plugins.smartfile.fs.toStringSync(plugins.path.join(paths.cwd, subModule, 'tspublish.json')), null, 2)}

The Readme should follow the following template:

# Project Name
[
  The name is the module name of package.json
  The description is in the description field of package.json
]

## Install
[
  Write a short text on how to install the project
]

## Usage
[
  Give code examples here.
  Construct sensible scenarios for the user.
  Make sure to show a complete set of features of the module.
  Don't omit use cases.
  It does not matter how much time you need.
  ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
  DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
  If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
  Super important: Check for completeness.
  Don't include any licensing information. This will be added in a later step.
  Avoid "in conclusions".

  Good to know:
  * npmextra.json contains overall module information.
  * readme.hints.md provides valuable hints about module ideas.
  * Your output lands directly in the readme.md file.
  * Don't use \`\`\` at the beginning or the end. It'll cause problems. Only use it for codeblocks. You are directly writing markdown. No need to introduce it weirdly.
]
`,
        messageHistory: [],
        userMessage: subModuleContextString,
      });

      const subModuleReadmeString = result.message + '\n' + legalInfo;
      await plugins.smartfile.memory.toFs(subModuleReadmeString, plugins.path.join(paths.cwd, subModule, 'readme.md'));
      logger.log('success', `Built readme for ${subModule}`);
    }
    return result.message;
  }
}
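The readme builder above reads its legal boilerplate from npmextra.json under tsdoc.legal. A hypothetical fragment, with the shape inferred only from the lookup in build() (the actual content is the project's choice):

// npmextra.json (hypothetical fragment):
// {
//   "tsdoc": {
//     "legal": "## License and Legal Information\n..."
//   }
// }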
134
ts/classes.aidoc.ts
Normal file
@@ -0,0 +1,134 @@
import * as plugins from './plugins.js';

import * as aiDocsClasses from './aidocs_classes/index.js';

export class AiDoc {
  private openaiToken: string;

  public npmextraKV: plugins.npmextra.KeyValueStore;
  public qenvInstance: plugins.qenv.Qenv;
  public aidocInteract: plugins.smartinteract.SmartInteract;
  public openaiInstance: plugins.smartai.OpenAiProvider;

  argvArg: any;

  constructor(argvArg?: any) {
    this.argvArg = argvArg;
  }

  private printSanitizedToken() {
    // Only reveal the first and last three characters of the token
    let printToken: string;
    if (this.openaiToken.length > 6) {
      // Extract the beginning and end parts of the token
      const start = this.openaiToken.substring(0, 3);
      const end = this.openaiToken.substring(this.openaiToken.length - 3);
      printToken = `${start}...${end}`;
    } else {
      // If the token is not long enough, return it as is
      printToken = this.openaiToken;
    }
    console.log(`OpenAI Token on record: ${printToken}`);
  }

  public async start() {
    // lets care about prerequisites
    this.aidocInteract = new plugins.smartinteract.SmartInteract();
    this.qenvInstance = new plugins.qenv.Qenv();
    if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
      this.npmextraKV = new plugins.npmextra.KeyValueStore({
        typeArg: 'userHomeDir',
        identityArg: 'tsdoc',
        mandatoryKeys: ['OPENAI_TOKEN'],
      });

      const missingKeys = await this.npmextraKV.getMissingMandatoryKeys();
      if (missingKeys.length > 0) {
        // lets try argv
        if (this.argvArg?.OPENAI_TOKEN) {
          this.openaiToken = this.argvArg.OPENAI_TOKEN;
        } else {
          // lets try smartinteract
          // wait for a second until OpenAI fixes punycode problem...
          await plugins.smartdelay.delayFor(1000);
          const answerObject = await this.aidocInteract.askQuestion({
            type: 'input',
            message: `Please provide your OpenAI token. This will be persisted in your home directory.`,
            name: 'OPENAI_TOKEN',
            default: '',
          });
          this.openaiToken = answerObject.value;
        }

        this.printSanitizedToken();
        await this.npmextraKV.writeKey('OPENAI_TOKEN', this.openaiToken);
      }
    }
    if (!this.openaiToken) {
      this.openaiToken = await this.npmextraKV.readKey('OPENAI_TOKEN');
    }

    // lets assume we have an OPENAI_TOKEN now
    this.openaiInstance = new plugins.smartai.OpenAiProvider({
      openaiToken: this.openaiToken,
    });
    await this.openaiInstance.start();
  }

  public async stop() {
    await this.openaiInstance.stop();
  }

  public async buildReadme(projectDirArg: string) {
    const readmeInstance = new aiDocsClasses.Readme(this, projectDirArg);
    return await readmeInstance.build();
  }

  public async buildDescription(projectDirArg: string) {
    const descriptionInstance = new aiDocsClasses.Description(this, projectDirArg);
    return await descriptionInstance.build();
  }

  public async buildNextCommitObject(projectDirArg: string) {
    const commitInstance = new aiDocsClasses.Commit(this, projectDirArg);
    return await commitInstance.buildNextCommitObject();
  }

  public async getProjectContext(projectDirArg: string) {
    const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
    return await projectContextInstance.gatherFiles();
  }

  /**
   * Get the context with token count information
   * @param projectDirArg The path to the project directory
   * @returns An object containing the context string and its token count
   */
  public async getProjectContextWithTokenCount(projectDirArg: string) {
    const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
    await projectContextInstance.update();
    return projectContextInstance.getContextWithTokenCount();
  }

  /**
   * Get just the token count for a project's context
   * @param projectDirArg The path to the project directory
   * @returns The number of tokens in the project context
   */
  public async getProjectContextTokenCount(projectDirArg: string) {
    const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
    await projectContextInstance.update();
    return projectContextInstance.getTokenCount();
  }

  /**
   * Count tokens in a text string using GPT tokenizer
   * @param text The text to count tokens for
   * @param model The model to use for tokenization (default: gpt-3.5-turbo)
   * @returns The number of tokens in the text
   */
  public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
    const projectContextInstance = new aiDocsClasses.ProjectContext('');
    return projectContextInstance.countTokens(text, model);
  }
}
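A sketch tying the AiDoc facade together (hypothetical caller; the token may come from qenv, argv, or the interactive prompt as coded above):

// const aiDoc = new AiDoc({ OPENAI_TOKEN: process.env.OPENAI_TOKEN });
// await aiDoc.start();
// await aiDoc.buildReadme(process.cwd());
// await aiDoc.buildDescription(process.cwd());
// await aiDoc.stop();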
ts/classes.typedoc.ts
@@ -1,5 +1,5 @@
-import * as plugins from './tsdoc.plugins';
-import * as paths from './tsdoc.paths';
+import * as plugins from './plugins.js';
+import * as paths from './paths.js';

 export class TypeDoc {
   public smartshellInstance = new plugins.smartshell.Smartshell({
@@ -9,10 +9,7 @@ export class TypeDoc {

   // Static
   public static async isTypeDocDir(dirPathArg: string): Promise<boolean> {
-    const result = await plugins.smartfile.fs.fileExists(
-      plugins.path.join(dirPathArg, 'mkdocs.yml')
-    );
-    return !result;
+    return true;
   }

   // Instance
@@ -24,19 +21,28 @@ export class TypeDoc {
   public async compile(options?: { publicSubdir?: string }) {
     const data = {
       compilerOptions: {
-        target: 'es2017',
-        module: 'commonjs',
-        esModuleInterop: true,
-        experimentalDecorators: true,
+        useDefineForClassFields: false,
+        target: 'ES2022',
+        module: 'NodeNext',
+        moduleResolution: 'NodeNext',
+        esModuleInterop: true,
+        verbatimModuleSyntax: true,
+        skipLibCheck: true,
       },
       include: [],
     };
+    let startDirectory = '';
     if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts'))) {
       data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
+      startDirectory = 'ts';
     }
+
+    if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts_web'))) {
+      data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
+      if (!startDirectory) {
+        startDirectory = 'ts_web';
+      }
+    }

     await plugins.smartfile.memory.toFs(JSON.stringify(data), paths.tsconfigFile);
@@ -45,7 +51,7 @@ export class TypeDoc {
       targetDir = plugins.path.join(targetDir, options.publicSubdir);
     }
     await this.smartshellInstance.exec(
-      `typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ts/index.ts`
+      `typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
     );
     plugins.smartfile.fs.remove(paths.tsconfigFile);
   }
177
ts/cli.ts
Normal file
@@ -0,0 +1,177 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { logger } from './logging.js';

import { TypeDoc } from './classes.typedoc.js';
import { AiDoc } from './classes.aidoc.js';
import * as context from './context/index.js';

export const run = async () => {
  const tsdocCli = new plugins.smartcli.Smartcli();

  tsdocCli.standardCommand().subscribe(async (argvArg) => {
    logger.log('warn', `Auto detecting environment!`);
    switch (true) {
      case await TypeDoc.isTypeDocDir(paths.cwd):
        logger.log('ok', `Detected TypeDoc compliant directory at ${paths.cwd}`);
        tsdocCli.triggerCommand('typedoc', argvArg);
        break;
      default:
        logger.log('error', `Cannot determine docs format at ${paths.cwd}`);
    }
  });

  tsdocCli.addCommand('typedoc').subscribe(async (argvArg) => {
    const typeDocInstance = new TypeDoc(paths.cwd);
    await typeDocInstance.compile({
      publicSubdir: argvArg.publicSubdir,
    });
  });

  tsdocCli.addCommand('aidoc').subscribe(async (argvArg) => {
    const aidocInstance = new AiDoc();
    await aidocInstance.start();

    // Get context token count if requested
    if (argvArg.tokens || argvArg.showTokens) {
      logger.log('info', `Calculating context token count...`);
      const tokenCount = await aidocInstance.getProjectContextTokenCount(paths.cwd);
      logger.log('ok', `Total context token count: ${tokenCount}`);

      if (argvArg.tokensOnly) {
        return; // Exit early if we only want token count
      }
    }

    logger.log('info', `Generating new readme...`);
    logger.log('info', `This may take some time...`);
    await aidocInstance.buildReadme(paths.cwd);
    logger.log('info', `Generating new keywords...`);
    logger.log('info', `This may take some time...`);
    await aidocInstance.buildDescription(paths.cwd);
  });

  tsdocCli.addCommand('tokens').subscribe(async (argvArg) => {
    const aidocInstance = new AiDoc();
    await aidocInstance.start();

    logger.log('info', `Calculating context token count...`);

    // Determine context mode based on args
    let contextMode: context.ContextMode = 'full';
    if (argvArg.trim || argvArg.trimmed) {
      contextMode = 'trimmed';
    } else if (argvArg.summarize || argvArg.summarized) {
      contextMode = 'summarized';
    }

    // Get task type if specified
    let taskType: context.TaskType | undefined = undefined;
    if (argvArg.task) {
      if (['readme', 'commit', 'description'].includes(argvArg.task)) {
        taskType = argvArg.task as context.TaskType;
      } else {
        logger.log('warn', `Unknown task type: ${argvArg.task}. Using default context.`);
      }
    }

    // Use enhanced context
    const taskFactory = new context.TaskContextFactory(paths.cwd);
    await taskFactory.initialize();

    let contextResult: context.IContextResult;

    if (argvArg.all) {
      // Show stats for all task types
      const stats = await taskFactory.getTokenStats();

      logger.log('ok', 'Token statistics by task:');
      for (const [task, data] of Object.entries(stats)) {
        logger.log('info', `\n${task.toUpperCase()}:`);
        logger.log('info', `  Tokens: ${data.tokenCount}`);
        logger.log('info', `  Token savings: ${data.savings}`);
        logger.log('info', `  Files: ${data.includedFiles} included, ${data.trimmedFiles} trimmed, ${data.excludedFiles} excluded`);

        // Calculate percentage of model context
        const o4MiniPercentage = (data.tokenCount / 200000 * 100).toFixed(2);
        logger.log('info', `  Context usage: ${o4MiniPercentage}% of o4-mini (200K tokens)`);
      }

      return;
    }

    if (taskType) {
      // Get context for specific task
      contextResult = await taskFactory.createContextForTask(taskType);
    } else {
      // Get generic context with specified mode
      const enhancedContext = new context.EnhancedContext(paths.cwd);
      await enhancedContext.initialize();
      enhancedContext.setContextMode(contextMode);

      if (argvArg.maxTokens) {
        enhancedContext.setTokenBudget(parseInt(argvArg.maxTokens, 10));
      }

      contextResult = await enhancedContext.buildContext();
    }

    // Display results
    logger.log('ok', `Total context token count: ${contextResult.tokenCount}`);
    logger.log('info', `Files included: ${contextResult.includedFiles.length}`);
    logger.log('info', `Files trimmed: ${contextResult.trimmedFiles.length}`);
    logger.log('info', `Files excluded: ${contextResult.excludedFiles.length}`);
    logger.log('info', `Token savings: ${contextResult.tokenSavings}`);

    if (argvArg.detailed) {
      // Show more detailed info about the context and token usage
      const o4MiniPercentage = (contextResult.tokenCount / 200000 * 100).toFixed(2);
      logger.log('info', `Token usage: ${o4MiniPercentage}% of o4-mini 200K token context window`);

      if (argvArg.model) {
        // Show percentages for different models
        if (argvArg.model === 'gpt4') {
          const gpt4Percentage = (contextResult.tokenCount / 8192 * 100).toFixed(2);
          logger.log('info', `Token usage (GPT-4): ${gpt4Percentage}% of 8192 token context window`);
        } else if (argvArg.model === 'gpt35') {
          const gpt35Percentage = (contextResult.tokenCount / 4096 * 100).toFixed(2);
          logger.log('info', `Token usage (GPT-3.5): ${gpt35Percentage}% of 4096 token context window`);
        }
      }

      // Estimate cost (approximate values)
      const o4MiniInputCost = 0.00005; // per 1K tokens for o4-mini
      const estimatedCost = (contextResult.tokenCount / 1000 * o4MiniInputCost).toFixed(6);
      logger.log('info', `Estimated input cost: $${estimatedCost} (o4-mini)`);

      if (argvArg.listFiles) {
        // List files included in context
        logger.log('info', '\nIncluded files:');
        contextResult.includedFiles.forEach(file => {
          logger.log('info', `  ${file.relativePath} (${file.tokenCount} tokens)`);
        });

        logger.log('info', '\nTrimmed files:');
        contextResult.trimmedFiles.forEach(file => {
          logger.log('info', `  ${file.relativePath} (${file.tokenCount} tokens)`);
        });

        if (contextResult.excludedFiles.length > 0) {
          logger.log('info', '\nExcluded files:');
          contextResult.excludedFiles.forEach(file => {
            logger.log('info', `  ${file.relativePath} (${file.tokenCount} tokens)`);
          });
        }
      }
    }
  });

  tsdocCli.addCommand('test').subscribe((argvArg) => {
    tsdocCli.triggerCommand('typedoc', argvArg);
    process.on('exit', async () => {
      await plugins.smartfile.fs.remove(paths.publicDir);
    });
  });

  tsdocCli.startParse();
};
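Example invocations of the CLI above (flag names read from the handlers; the binary name tsdoc is an assumption):

// tsdoc                                      -> auto-detects the docs format, runs typedoc
// tsdoc aidoc --tokens                       -> prints the context token count, then generates docs
// tsdoc tokens --task readme --detailed --listFiles
// tsdoc tokens --trim --maxTokens 100000
// tsdoc tokens --all                         -> per-task token statistics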
341
ts/context/config-manager.ts
Normal file
@@ -0,0 +1,341 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type {
  IContextConfig,
  ITrimConfig,
  ITaskConfig,
  TaskType,
  ContextMode,
  ICacheConfig,
  IAnalyzerConfig,
  IPrioritizationWeights,
  ITierConfig
} from './types.js';

/**
 * Manages configuration for context building
 */
export class ConfigManager {
  private static instance: ConfigManager;
  private config: IContextConfig;
  private projectDir: string = '';
  private configCache: { mtime: number; config: IContextConfig } | null = null;

  /**
   * Get the singleton instance of ConfigManager
   */
  public static getInstance(): ConfigManager {
    if (!ConfigManager.instance) {
      ConfigManager.instance = new ConfigManager();
    }
    return ConfigManager.instance;
  }

  /**
   * Private constructor for singleton pattern
   */
  private constructor() {
    this.config = this.getDefaultConfig();
  }

  /**
   * Initialize the config manager with a project directory
   * @param projectDir The project directory
   */
  public async initialize(projectDir: string): Promise<void> {
    this.projectDir = projectDir;
    await this.loadConfig();
  }

  /**
   * Get the default configuration
   */
  private getDefaultConfig(): IContextConfig {
    return {
      maxTokens: 190000, // Default for o4-mini with some buffer
      defaultMode: 'trimmed',
      taskSpecificSettings: {
        readme: {
          mode: 'trimmed',
          includePaths: ['ts/', 'src/'],
          excludePaths: ['test/', 'node_modules/']
        },
        commit: {
          mode: 'trimmed',
          focusOnChangedFiles: true
        },
        description: {
          mode: 'trimmed',
          includePackageInfo: true
        }
      },
      trimming: {
        removeImplementations: true,
        preserveInterfaces: true,
        preserveTypeDefs: true,
        preserveJSDoc: true,
        maxFunctionLines: 5,
        removeComments: true,
        removeBlankLines: true
      },
      cache: {
        enabled: true,
        ttl: 3600, // 1 hour
        maxSize: 100, // 100MB
        directory: undefined // Will be set to .nogit/context-cache by ContextCache
      },
      analyzer: {
        enabled: true,
        useAIRefinement: false, // Disabled by default for now
        aiModel: 'haiku'
      },
      prioritization: {
        dependencyWeight: 0.3,
        relevanceWeight: 0.4,
        efficiencyWeight: 0.2,
        recencyWeight: 0.1
      },
      tiers: {
        essential: { minScore: 0.8, trimLevel: 'none' },
        important: { minScore: 0.5, trimLevel: 'light' },
        optional: { minScore: 0.2, trimLevel: 'aggressive' }
      }
    };
  }

  /**
   * Load configuration from npmextra.json
   */
  private async loadConfig(): Promise<void> {
    try {
      if (!this.projectDir) {
        return;
      }

      const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');

      // Check if file exists
      const fileExists = await plugins.smartfile.fs.fileExists(npmextraJsonPath);
      if (!fileExists) {
        return;
      }

      // Check cache
      const stats = await fs.promises.stat(npmextraJsonPath);
      const currentMtime = Math.floor(stats.mtimeMs);

      if (this.configCache && this.configCache.mtime === currentMtime) {
        // Use cached config
        this.config = this.configCache.config;
        return;
      }

      // Read the npmextra.json file
      const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
      const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());

      // Check for tsdoc context configuration
      if (npmextraContent?.tsdoc?.context) {
        // Merge with default config
        this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
      }

      // Cache the config
      this.configCache = {
        mtime: currentMtime,
        config: { ...this.config }
      };
    } catch (error) {
      console.error('Error loading context configuration:', error);
    }
  }

  /**
   * Merge configurations, with userConfig taking precedence
   * @param defaultConfig The default configuration
   * @param userConfig The user configuration
   */
  private mergeConfigs(defaultConfig: IContextConfig, userConfig: Partial<IContextConfig>): IContextConfig {
    const result: IContextConfig = { ...defaultConfig };

    // Merge top-level properties
    if (userConfig.maxTokens !== undefined) result.maxTokens = userConfig.maxTokens;
    if (userConfig.defaultMode !== undefined) result.defaultMode = userConfig.defaultMode;

    // Merge task-specific settings
    if (userConfig.taskSpecificSettings) {
      result.taskSpecificSettings = result.taskSpecificSettings || {};

      // For each task type, merge settings
      (['readme', 'commit', 'description'] as TaskType[]).forEach(taskType => {
        if (userConfig.taskSpecificSettings?.[taskType]) {
          result.taskSpecificSettings![taskType] = {
            ...result.taskSpecificSettings![taskType],
            ...userConfig.taskSpecificSettings[taskType]
          };
        }
      });
    }

    // Merge trimming configuration
    if (userConfig.trimming) {
      result.trimming = {
        ...result.trimming,
        ...userConfig.trimming
      };
    }

    // Merge cache configuration
    if (userConfig.cache) {
      result.cache = {
        ...result.cache,
        ...userConfig.cache
      };
    }

    // Merge analyzer configuration
    if (userConfig.analyzer) {
      result.analyzer = {
        ...result.analyzer,
        ...userConfig.analyzer
      };
    }

    // Merge prioritization weights
    if (userConfig.prioritization) {
      result.prioritization = {
        ...result.prioritization,
        ...userConfig.prioritization
      };
    }

    // Merge tier configuration
    if (userConfig.tiers) {
      result.tiers = {
        ...result.tiers,
        ...userConfig.tiers
      };
    }

    return result;
  }

  /**
   * Get the complete configuration
   */
  public getConfig(): IContextConfig {
    return this.config;
  }

  /**
   * Get the trimming configuration
   */
  public getTrimConfig(): ITrimConfig {
    return this.config.trimming || {};
  }

  /**
   * Get configuration for a specific task
   * @param taskType The type of task
   */
  public getTaskConfig(taskType: TaskType): ITaskConfig {
    // Get task-specific config or empty object
    const taskConfig = this.config.taskSpecificSettings?.[taskType] || {};

    // If mode is not specified, use default mode
    if (!taskConfig.mode) {
      taskConfig.mode = this.config.defaultMode;
    }

    return taskConfig;
  }

  /**
   * Get the maximum tokens allowed for context
   */
  public getMaxTokens(): number {
    return this.config.maxTokens || 190000;
  }

  /**
   * Update the configuration
   * @param config The new configuration
   */
  public async updateConfig(config: Partial<IContextConfig>): Promise<void> {
    // Merge with existing config
    this.config = this.mergeConfigs(this.config, config);

    // Invalidate cache
    this.configCache = null;

    try {
      if (!this.projectDir) {
        return;
      }

      // Read the existing npmextra.json file
      const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
      let npmextraContent = {};

      if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
        const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
        npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
      }

      // Update the tsdoc context configuration
      const typedContent = npmextraContent as any;
      if (!typedContent.tsdoc) typedContent.tsdoc = {};
      typedContent.tsdoc.context = this.config;

      // Write back to npmextra.json
      const updatedContent = JSON.stringify(npmextraContent, null, 2);
      await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
    } catch (error) {
      console.error('Error updating context configuration:', error);
    }
  }

  /**
   * Get cache configuration
   */
  public getCacheConfig(): ICacheConfig {
    return this.config.cache || { enabled: true, ttl: 3600, maxSize: 100 };
  }

  /**
   * Get analyzer configuration
   */
  public getAnalyzerConfig(): IAnalyzerConfig {
    return this.config.analyzer || { enabled: true, useAIRefinement: false, aiModel: 'haiku' };
  }

  /**
   * Get prioritization weights
   */
  public getPrioritizationWeights(): IPrioritizationWeights {
    return this.config.prioritization || {
      dependencyWeight: 0.3,
      relevanceWeight: 0.4,
      efficiencyWeight: 0.2,
      recencyWeight: 0.1
    };
  }

  /**
   * Get tier configuration
   */
  public getTierConfig(): ITierConfig {
    return this.config.tiers || {
      essential: { minScore: 0.8, trimLevel: 'none' },
      important: { minScore: 0.5, trimLevel: 'light' },
      optional: { minScore: 0.2, trimLevel: 'aggressive' }
    };
  }

  /**
   * Clear the config cache (force reload on next access)
   */
  public clearCache(): void {
    this.configCache = null;
  }
}
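A usage sketch for ConfigManager (a singleton; values mirror the defaults above):

// const configManager = ConfigManager.getInstance();
// await configManager.initialize(process.cwd());          // merges npmextra.json tsdoc.context over defaults
// const trim = configManager.getTrimConfig();              // e.g. { removeImplementations: true, ... }
// await configManager.updateConfig({ maxTokens: 120000 }); // persists back to npmextra.json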
391
ts/context/context-analyzer.ts
Normal file
@@ -0,0 +1,391 @@
import * as plugins from '../plugins.js';
import type {
  IFileMetadata,
  IFileDependencies,
  IFileAnalysis,
  IAnalysisResult,
  TaskType,
  IPrioritizationWeights,
  ITierConfig,
} from './types.js';

/**
 * ContextAnalyzer provides intelligent file selection and prioritization
 * based on dependency analysis, task relevance, and configurable weights
 */
export class ContextAnalyzer {
  private projectRoot: string;
  private weights: Required<IPrioritizationWeights>;
  private tiers: Required<ITierConfig>;

  /**
   * Creates a new ContextAnalyzer
   * @param projectRoot - Root directory of the project
   * @param weights - Prioritization weights
   * @param tiers - Tier configuration
   */
  constructor(
    projectRoot: string,
    weights: Partial<IPrioritizationWeights> = {},
    tiers: Partial<ITierConfig> = {}
  ) {
    this.projectRoot = projectRoot;

    // Default weights
    this.weights = {
      dependencyWeight: weights.dependencyWeight ?? 0.3,
      relevanceWeight: weights.relevanceWeight ?? 0.4,
      efficiencyWeight: weights.efficiencyWeight ?? 0.2,
      recencyWeight: weights.recencyWeight ?? 0.1,
    };

    // Default tiers
    this.tiers = {
      essential: tiers.essential ?? { minScore: 0.8, trimLevel: 'none' },
      important: tiers.important ?? { minScore: 0.5, trimLevel: 'light' },
      optional: tiers.optional ?? { minScore: 0.2, trimLevel: 'aggressive' },
    };
  }

  /**
   * Analyzes files for a specific task type
   * @param metadata - Array of file metadata to analyze
   * @param taskType - Type of task being performed
   * @param changedFiles - Optional list of recently changed files (for commits)
   * @returns Analysis result with scored files
   */
  public async analyze(
    metadata: IFileMetadata[],
    taskType: TaskType,
    changedFiles: string[] = []
  ): Promise<IAnalysisResult> {
    const startTime = Date.now();

    // Build dependency graph
    const dependencyGraph = await this.buildDependencyGraph(metadata);

    // Calculate centrality scores
    this.calculateCentrality(dependencyGraph);

    // Analyze each file
    const files: IFileAnalysis[] = [];
    for (const meta of metadata) {
      const analysis = await this.analyzeFile(
        meta,
        taskType,
        dependencyGraph,
        changedFiles
      );
      files.push(analysis);
    }

    // Sort by importance score (highest first)
    files.sort((a, b) => b.importanceScore - a.importanceScore);

    const analysisDuration = Date.now() - startTime;

    return {
      taskType,
      files,
      dependencyGraph,
      totalFiles: metadata.length,
      analysisDuration,
    };
  }

  /**
   * Builds a dependency graph from file metadata
   * @param metadata - Array of file metadata
   * @returns Dependency graph as a map
   */
  private async buildDependencyGraph(
    metadata: IFileMetadata[]
  ): Promise<Map<string, IFileDependencies>> {
    const graph = new Map<string, IFileDependencies>();

    // Initialize graph entries
    for (const meta of metadata) {
      graph.set(meta.path, {
        path: meta.path,
        imports: [],
        importedBy: [],
        centrality: 0,
      });
    }

    // Parse imports from each file
    for (const meta of metadata) {
      try {
        const contents = await plugins.smartfile.fs.toStringSync(meta.path);
        const imports = this.extractImports(contents, meta.path);

        const deps = graph.get(meta.path)!;
        deps.imports = imports;

        // Update importedBy for imported files
        for (const importPath of imports) {
          const importedDeps = graph.get(importPath);
          if (importedDeps) {
            importedDeps.importedBy.push(meta.path);
          }
        }
      } catch (error) {
        console.warn(`Failed to parse imports from ${meta.path}:`, error.message);
      }
    }

    return graph;
  }

  /**
   * Extracts import statements from file contents
   * @param contents - File contents
   * @param filePath - Path of the file being analyzed
   * @returns Array of absolute paths to imported files
   */
  private extractImports(contents: string, filePath: string): string[] {
    const imports: string[] = [];
    const fileDir = plugins.path.dirname(filePath);

    // Match various import patterns
    const importRegex = /(?:import|export).*?from\s+['"](.+?)['"]/g;
    let match;

    while ((match = importRegex.exec(contents)) !== null) {
      const importPath = match[1];

      // Skip external modules
      if (!importPath.startsWith('.')) {
        continue;
      }

      // Resolve relative import to absolute path
      let resolvedPath = plugins.path.resolve(fileDir, importPath);

      // Handle various file extensions
      const extensions = ['.ts', '.js', '.tsx', '.jsx', '/index.ts', '/index.js'];
      let found = false;

      for (const ext of extensions) {
        const testPath = resolvedPath.endsWith(ext) ? resolvedPath : resolvedPath + ext;
        try {
          // Use synchronous file check to avoid async in this context
          const fs = require('fs');
          const exists = fs.existsSync(testPath);
          if (exists) {
            imports.push(testPath);
            found = true;
            break;
          }
        } catch (error) {
          // Continue trying other extensions
        }
      }

      if (!found && !resolvedPath.includes('.')) {
        // Try with .ts extension as default
        imports.push(resolvedPath + '.ts');
      }
    }

    return imports;
  }

  /**
   * Calculates centrality scores for all nodes in the dependency graph
   * Uses a simplified PageRank-like algorithm
   * @param graph - Dependency graph
   */
  private calculateCentrality(graph: Map<string, IFileDependencies>): void {
    const damping = 0.85;
    const iterations = 10;
    const nodeCount = graph.size;

    // Initialize scores
    const scores = new Map<string, number>();
    for (const path of graph.keys()) {
      scores.set(path, 1.0 / nodeCount);
    }

    // Iterative calculation
    for (let i = 0; i < iterations; i++) {
      const newScores = new Map<string, number>();

      for (const [path, deps] of graph.entries()) {
        let score = (1 - damping) / nodeCount;

        // Add contributions from nodes that import this file
        for (const importerPath of deps.importedBy) {
          const importerDeps = graph.get(importerPath);
          if (importerDeps) {
            const importerScore = scores.get(importerPath) ?? 0;
            const outgoingCount = importerDeps.imports.length || 1;
            score += damping * (importerScore / outgoingCount);
          }
        }

        newScores.set(path, score);
      }

      // Update scores
      for (const [path, score] of newScores) {
        scores.set(path, score);
      }
    }

    // Normalize scores to 0-1 range
    const maxScore = Math.max(...scores.values());
    if (maxScore > 0) {
      for (const deps of graph.values()) {
        const score = scores.get(deps.path) ?? 0;
        deps.centrality = score / maxScore;
      }
    }
  }

  /**
   * Analyzes a single file
   * @param meta - File metadata
   * @param taskType - Task being performed
   * @param graph - Dependency graph
   * @param changedFiles - Recently changed files
   * @returns File analysis
   */
  private async analyzeFile(
    meta: IFileMetadata,
    taskType: TaskType,
    graph: Map<string, IFileDependencies>,
    changedFiles: string[]
  ): Promise<IFileAnalysis> {
    const deps = graph.get(meta.path);
    const centralityScore = deps?.centrality ?? 0;

    // Calculate task-specific relevance
    const relevanceScore = this.calculateRelevance(meta, taskType);

    // Calculate efficiency (information per token)
    const efficiencyScore = this.calculateEfficiency(meta);

    // Calculate recency (for commit tasks)
    const recencyScore = this.calculateRecency(meta, changedFiles);

    // Calculate combined importance score
    const importanceScore =
      relevanceScore * this.weights.relevanceWeight +
      centralityScore * this.weights.dependencyWeight +
      efficiencyScore * this.weights.efficiencyWeight +
      recencyScore * this.weights.recencyWeight;

    // Assign tier
    const tier = this.assignTier(importanceScore);

    return {
      path: meta.path,
      relevanceScore,
      centralityScore,
      efficiencyScore,
      recencyScore,
      importanceScore,
      tier,
      reason: this.generateReason(meta, taskType, importanceScore, tier),
    };
  }

  /**
   * Calculates task-specific relevance score
   */
  private calculateRelevance(meta: IFileMetadata, taskType: TaskType): number {
    const relativePath = meta.relativePath.toLowerCase();
    let score = 0.5; // Base score

    // README generation - prioritize public APIs and main exports
    if (taskType === 'readme') {
      if (relativePath.includes('index.ts')) score += 0.3;
      if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.2; // Root level exports
      if (relativePath.includes('test/')) score -= 0.3;
      if (relativePath.includes('classes/')) score += 0.1;
      if (relativePath.includes('interfaces/')) score += 0.1;
    }

    // Commit messages - prioritize changed files and their dependencies
    if (taskType === 'commit') {
      if (relativePath.includes('test/')) score -= 0.2;
      // Recency will handle changed files
    }

    // Description generation - prioritize main exports and core interfaces
    if (taskType === 'description') {
      if (relativePath.includes('index.ts')) score += 0.4;
      if (relativePath.match(/^ts\/[^\/]+\.ts$/)) score += 0.3;
      if (relativePath.includes('test/')) score -= 0.4;
      if (relativePath.includes('interfaces/')) score += 0.2;
    }

    return Math.max(0, Math.min(1, score));
  }

  /**
   * Calculates efficiency score (information density)
   */
  private calculateEfficiency(meta: IFileMetadata): number {
    // Prefer files that are not too large (good signal-to-noise ratio)
    const optimalSize = 5000; // ~1250 tokens
    const distance = Math.abs(meta.estimatedTokens - optimalSize);
    const normalized = Math.max(0, 1 - distance / optimalSize);

    return normalized;
  }

  /**
   * Calculates recency score for changed files
   */
  private calculateRecency(meta: IFileMetadata, changedFiles: string[]): number {
    if (changedFiles.length === 0) {
      return 0;
    }

    // Check if this file was changed
    const isChanged = changedFiles.some((changed) => changed === meta.path);

    return isChanged ? 1.0 : 0.0;
  }

  /**
   * Assigns a tier based on importance score
   */
  private assignTier(score: number): 'essential' | 'important' | 'optional' | 'excluded' {
    if (score >= this.tiers.essential.minScore) return 'essential';
    if (score >= this.tiers.important.minScore) return 'important';
    if (score >= this.tiers.optional.minScore) return 'optional';
    return 'excluded';
  }

  /**
   * Generates a human-readable reason for the score
   */
  private generateReason(
    meta: IFileMetadata,
    taskType: TaskType,
    score: number,
    tier: string
  ): string {
    const reasons: string[] = [];

    if (meta.relativePath.includes('index.ts')) {
      reasons.push('main export file');
    }

    if (meta.relativePath.includes('test/')) {
      reasons.push('test file (lower priority)');
    }

    if (taskType === 'readme' && meta.relativePath.match(/^ts\/[^\/]+\.ts$/)) {
      reasons.push('root-level module');
    }

    reasons.push(`score: ${score.toFixed(2)}`);
    reasons.push(`tier: ${tier}`);

    return reasons.join(', ');
  }
}
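A sketch of driving the analyzer (hypothetical; the IFileMetadata list would normally come from the context builder):

// const analyzer = new ContextAnalyzer(process.cwd());
// const result = await analyzer.analyze(fileMetadataList, 'readme');
// for (const file of result.files) {
//   console.log(file.tier, file.importanceScore.toFixed(2), file.reason);
// }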
285
ts/context/context-cache.ts
Normal file
@@ -0,0 +1,285 @@
import * as plugins from '../plugins.js';
|
||||
import * as fs from 'fs';
|
||||
import type { ICacheEntry, ICacheConfig } from './types.js';
|
||||
|
||||
/**
|
||||
* ContextCache provides persistent caching of file contents and token counts
|
||||
* with automatic invalidation on file changes
|
||||
*/
|
||||
export class ContextCache {
|
||||
private cacheDir: string;
|
||||
private cache: Map<string, ICacheEntry> = new Map();
|
||||
private config: Required<ICacheConfig>;
|
||||
private cacheIndexPath: string;
|
||||
|
||||
/**
|
||||
* Creates a new ContextCache
|
||||
* @param projectRoot - Root directory of the project
|
||||
* @param config - Cache configuration
|
||||
*/
|
||||
constructor(projectRoot: string, config: Partial<ICacheConfig> = {}) {
|
||||
this.config = {
|
||||
enabled: config.enabled ?? true,
|
||||
ttl: config.ttl ?? 3600, // 1 hour default
|
||||
maxSize: config.maxSize ?? 100, // 100MB default
|
||||
directory: config.directory ?? plugins.path.join(projectRoot, '.nogit', 'context-cache'),
|
||||
};
|
||||
|
||||
this.cacheDir = this.config.directory;
|
||||
this.cacheIndexPath = plugins.path.join(this.cacheDir, 'index.json');
|
||||
}
|
||||
|
||||
/**
|
||||
* Initializes the cache by loading from disk
|
||||
*/
|
||||
public async init(): Promise<void> {
|
||||
if (!this.config.enabled) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Ensure cache directory exists
|
||||
await plugins.smartfile.fs.ensureDir(this.cacheDir);
|
||||
|
||||
// Load cache index if it exists
|
||||
try {
|
||||
const indexExists = await plugins.smartfile.fs.fileExists(this.cacheIndexPath);
|
||||
if (indexExists) {
|
||||
const indexContent = await plugins.smartfile.fs.toStringSync(this.cacheIndexPath);
|
||||
const indexData = JSON.parse(indexContent) as ICacheEntry[];
|
||||
if (Array.isArray(indexData)) {
|
||||
for (const entry of indexData) {
|
||||
this.cache.set(entry.path, entry);
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to load cache index:', error.message);
|
||||
// Start with empty cache if loading fails
|
||||
}
|
||||
|
||||
// Clean up expired and invalid entries
|
||||
await this.cleanup();
|
||||
}
|
||||
|
||||
/**
|
||||
   * Gets a cached entry if it's still valid
   * @param filePath - Absolute path to the file
   * @returns Cache entry if valid, null otherwise
   */
  public async get(filePath: string): Promise<ICacheEntry | null> {
    if (!this.config.enabled) {
      return null;
    }

    const entry = this.cache.get(filePath);
    if (!entry) {
      return null;
    }

    // Check if entry is expired
    const now = Date.now();
    if (now - entry.cachedAt > this.config.ttl * 1000) {
      this.cache.delete(filePath);
      return null;
    }

    // Check if file has been modified
    try {
      const stats = await fs.promises.stat(filePath);
      const currentMtime = Math.floor(stats.mtimeMs);

      if (currentMtime !== entry.mtime) {
        // File has changed, invalidate cache
        this.cache.delete(filePath);
        return null;
      }

      return entry;
    } catch (error) {
      // File doesn't exist anymore
      this.cache.delete(filePath);
      return null;
    }
  }

  /**
   * Stores a cache entry
   * @param entry - Cache entry to store
   */
  public async set(entry: ICacheEntry): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    this.cache.set(entry.path, entry);

    // Check cache size and evict old entries if needed
    await this.enforceMaxSize();

    // Persist to disk (async, don't await)
    this.persist().catch((error) => {
      console.warn('Failed to persist cache:', error.message);
    });
  }

  /**
   * Stores multiple cache entries
   * @param entries - Array of cache entries
   */
  public async setMany(entries: ICacheEntry[]): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    for (const entry of entries) {
      this.cache.set(entry.path, entry);
    }

    await this.enforceMaxSize();
    await this.persist();
  }

  /**
   * Checks if a file is cached and valid
   * @param filePath - Absolute path to the file
   * @returns True if cached and valid
   */
  public async has(filePath: string): Promise<boolean> {
    const entry = await this.get(filePath);
    return entry !== null;
  }

  /**
   * Gets cache statistics
   */
  public getStats(): {
    entries: number;
    totalSize: number;
    oldestEntry: number | null;
    newestEntry: number | null;
  } {
    let totalSize = 0;
    let oldestEntry: number | null = null;
    let newestEntry: number | null = null;

    for (const entry of this.cache.values()) {
      totalSize += entry.contents.length;

      if (oldestEntry === null || entry.cachedAt < oldestEntry) {
        oldestEntry = entry.cachedAt;
      }

      if (newestEntry === null || entry.cachedAt > newestEntry) {
        newestEntry = entry.cachedAt;
      }
    }

    return {
      entries: this.cache.size,
      totalSize,
      oldestEntry,
      newestEntry,
    };
  }

  /**
   * Clears all cache entries
   */
  public async clear(): Promise<void> {
    this.cache.clear();
    await this.persist();
  }

  /**
   * Clears specific cache entries
   * @param filePaths - Array of file paths to clear
   */
  public async clearPaths(filePaths: string[]): Promise<void> {
    for (const path of filePaths) {
      this.cache.delete(path);
    }
    await this.persist();
  }

  /**
   * Cleans up expired and invalid cache entries
   */
  private async cleanup(): Promise<void> {
    const now = Date.now();
    const toDelete: string[] = [];

    for (const [path, entry] of this.cache.entries()) {
      // Check expiration
      if (now - entry.cachedAt > this.config.ttl * 1000) {
        toDelete.push(path);
        continue;
      }

      // Check if file still exists and hasn't changed
      try {
        const stats = await fs.promises.stat(path);
        const currentMtime = Math.floor(stats.mtimeMs);

        if (currentMtime !== entry.mtime) {
          toDelete.push(path);
        }
      } catch (error) {
        // File doesn't exist
        toDelete.push(path);
      }
    }

    for (const path of toDelete) {
      this.cache.delete(path);
    }

    if (toDelete.length > 0) {
      await this.persist();
    }
  }

  /**
   * Enforces maximum cache size by evicting oldest entries
   */
  private async enforceMaxSize(): Promise<void> {
    const stats = this.getStats();
    const maxSizeBytes = this.config.maxSize * 1024 * 1024; // Convert MB to bytes

    if (stats.totalSize <= maxSizeBytes) {
      return;
    }

    // Sort entries by age (oldest first)
    const entries = Array.from(this.cache.entries()).sort(
      (a, b) => a[1].cachedAt - b[1].cachedAt
    );

    // Remove oldest entries until we're under the limit
    let currentSize = stats.totalSize;
    for (const [path, entry] of entries) {
      if (currentSize <= maxSizeBytes) {
        break;
      }

      currentSize -= entry.contents.length;
      this.cache.delete(path);
    }
  }

  /**
   * Persists cache index to disk
   */
  private async persist(): Promise<void> {
    if (!this.config.enabled) {
      return;
    }

    try {
      const entries = Array.from(this.cache.values());
      const content = JSON.stringify(entries, null, 2);
      await plugins.smartfile.memory.toFs(content, this.cacheIndexPath);
    } catch (error) {
      console.warn('Failed to persist cache index:', error.message);
    }
  }
}
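For orientation, here is a minimal usage sketch of the cache above. This is hypothetical consumer code, not part of the diff; the constructor arguments mirror how EnhancedContext instantiates ContextCache later in this changeset, and the literal paths and values are invented for illustration.

// Hypothetical consumer code (assumes an ES module context with top-level await).
const cache = new ContextCache('/path/to/project', { enabled: true, ttl: 3600, maxSize: 50 });
await cache.init();

// Store an entry keyed by absolute file path.
await cache.set({
  path: '/path/to/project/ts/example.ts',
  contents: 'export const answer = 42;',
  tokenCount: 8,
  mtime: Date.now(),
  cachedAt: Date.now(),
});

// Later reads return null if the TTL elapsed or the file's mtime changed on disk.
const hit = await cache.get('/path/to/project/ts/example.ts');
console.log(hit ? `cache hit (${hit.tokenCount} tokens)` : 'cache miss');
console.log(cache.getStats()); // { entries, totalSize, oldestEntry, newestEntry }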
310
ts/context/context-trimmer.ts
Normal file
@@ -0,0 +1,310 @@
import * as plugins from '../plugins.js';
import type { ITrimConfig, ContextMode } from './types.js';

/**
 * Class responsible for trimming file contents to reduce token usage
 * while preserving important information for context
 */
export class ContextTrimmer {
  private config: ITrimConfig;

  /**
   * Create a new ContextTrimmer with the given configuration
   * @param config The trimming configuration
   */
  constructor(config?: ITrimConfig) {
    this.config = {
      removeImplementations: true,
      preserveInterfaces: true,
      preserveTypeDefs: true,
      preserveJSDoc: true,
      maxFunctionLines: 5,
      removeComments: true,
      removeBlankLines: true,
      ...config
    };
  }

  /**
   * Trim a file's contents based on the configuration
   * @param filePath The path to the file
   * @param content The file's contents
   * @param mode The context mode to use
   * @returns The trimmed file contents
   */
  public trimFile(filePath: string, content: string, mode: ContextMode = 'trimmed'): string {
    // If mode is 'full', return the original content
    if (mode === 'full') {
      return content;
    }

    // Process based on file type
    if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
      return this.trimTypeScriptFile(content);
    } else if (filePath.endsWith('.md')) {
      return this.trimMarkdownFile(content);
    } else if (filePath.endsWith('.json')) {
      return this.trimJsonFile(content);
    }

    // Default to returning the original content for unknown file types
    return content;
  }

  /**
   * Trim a TypeScript file to reduce token usage
   * @param content The TypeScript file contents
   * @returns The trimmed file contents
   */
  private trimTypeScriptFile(content: string): string {
    let result = content;

    // Step 1: Preserve JSDoc comments if configured
    const jsDocComments: string[] = [];
    if (this.config.preserveJSDoc) {
      const jsDocRegex = /\/\*\*[\s\S]*?\*\//g;
      const matches = result.match(jsDocRegex) || [];
      jsDocComments.push(...matches);
    }

    // Step 2: Remove comments if configured
    if (this.config.removeComments) {
      // Remove single-line comments
      result = result.replace(/\/\/.*$/gm, '');
      // Remove multi-line comments (except JSDoc if preserveJSDoc is true)
      if (!this.config.preserveJSDoc) {
        result = result.replace(/\/\*[\s\S]*?\*\//g, '');
      } else {
        // Only remove non-JSDoc comments
        result = result.replace(/\/\*(?!\*)[\s\S]*?\*\//g, '');
      }
    }

    // Step 3: Remove function implementations if configured
    if (this.config.removeImplementations) {
      // Match function and method bodies
      result = result.replace(
        /(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, funcType, body, end) => {
          // Keep function signature and opening brace, replace body with comment
          return `${start} /* implementation removed */ ${end}`;
        }
      );

      // Match arrow function bodies
      result = result.replace(
        /(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
        (match, start, body, end) => {
          return `${start} /* implementation removed */ ${end}`;
        }
      );

      // Match method declarations
      result = result.replace(
        /(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
        (match, start, body, end) => {
          return `${start} /* implementation removed */ ${end}`;
        }
      );

      // Match class methods
      result = result.replace(
        /(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, modifier, body, end) => {
          return `${start} /* implementation removed */ ${end}`;
        }
      );
    } else if (this.config.maxFunctionLines && this.config.maxFunctionLines > 0) {
      // If not removing implementations completely, limit the number of lines
      // Match function and method bodies
      result = result.replace(
        /(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, funcType, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );

      // Match arrow function bodies
      result = result.replace(
        /(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
        (match, start, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );

      // Match method declarations
      result = result.replace(
        /(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
        (match, start, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );

      // Match class methods
      result = result.replace(
        /(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
        (match, start, modifier, body, end) => {
          return this.limitFunctionBody(start, body, end);
        }
      );
    }

    // Step 4: Remove blank lines if configured
    if (this.config.removeBlankLines) {
      result = result.replace(/^\s*[\r\n]/gm, '');
    }

    // Step 5: Restore preserved JSDoc comments
    if (this.config.preserveJSDoc && jsDocComments.length > 0) {
      // This is a placeholder; we already preserved JSDoc comments in the regex steps
    }

    return result;
  }

  /**
   * Limit a function body to a maximum number of lines
   * @param start The function signature and opening brace
   * @param body The function body
   * @param end The closing brace
   * @returns The limited function body
   */
  private limitFunctionBody(start: string, body: string, end: string): string {
    const lines = body.split('\n');
    if (lines.length > this.config.maxFunctionLines!) {
      const limitedBody = lines.slice(0, this.config.maxFunctionLines!).join('\n');
      return `${start}${limitedBody}\n // ... (${lines.length - this.config.maxFunctionLines!} lines trimmed)\n${end}`;
    }
    return `${start}${body}${end}`;
  }

  /**
   * Trim a Markdown file to reduce token usage
   * @param content The Markdown file contents
   * @returns The trimmed file contents
   */
  private trimMarkdownFile(content: string): string {
    // For markdown files, we generally want to keep most content
    // but we can remove lengthy code blocks if needed
    return content;
  }

  /**
   * Trim a JSON file to reduce token usage
   * @param content The JSON file contents
   * @returns The trimmed file contents
   */
  private trimJsonFile(content: string): string {
    try {
      // Parse the JSON
      const json = JSON.parse(content);

      // For package.json, keep only essential information
      if ('name' in json && 'version' in json && 'dependencies' in json) {
        const essentialKeys = [
          'name', 'version', 'description', 'author', 'license',
          'main', 'types', 'exports', 'type'
        ];

        const trimmedJson: any = {};
        essentialKeys.forEach(key => {
          if (key in json) {
            trimmedJson[key] = json[key];
          }
        });

        // Add dependency information without versions
        if ('dependencies' in json) {
          trimmedJson.dependencies = Object.keys(json.dependencies).reduce((acc, dep) => {
            acc[dep] = '*'; // Replace version with wildcard
            return acc;
          }, {} as Record<string, string>);
        }

        // Return the trimmed JSON
        return JSON.stringify(trimmedJson, null, 2);
      }

      // For other JSON files, leave as is
      return content;
    } catch (error) {
      // If there's an error parsing the JSON, return the original content
      return content;
    }
  }

  /**
   * Update the trimmer configuration
   * @param config The new configuration to apply
   */
  public updateConfig(config: ITrimConfig): void {
    this.config = {
      ...this.config,
      ...config
    };
  }

  /**
   * Trim a file based on its importance tier
   * @param filePath The path to the file
   * @param content The file's contents
   * @param level The trimming level to apply ('none', 'light', 'aggressive')
   * @returns The trimmed file contents
   */
  public trimFileWithLevel(
    filePath: string,
    content: string,
    level: 'none' | 'light' | 'aggressive'
  ): string {
    // No trimming for essential files
    if (level === 'none') {
      return content;
    }

    // Create a temporary config based on level
    const originalConfig = { ...this.config };

    try {
      if (level === 'light') {
        // Light trimming: preserve signatures, remove only complex implementations
        this.config = {
          ...this.config,
          removeImplementations: false,
          preserveInterfaces: true,
          preserveTypeDefs: true,
          preserveJSDoc: true,
          maxFunctionLines: 10,
          removeComments: false,
          removeBlankLines: true
        };
      } else if (level === 'aggressive') {
        // Aggressive trimming: remove all implementations, keep only signatures
        this.config = {
          ...this.config,
          removeImplementations: true,
          preserveInterfaces: true,
          preserveTypeDefs: true,
          preserveJSDoc: true,
          maxFunctionLines: 3,
          removeComments: true,
          removeBlankLines: true
        };
      }

      // Process based on file type
      let result = content;
      if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
        result = this.trimTypeScriptFile(content);
      } else if (filePath.endsWith('.md')) {
        result = this.trimMarkdownFile(content);
      } else if (filePath.endsWith('.json')) {
        result = this.trimJsonFile(content);
      }

      return result;
    } finally {
      // Restore original config
      this.config = originalConfig;
    }
  }
}
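A minimal sketch of how the trimmer above behaves, using an invented file path and source snippet (hypothetical consumer code, not part of the diff):

// Hypothetical usage of ContextTrimmer; 'ts/math.ts' and the source text are invented.
const trimmer = new ContextTrimmer(); // defaults: strip implementations, keep JSDoc

const source = [
  'export function add(a, b) {',
  '  return a + b;',
  '}',
].join('\n');

// In 'trimmed' mode, bodies matching the implementation regexes above are
// collapsed to `{ /* implementation removed */ }`; 'full' returns the input unchanged.
console.log(trimmer.trimFile('ts/math.ts', source, 'trimmed'));
console.log(trimmer.trimFile('ts/math.ts', source, 'full') === source); // true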
549
ts/context/enhanced-context.ts
Normal file
@@ -0,0 +1,549 @@
import * as plugins from '../plugins.js';
import type { ContextMode, IContextResult, IFileInfo, TaskType, IFileMetadata } from './types.js';
import { ContextTrimmer } from './context-trimmer.js';
import { ConfigManager } from './config-manager.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';

/**
 * Enhanced ProjectContext that supports context optimization strategies
 */
export class EnhancedContext {
  private projectDir: string;
  private trimmer: ContextTrimmer;
  private configManager: ConfigManager;
  private lazyLoader: LazyFileLoader;
  private cache: ContextCache;
  private analyzer: ContextAnalyzer;
  private contextMode: ContextMode = 'trimmed';
  private tokenBudget: number = 190000; // Default for o4-mini
  private contextResult: IContextResult = {
    context: '',
    tokenCount: 0,
    includedFiles: [],
    trimmedFiles: [],
    excludedFiles: [],
    tokenSavings: 0
  };

  /**
   * Create a new EnhancedContext
   * @param projectDirArg The project directory
   */
  constructor(projectDirArg: string) {
    this.projectDir = projectDirArg;
    this.configManager = ConfigManager.getInstance();
    this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
    this.lazyLoader = new LazyFileLoader(projectDirArg);
    this.cache = new ContextCache(projectDirArg, this.configManager.getCacheConfig());
    this.analyzer = new ContextAnalyzer(
      projectDirArg,
      this.configManager.getPrioritizationWeights(),
      this.configManager.getTierConfig()
    );
  }

  /**
   * Initialize the context builder
   */
  public async initialize(): Promise<void> {
    await this.configManager.initialize(this.projectDir);
    this.tokenBudget = this.configManager.getMaxTokens();
    this.trimmer.updateConfig(this.configManager.getTrimConfig());
    await this.cache.init();
  }

  /**
   * Set the context mode
   * @param mode The context mode to use
   */
  public setContextMode(mode: ContextMode): void {
    this.contextMode = mode;
  }

  /**
   * Set the token budget
   * @param maxTokens The maximum tokens to use
   */
  public setTokenBudget(maxTokens: number): void {
    this.tokenBudget = maxTokens;
  }

  /**
   * Gather files from the project
   * @param includePaths Optional paths to include
   * @param excludePaths Optional paths to exclude
   */
  public async gatherFiles(includePaths?: string[], excludePaths?: string[]): Promise<Record<string, plugins.smartfile.SmartFile | plugins.smartfile.SmartFile[]>> {
    const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'package.json'),
      this.projectDir,
    );

    const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.md'),
      this.projectDir,
    );

    const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.hints.md'),
      this.projectDir,
    );

    const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
      plugins.path.join(this.projectDir, 'npmextra.json'),
      this.projectDir,
    );

    // Use provided include paths or default to all TypeScript files
    const includeGlobs = includePaths?.map(path => `${path}/**/*.ts`) || ['ts*/**/*.ts'];

    // Get TypeScript files
    const smartfilesModPromises = includeGlobs.map(glob =>
      plugins.smartfile.fs.fileTreeToObject(this.projectDir, glob)
    );

    const smartfilesModArrays = await Promise.all(smartfilesModPromises);

    // Flatten the arrays
    const smartfilesMod: plugins.smartfile.SmartFile[] = [];
    smartfilesModArrays.forEach(array => {
      smartfilesMod.push(...array);
    });

    // Get test files if not excluded
    let smartfilesTest: plugins.smartfile.SmartFile[] = [];
    if (!excludePaths?.includes('test/')) {
      smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
        this.projectDir,
        'test/**/*.ts',
      );
    }

    return {
      smartfilePackageJSON,
      smartfilesReadme,
      smartfilesReadmeHints,
      smartfilesNpmextraJSON,
      smartfilesMod,
      smartfilesTest,
    };
  }

  /**
   * Convert files to context string
   * @param files The files to convert
   * @param mode The context mode to use
   */
  public async convertFilesToContext(
    files: plugins.smartfile.SmartFile[],
    mode: ContextMode = this.contextMode
  ): Promise<string> {
    // Reset context result
    this.contextResult = {
      context: '',
      tokenCount: 0,
      includedFiles: [],
      trimmedFiles: [],
      excludedFiles: [],
      tokenSavings: 0
    };

    let totalTokenCount = 0;
    let totalOriginalTokens = 0;

    // Convert SmartFile objects to IFileMetadata for analysis
    const metadata: IFileMetadata[] = files.map(sf => ({
      path: sf.path,
      relativePath: sf.relative,
      size: sf.contents.toString().length,
      mtime: Date.now(), // SmartFile doesn't expose mtime, use current time
      estimatedTokens: this.countTokens(sf.contents.toString()),
      importanceScore: 0
    }));

    // Analyze files using ContextAnalyzer to get smart prioritization
    // (Note: This requires task type which we'll pass from buildContext)
    // For now, sort files by estimated tokens (smaller files first for better efficiency)
    const sortedFiles = [...files].sort((a, b) => {
      const aTokens = this.countTokens(a.contents.toString());
      const bTokens = this.countTokens(b.contents.toString());
      return aTokens - bTokens;
    });

    const processedFiles: string[] = [];

    for (const smartfile of sortedFiles) {
      // Calculate original token count
      const originalContent = smartfile.contents.toString();
      const originalTokenCount = this.countTokens(originalContent);
      totalOriginalTokens += originalTokenCount;

      // Apply trimming based on mode
      let processedContent = originalContent;

      if (mode !== 'full') {
        processedContent = this.trimmer.trimFile(
          smartfile.relative,
          originalContent,
          mode
        );
      }

      // Calculate new token count
      const processedTokenCount = this.countTokens(processedContent);

      // Check if we have budget for this file
      if (totalTokenCount + processedTokenCount > this.tokenBudget) {
        // We don't have budget for this file
        this.contextResult.excludedFiles.push({
          path: smartfile.path,
          contents: originalContent,
          relativePath: smartfile.relative,
          tokenCount: originalTokenCount
        });
        continue;
      }

      // Format the file for context
      const formattedContent = `
====== START OF FILE ${smartfile.relative} ======

${processedContent}

====== END OF FILE ${smartfile.relative} ======
`;

      processedFiles.push(formattedContent);
      totalTokenCount += processedTokenCount;

      // Track file in appropriate list
      const fileInfo: IFileInfo = {
        path: smartfile.path,
        contents: processedContent,
        relativePath: smartfile.relative,
        tokenCount: processedTokenCount
      };

      if (mode === 'full' || processedContent === originalContent) {
        this.contextResult.includedFiles.push(fileInfo);
      } else {
        this.contextResult.trimmedFiles.push(fileInfo);
        this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
      }
    }

    // Join all processed files
    const context = processedFiles.join('\n');

    // Update context result
    this.contextResult.context = context;
    this.contextResult.tokenCount = totalTokenCount;

    return context;
  }

  /**
   * Convert files to context with smart analysis and prioritization
   * @param metadata - File metadata to analyze
   * @param taskType - Task type for context-aware prioritization
   * @param mode - Context mode to use
   * @returns Context string
   */
  public async convertFilesToContextWithAnalysis(
    metadata: IFileMetadata[],
    taskType: TaskType,
    mode: ContextMode = this.contextMode
  ): Promise<string> {
    // Reset context result
    this.contextResult = {
      context: '',
      tokenCount: 0,
      includedFiles: [],
      trimmedFiles: [],
      excludedFiles: [],
      tokenSavings: 0
    };

    // Analyze files for smart prioritization
    const analysis = await this.analyzer.analyze(metadata, taskType, []);

    // Sort files by importance score (highest first)
    const sortedAnalysis = [...analysis.files].sort(
      (a, b) => b.importanceScore - a.importanceScore
    );

    // Filter out excluded tier
    const relevantFiles = sortedAnalysis.filter(f => f.tier !== 'excluded');

    let totalTokenCount = 0;
    let totalOriginalTokens = 0;
    const processedFiles: string[] = [];

    // Load files with cache support
    for (const fileAnalysis of relevantFiles) {
      try {
        // Check cache first
        let contents: string;
        let originalTokenCount: number;

        const cached = await this.cache.get(fileAnalysis.path);
        if (cached) {
          contents = cached.contents;
          originalTokenCount = cached.tokenCount;
        } else {
          // Load file
          const fileData = await plugins.smartfile.fs.toStringSync(fileAnalysis.path);
          contents = fileData;
          originalTokenCount = this.countTokens(contents);

          // Cache it
          await this.cache.set({
            path: fileAnalysis.path,
            contents,
            tokenCount: originalTokenCount,
            mtime: Date.now(),
            cachedAt: Date.now()
          });
        }

        totalOriginalTokens += originalTokenCount;

        // Apply tier-based trimming
        let processedContent = contents;
        let trimLevel: 'none' | 'light' | 'aggressive' = 'light';

        if (fileAnalysis.tier === 'essential') {
          trimLevel = 'none';
        } else if (fileAnalysis.tier === 'important') {
          trimLevel = 'light';
        } else if (fileAnalysis.tier === 'optional') {
          trimLevel = 'aggressive';
        }

        // Apply trimming based on mode and tier
        if (mode !== 'full' && trimLevel !== 'none') {
          const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
          processedContent = this.trimmer.trimFileWithLevel(
            relativePath,
            contents,
            trimLevel
          );
        }

        // Calculate token count
        const processedTokenCount = this.countTokens(processedContent);

        // Check token budget
        if (totalTokenCount + processedTokenCount > this.tokenBudget) {
          // We don't have budget for this file
          const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
          this.contextResult.excludedFiles.push({
            path: fileAnalysis.path,
            contents,
            relativePath,
            tokenCount: originalTokenCount,
            importanceScore: fileAnalysis.importanceScore
          });
          continue;
        }

        // Format the file for context
        const relativePath = plugins.path.relative(this.projectDir, fileAnalysis.path);
        const formattedContent = `
====== START OF FILE ${relativePath} ======

${processedContent}

====== END OF FILE ${relativePath} ======
`;

        processedFiles.push(formattedContent);
        totalTokenCount += processedTokenCount;

        // Track file in appropriate list
        const fileInfo: IFileInfo = {
          path: fileAnalysis.path,
          contents: processedContent,
          relativePath,
          tokenCount: processedTokenCount,
          importanceScore: fileAnalysis.importanceScore
        };

        if (trimLevel === 'none' || processedContent === contents) {
          this.contextResult.includedFiles.push(fileInfo);
        } else {
          this.contextResult.trimmedFiles.push(fileInfo);
          this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
        }
      } catch (error) {
        console.warn(`Failed to process file ${fileAnalysis.path}:`, error.message);
      }
    }

    // Join all processed files
    const context = processedFiles.join('\n');

    // Update context result
    this.contextResult.context = context;
    this.contextResult.tokenCount = totalTokenCount;

    return context;
  }

  /**
   * Build context for the project
   * @param taskType Optional task type for task-specific context
   */
  public async buildContext(taskType?: TaskType): Promise<IContextResult> {
    // Initialize if needed
    if (this.tokenBudget === 0) {
      await this.initialize();
    }

    // Get task-specific configuration if a task type is provided
    if (taskType) {
      const taskConfig = this.configManager.getTaskConfig(taskType);
      if (taskConfig.mode) {
        this.setContextMode(taskConfig.mode);
      }
    }

    // Check if analyzer is enabled in config
    const analyzerConfig = this.configManager.getAnalyzerConfig();
    const useAnalyzer = analyzerConfig.enabled && taskType;

    if (useAnalyzer) {
      // Use new smart context building with lazy loading and analysis
      const taskConfig = this.configManager.getTaskConfig(taskType!);

      // Build globs for scanning
      const includeGlobs = taskConfig?.includePaths?.map(p => `${p}/**/*.ts`) || [
        'ts/**/*.ts',
        'ts*/**/*.ts'
      ];

      // Add config files
      const configGlobs = [
        'package.json',
        'readme.md',
        'readme.hints.md',
        'npmextra.json'
      ];

      // Scan files for metadata (fast, doesn't load contents)
      const metadata = await this.lazyLoader.scanFiles([...configGlobs, ...includeGlobs]);

      // Use analyzer to build context with smart prioritization
      await this.convertFilesToContextWithAnalysis(metadata, taskType!, this.contextMode);
    } else {
      // Fall back to old method for backward compatibility
      const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
      const files = await this.gatherFiles(
        taskConfig?.includePaths,
        taskConfig?.excludePaths
      );

      // Convert files to context
      // Create an array of all files to process
      const allFiles: plugins.smartfile.SmartFile[] = [];

      // Add individual files
      if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
      if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
      if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
      if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);

      // Add arrays of files
      if (files.smartfilesMod) {
        if (Array.isArray(files.smartfilesMod)) {
          allFiles.push(...files.smartfilesMod);
        } else {
          allFiles.push(files.smartfilesMod);
        }
      }

      if (files.smartfilesTest) {
        if (Array.isArray(files.smartfilesTest)) {
          allFiles.push(...files.smartfilesTest);
        } else {
          allFiles.push(files.smartfilesTest);
        }
      }

      await this.convertFilesToContext(allFiles);
    }

    return this.contextResult;
  }

  /**
   * Update the context with git diff information for commit tasks
   * @param gitDiff The git diff to include
   */
  public updateWithGitDiff(gitDiff: string): IContextResult {
    // If we don't have a context yet, return empty result
    if (!this.contextResult.context) {
      return this.contextResult;
    }

    // Add git diff to context
    const diffSection = `
====== GIT DIFF ======

${gitDiff}

====== END GIT DIFF ======
`;

    const diffTokenCount = this.countTokens(diffSection);

    // Update context and token count
    this.contextResult.context += diffSection;
    this.contextResult.tokenCount += diffTokenCount;

    return this.contextResult;
  }

  /**
   * Count tokens in a string
   * @param text The text to count tokens for
   * @param model The model to use for token counting
   */
  public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
    try {
      // Use the gpt-tokenizer library to count tokens
      const tokens = plugins.gptTokenizer.encode(text);
      return tokens.length;
    } catch (error) {
      console.error('Error counting tokens:', error);
      // Provide a rough estimate if tokenization fails
      return Math.ceil(text.length / 4);
    }
  }

  /**
   * Get the context result
   */
  public getContextResult(): IContextResult {
    return this.contextResult;
  }

  /**
   * Get the token count for the current context
   */
  public getTokenCount(): number {
    return this.contextResult.tokenCount;
  }

  /**
   * Get both the context string and its token count
   */
  public getContextWithTokenCount(): { context: string; tokenCount: number } {
    return {
      context: this.contextResult.context,
      tokenCount: this.contextResult.tokenCount
    };
  }
}
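A short sketch of the typical call sequence for the class above (hypothetical consumer code, not part of the diff; the project path and budget values are invented):

// Hypothetical usage of EnhancedContext (assumes an ES module with top-level await).
const ctx = new EnhancedContext('/path/to/project');
await ctx.initialize();          // loads config, trim settings, and the file cache
ctx.setContextMode('trimmed');   // 'full' | 'trimmed' | 'summarized'
ctx.setTokenBudget(100000);      // override the configured maximum

const result = await ctx.buildContext('readme');
console.log(`context uses ${result.tokenCount} tokens`);
console.log(`trimming saved ~${result.tokenSavings} tokens across ${result.trimmedFiles.length} files`);
console.log(`${result.excludedFiles.length} files did not fit the budget`);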
58
ts/context/index.ts
Normal file
@@ -0,0 +1,58 @@
import { EnhancedContext } from './enhanced-context.js';
import { TaskContextFactory } from './task-context-factory.js';
import { ConfigManager } from './config-manager.js';
import { ContextTrimmer } from './context-trimmer.js';
import { LazyFileLoader } from './lazy-file-loader.js';
import { ContextCache } from './context-cache.js';
import { ContextAnalyzer } from './context-analyzer.js';
import type {
  ContextMode,
  IContextConfig,
  IContextResult,
  IFileInfo,
  ITrimConfig,
  ITaskConfig,
  TaskType,
  ICacheConfig,
  IAnalyzerConfig,
  IPrioritizationWeights,
  ITierConfig,
  ITierSettings,
  IFileMetadata,
  ICacheEntry,
  IFileDependencies,
  IFileAnalysis,
  IAnalysisResult
} from './types.js';

export {
  // Classes
  EnhancedContext,
  TaskContextFactory,
  ConfigManager,
  ContextTrimmer,
  LazyFileLoader,
  ContextCache,
  ContextAnalyzer,
};

// Types
export type {
  ContextMode,
  IContextConfig,
  IContextResult,
  IFileInfo,
  ITrimConfig,
  ITaskConfig,
  TaskType,
  ICacheConfig,
  IAnalyzerConfig,
  IPrioritizationWeights,
  ITierConfig,
  ITierSettings,
  IFileMetadata,
  ICacheEntry,
  IFileDependencies,
  IFileAnalysis,
  IAnalysisResult
};
191
ts/context/lazy-file-loader.ts
Normal file
@@ -0,0 +1,191 @@
import * as plugins from '../plugins.js';
import * as fs from 'fs';
import type { IFileMetadata, IFileInfo } from './types.js';

/**
 * LazyFileLoader handles efficient file loading by:
 * - Scanning files for metadata without loading contents
 * - Providing fast file size and token estimates
 * - Loading contents only when requested
 * - Parallel loading of selected files
 */
export class LazyFileLoader {
  private projectRoot: string;
  private metadataCache: Map<string, IFileMetadata> = new Map();

  /**
   * Creates a new LazyFileLoader
   * @param projectRoot - Root directory of the project
   */
  constructor(projectRoot: string) {
    this.projectRoot = projectRoot;
  }

  /**
   * Scans files in given globs and creates metadata without loading contents
   * @param globs - File patterns to scan (e.g., ['ts/**\/*.ts', 'test/**\/*.ts'])
   * @returns Array of file metadata
   */
  public async scanFiles(globs: string[]): Promise<IFileMetadata[]> {
    const metadata: IFileMetadata[] = [];

    for (const globPattern of globs) {
      try {
        const smartFiles = await plugins.smartfile.fs.fileTreeToObject(this.projectRoot, globPattern);
        const fileArray = Array.isArray(smartFiles) ? smartFiles : [smartFiles];

        for (const smartFile of fileArray) {
          try {
            const meta = await this.getMetadata(smartFile.path);
            metadata.push(meta);
          } catch (error) {
            // Skip files that can't be read
            console.warn(`Failed to get metadata for ${smartFile.path}:`, error.message);
          }
        }
      } catch (error) {
        // Skip patterns that don't match any files
        console.warn(`No files found for pattern ${globPattern}`);
      }
    }

    return metadata;
  }

  /**
   * Gets metadata for a single file without loading contents
   * @param filePath - Absolute path to the file
   * @returns File metadata
   */
  public async getMetadata(filePath: string): Promise<IFileMetadata> {
    // Check cache first
    if (this.metadataCache.has(filePath)) {
      const cached = this.metadataCache.get(filePath)!;
      const currentStats = await fs.promises.stat(filePath);

      // Return cached if file hasn't changed
      if (cached.mtime === Math.floor(currentStats.mtimeMs)) {
        return cached;
      }
    }

    // Get file stats
    const stats = await fs.promises.stat(filePath);
    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    // Estimate tokens: rough estimate of ~4 characters per token
    // This is faster than reading and tokenizing the entire file
    const estimatedTokens = Math.ceil(stats.size / 4);

    const metadata: IFileMetadata = {
      path: filePath,
      relativePath,
      size: stats.size,
      mtime: Math.floor(stats.mtimeMs),
      estimatedTokens,
    };

    // Cache the metadata
    this.metadataCache.set(filePath, metadata);

    return metadata;
  }

  /**
   * Loads file contents for selected files in parallel
   * @param metadata - Array of file metadata to load
   * @param tokenizer - Function to calculate accurate token count
   * @returns Array of complete file info with contents
   */
  public async loadFiles(
    metadata: IFileMetadata[],
    tokenizer: (content: string) => number
  ): Promise<IFileInfo[]> {
    // Load files in parallel
    const loadPromises = metadata.map(async (meta) => {
      try {
        const contents = await plugins.smartfile.fs.toStringSync(meta.path);
        const tokenCount = tokenizer(contents);

        const fileInfo: IFileInfo = {
          path: meta.path,
          relativePath: meta.relativePath,
          contents,
          tokenCount,
          importanceScore: meta.importanceScore,
        };

        return fileInfo;
      } catch (error) {
        console.warn(`Failed to load file ${meta.path}:`, error.message);
        return null;
      }
    });

    // Wait for all loads to complete and filter out failures
    const results = await Promise.all(loadPromises);
    return results.filter((r): r is IFileInfo => r !== null);
  }

  /**
   * Loads a single file with contents
   * @param filePath - Absolute path to the file
   * @param tokenizer - Function to calculate accurate token count
   * @returns Complete file info with contents
   */
  public async loadFile(
    filePath: string,
    tokenizer: (content: string) => number
  ): Promise<IFileInfo> {
    const meta = await this.getMetadata(filePath);
    const contents = await plugins.smartfile.fs.toStringSync(filePath);
    const tokenCount = tokenizer(contents);
    const relativePath = plugins.path.relative(this.projectRoot, filePath);

    return {
      path: filePath,
      relativePath,
      contents,
      tokenCount,
      importanceScore: meta.importanceScore,
    };
  }

  /**
   * Updates importance scores for metadata entries
   * @param scores - Map of file paths to importance scores
   */
  public updateImportanceScores(scores: Map<string, number>): void {
    for (const [path, score] of scores) {
      const meta = this.metadataCache.get(path);
      if (meta) {
        meta.importanceScore = score;
      }
    }
  }

  /**
   * Clears the metadata cache
   */
  public clearCache(): void {
    this.metadataCache.clear();
  }

  /**
   * Gets total estimated tokens for all cached metadata
   */
  public getTotalEstimatedTokens(): number {
    let total = 0;
    for (const meta of this.metadataCache.values()) {
      total += meta.estimatedTokens;
    }
    return total;
  }

  /**
   * Gets cached metadata entries
   */
  public getCachedMetadata(): IFileMetadata[] {
    return Array.from(this.metadataCache.values());
  }
}
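The two-phase pattern above (cheap metadata scan, then selective content loading) looks like this in a hypothetical consumer (the path, glob, and rough tokenizer are invented for illustration):

// Hypothetical usage of LazyFileLoader (assumes an ES module with top-level await).
const loader = new LazyFileLoader('/path/to/project');

// Phase 1: metadata scan (fs.stat only; tokens estimated at ~size/4).
const metadata = await loader.scanFiles(['ts/**/*.ts']);
console.log(`estimated ${loader.getTotalEstimatedTokens()} tokens across ${metadata.length} files`);

// Phase 2: load only the files that survived selection, with a real tokenizer.
// A character-based stand-in is used here; callers would pass a gpt-tokenizer-backed function.
const roughTokenizer = (content: string) => Math.ceil(content.length / 4);
const files = await loader.loadFiles(metadata.slice(0, 10), roughTokenizer);
console.log(files.map((f) => `${f.relativePath}: ${f.tokenCount} tokens`));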
138
ts/context/task-context-factory.ts
Normal file
@@ -0,0 +1,138 @@
import * as plugins from '../plugins.js';
import { EnhancedContext } from './enhanced-context.js';
import { ConfigManager } from './config-manager.js';
import type { IContextResult, TaskType } from './types.js';

/**
 * Factory class for creating task-specific context
 */
export class TaskContextFactory {
  private projectDir: string;
  private configManager: ConfigManager;

  /**
   * Create a new TaskContextFactory
   * @param projectDirArg The project directory
   */
  constructor(projectDirArg: string) {
    this.projectDir = projectDirArg;
    this.configManager = ConfigManager.getInstance();
  }

  /**
   * Initialize the factory
   */
  public async initialize(): Promise<void> {
    await this.configManager.initialize(this.projectDir);
  }

  /**
   * Create context for README generation
   */
  public async createContextForReadme(): Promise<IContextResult> {
    const contextBuilder = new EnhancedContext(this.projectDir);
    await contextBuilder.initialize();

    // Get README-specific configuration
    const taskConfig = this.configManager.getTaskConfig('readme');
    if (taskConfig.mode) {
      contextBuilder.setContextMode(taskConfig.mode);
    }

    // Build the context for README task
    return await contextBuilder.buildContext('readme');
  }

  /**
   * Create context for description generation
   */
  public async createContextForDescription(): Promise<IContextResult> {
    const contextBuilder = new EnhancedContext(this.projectDir);
    await contextBuilder.initialize();

    // Get description-specific configuration
    const taskConfig = this.configManager.getTaskConfig('description');
    if (taskConfig.mode) {
      contextBuilder.setContextMode(taskConfig.mode);
    }

    // Build the context for description task
    return await contextBuilder.buildContext('description');
  }

  /**
   * Create context for commit message generation
   * @param gitDiff Optional git diff to include
   */
  public async createContextForCommit(gitDiff?: string): Promise<IContextResult> {
    const contextBuilder = new EnhancedContext(this.projectDir);
    await contextBuilder.initialize();

    // Get commit-specific configuration
    const taskConfig = this.configManager.getTaskConfig('commit');
    if (taskConfig.mode) {
      contextBuilder.setContextMode(taskConfig.mode);
    }

    // Build the context for commit task
    const contextResult = await contextBuilder.buildContext('commit');

    // If git diff is provided, add it to the context
    if (gitDiff) {
      contextBuilder.updateWithGitDiff(gitDiff);
    }

    return contextBuilder.getContextResult();
  }

  /**
   * Create context for any task type
   * @param taskType The task type to create context for
   * @param additionalContent Optional additional content to include
   */
  public async createContextForTask(
    taskType: TaskType,
    additionalContent?: string
  ): Promise<IContextResult> {
    switch (taskType) {
      case 'readme':
        return this.createContextForReadme();
      case 'description':
        return this.createContextForDescription();
      case 'commit':
        return this.createContextForCommit(additionalContent);
      default:
        // Generic context for unknown task types
        const contextBuilder = new EnhancedContext(this.projectDir);
        await contextBuilder.initialize();
        return await contextBuilder.buildContext();
    }
  }

  /**
   * Get token stats for all task types
   */
  public async getTokenStats(): Promise<Record<TaskType, {
    tokenCount: number;
    savings: number;
    includedFiles: number;
    trimmedFiles: number;
    excludedFiles: number;
  }>> {
    const taskTypes: TaskType[] = ['readme', 'description', 'commit'];
    const stats: Record<TaskType, any> = {} as any;

    for (const taskType of taskTypes) {
      const result = await this.createContextForTask(taskType);
      stats[taskType] = {
        tokenCount: result.tokenCount,
        savings: result.tokenSavings,
        includedFiles: result.includedFiles.length,
        trimmedFiles: result.trimmedFiles.length,
        excludedFiles: result.excludedFiles.length
      };
    }

    return stats;
  }
}
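A minimal sketch of driving the factory above (hypothetical consumer code; the project path and the truncated diff string are invented):

// Hypothetical usage of TaskContextFactory (assumes an ES module with top-level await).
const factory = new TaskContextFactory('/path/to/project');
await factory.initialize();

// Commit context can fold a git diff into the result.
const commitContext = await factory.createContextForCommit('diff --git a/ts/example.ts b/ts/example.ts ...');
console.log(commitContext.tokenCount);

// Aggregated stats across all supported task types.
const stats = await factory.getTokenStats();
console.log(stats.readme.includedFiles, stats.commit.savings);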
248
ts/context/types.ts
Normal file
@@ -0,0 +1,248 @@
/**
 * Context processing mode to control how context is built
 */
export type ContextMode = 'full' | 'trimmed' | 'summarized';

/**
 * Configuration for context trimming
 */
export interface ITrimConfig {
  /** Whether to remove function implementations */
  removeImplementations?: boolean;
  /** Whether to preserve interface definitions */
  preserveInterfaces?: boolean;
  /** Whether to preserve type definitions */
  preserveTypeDefs?: boolean;
  /** Whether to preserve JSDoc comments */
  preserveJSDoc?: boolean;
  /** Maximum lines to keep for function bodies (if not removing completely) */
  maxFunctionLines?: number;
  /** Whether to remove normal comments (non-JSDoc) */
  removeComments?: boolean;
  /** Whether to remove blank lines */
  removeBlankLines?: boolean;
}

/**
 * Task types that require different context optimization
 */
export type TaskType = 'readme' | 'commit' | 'description';

/**
 * Configuration for different tasks
 */
export interface ITaskConfig {
  /** The context mode to use for this task */
  mode?: ContextMode;
  /** File paths to include for this task */
  includePaths?: string[];
  /** File paths to exclude for this task */
  excludePaths?: string[];
  /** For commit tasks, whether to focus on changed files */
  focusOnChangedFiles?: boolean;
  /** For description tasks, whether to include package info */
  includePackageInfo?: boolean;
}

/**
 * Complete context configuration
 */
export interface IContextConfig {
  /** Maximum tokens to use for context */
  maxTokens?: number;
  /** Default context mode */
  defaultMode?: ContextMode;
  /** Task-specific settings */
  taskSpecificSettings?: {
    [key in TaskType]?: ITaskConfig;
  };
  /** Trimming configuration */
  trimming?: ITrimConfig;
  /** Cache configuration */
  cache?: ICacheConfig;
  /** Analyzer configuration */
  analyzer?: IAnalyzerConfig;
  /** Prioritization weights */
  prioritization?: IPrioritizationWeights;
  /** Tier configuration for adaptive trimming */
  tiers?: ITierConfig;
}

/**
 * Cache configuration
 */
export interface ICacheConfig {
  /** Whether caching is enabled */
  enabled?: boolean;
  /** Time-to-live in seconds */
  ttl?: number;
  /** Maximum cache size in MB */
  maxSize?: number;
  /** Cache directory path */
  directory?: string;
}

/**
 * Analyzer configuration
 */
export interface IAnalyzerConfig {
  /** Whether analyzer is enabled */
  enabled?: boolean;
  /** Whether to use AI refinement for selection */
  useAIRefinement?: boolean;
  /** AI model to use for refinement */
  aiModel?: string;
}

/**
 * Weights for file prioritization
 */
export interface IPrioritizationWeights {
  /** Weight for dependency centrality */
  dependencyWeight?: number;
  /** Weight for task relevance */
  relevanceWeight?: number;
  /** Weight for token efficiency */
  efficiencyWeight?: number;
  /** Weight for file recency */
  recencyWeight?: number;
}

/**
 * Tier configuration for adaptive trimming
 */
export interface ITierConfig {
  essential?: ITierSettings;
  important?: ITierSettings;
  optional?: ITierSettings;
}

/**
 * Settings for a single tier
 */
export interface ITierSettings {
  /** Minimum score to qualify for this tier */
  minScore: number;
  /** Trimming level to apply */
  trimLevel: 'none' | 'light' | 'aggressive';
}

/**
 * Basic file information interface
 */
export interface IFileInfo {
  /** The file path */
  path: string;
  /** The file contents */
  contents: string;
  /** The file's relative path from the project root */
  relativePath: string;
  /** The estimated token count of the file */
  tokenCount?: number;
  /** The file's importance score (higher is more important) */
  importanceScore?: number;
}

/**
 * Result of context building
 */
export interface IContextResult {
  /** The generated context string */
  context: string;
  /** The total token count of the context */
  tokenCount: number;
  /** Files included in the context */
  includedFiles: IFileInfo[];
  /** Files that were trimmed */
  trimmedFiles: IFileInfo[];
  /** Files that were excluded */
  excludedFiles: IFileInfo[];
  /** Token savings from trimming */
  tokenSavings: number;
}

/**
 * File metadata without contents (for lazy loading)
 */
export interface IFileMetadata {
  /** The file path */
  path: string;
  /** The file's relative path from the project root */
  relativePath: string;
  /** File size in bytes */
  size: number;
  /** Last modified time (Unix timestamp) */
  mtime: number;
  /** Estimated token count (without loading full contents) */
  estimatedTokens: number;
  /** The file's importance score */
  importanceScore?: number;
}

/**
 * Cache entry for a file
 */
export interface ICacheEntry {
  /** File path */
  path: string;
  /** File contents */
  contents: string;
  /** Token count */
  tokenCount: number;
  /** Last modified time when cached */
  mtime: number;
  /** When this cache entry was created */
  cachedAt: number;
}

/**
 * Dependency information for a file
 */
export interface IFileDependencies {
  /** File path */
  path: string;
  /** Files this file imports */
  imports: string[];
  /** Files that import this file */
  importedBy: string[];
  /** Centrality score (0-1) - how central this file is in the dependency graph */
  centrality: number;
}

/**
 * Analysis result for a file
 */
export interface IFileAnalysis {
  /** File path */
  path: string;
  /** Task relevance score (0-1) */
  relevanceScore: number;
  /** Dependency centrality score (0-1) */
  centralityScore: number;
  /** Token efficiency score (0-1) */
  efficiencyScore: number;
  /** Recency score (0-1) */
  recencyScore: number;
  /** Combined importance score (0-1) */
  importanceScore: number;
  /** Assigned tier */
  tier: 'essential' | 'important' | 'optional' | 'excluded';
  /** Reason for the score */
  reason?: string;
}

/**
 * Result of context analysis
 */
export interface IAnalysisResult {
  /** Task type being analyzed */
  taskType: TaskType;
  /** Analyzed files with scores */
  files: IFileAnalysis[];
  /** Dependency graph */
  dependencyGraph: Map<string, IFileDependencies>;
  /** Total files analyzed */
  totalFiles: number;
  /** Analysis duration in ms */
  analysisDuration: number;
}
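To tie the interfaces above together, here is one plausible IContextConfig literal. All field names come from the types above; the numeric values and score thresholds are invented for illustration, and where such a config would actually live (e.g. a project's npmextra.json) is an assumption, not something this diff states.

// A hypothetical configuration object matching IContextConfig; all values are examples.
const exampleConfig: IContextConfig = {
  maxTokens: 190000,
  defaultMode: 'trimmed',
  taskSpecificSettings: {
    commit: { mode: 'trimmed', focusOnChangedFiles: true },
    readme: { mode: 'full', includePaths: ['ts'] },
  },
  trimming: { removeImplementations: true, maxFunctionLines: 5 },
  cache: { enabled: true, ttl: 3600, maxSize: 100 },
  analyzer: { enabled: true, useAIRefinement: false },
  tiers: {
    essential: { minScore: 0.8, trimLevel: 'none' },      // never trimmed
    important: { minScore: 0.5, trimLevel: 'light' },     // bodies capped, comments kept
    optional: { minScore: 0.2, trimLevel: 'aggressive' }, // signatures only
  },
};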
@@ -1,9 +1,12 @@
-import * as early from '@pushrocks/early';
+import * as early from '@push.rocks/early';
 early.start('tsdoc');
-import * as plugins from './tsdoc.plugins';
-import * as cli from './tsdoc.cli';
+import * as plugins from './plugins.js';
+import * as cli from './cli.js';
 early.stop();
 
 export const runCli = async () => {
   await cli.run();
 };
+
+// exports
+export * from './classes.aidoc.js';
6
ts/logging.ts
Normal file
@@ -0,0 +1,6 @@
import { commitinfo } from './00_commitinfo_data.js';
import * as plugins from './plugins.js';

export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);

logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());
@@ -1,7 +1,10 @@
-import * as plugins from './tsdoc.plugins';
+import * as plugins from './plugins.js';
 
 // dirs
-export const packageDir = plugins.path.join(__dirname, '../');
+export const packageDir = plugins.path.join(
+  plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
+  '../',
+);
 export const cwd = process.cwd();
 export const binDir = plugins.path.join(packageDir, './node_modules/.bin');
 export const assetsDir = plugins.path.join(packageDir, './assets');
46
ts/plugins.ts
Normal file
@@ -0,0 +1,46 @@
// node native
import * as path from 'path';

export { path };

// pushrocks scope
import * as npmextra from '@push.rocks/npmextra';
import * as qenv from '@push.rocks/qenv';
import * as smartai from '@push.rocks/smartai';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartgit from '@push.rocks/smartgit';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartlog from '@push.rocks/smartlog';
import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local';
import * as smartpath from '@push.rocks/smartpath';
import * as smartshell from '@push.rocks/smartshell';
import * as smarttime from '@push.rocks/smarttime';

export {
  npmextra,
  qenv,
  smartai,
  smartcli,
  smartdelay,
  smartfile,
  smartgit,
  smartinteract,
  smartlog,
  smartlogDestinationLocal,
  smartpath,
  smartshell,
  smarttime,
};

// @git.zone scope
import * as tspublish from '@git.zone/tspublish';

export { tspublish };

// third party scope
import * as typedoc from 'typedoc';
import * as gptTokenizer from 'gpt-tokenizer';

export { typedoc, gptTokenizer };
@@ -1,37 +0,0 @@
import * as plugins from './tsdoc.plugins';
import * as paths from './tsdoc.paths';
import { logger } from './tsdoc.logging';

import { TypeDoc } from './tsdoc.classes.typedoc';

export const run = async () => {
  const tsdocCli = new plugins.smartcli.Smartcli();

  tsdocCli.standardTask().subscribe(async (argvArg) => {
    logger.log('warn', `Auto detecting environment!`);
    switch (true) {
      case await TypeDoc.isTypeDocDir(paths.cwd):
        logger.log('ok', `Detected TypeDoc compliant directory at ${paths.cwd}`);
        tsdocCli.trigger('typedoc');
        break;
      default:
        logger.log('error', `Cannot determine docs format at ${paths.cwd}`);
    }
  });

  tsdocCli.addCommand('typedoc').subscribe(async (argvArg) => {
    const typeDocInstance = new TypeDoc(paths.cwd);
    await typeDocInstance.compile({
      publicSubdir: argvArg.publicSubdir,
    });
  });

  tsdocCli.addCommand('test').subscribe((argvArg) => {
    tsdocCli.trigger('typedoc');
    process.on('exit', async () => {
      await plugins.smartfile.fs.remove(paths.publicDir);
    });
  });

  tsdocCli.startParse();
};
@@ -1,15 +0,0 @@
import * as plugins from './tsdoc.plugins';

export const logger = new plugins.smartlog.Smartlog({
  logContext: {
    company: 'Some Company',
    companyunit: 'Some CompanyUnit',
    containerName: 'Some Containername',
    environment: 'local',
    runtime: 'node',
    zone: 'gitzone',
  },
  minimumLogLevel: 'silly',
});

logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());
@@ -1,18 +0,0 @@
// node native
import * as path from 'path';

export { path };

// pushrocks scope
import * as smartcli from '@pushrocks/smartcli';
import * as smartfile from '@pushrocks/smartfile';
import * as smartlog from '@pushrocks/smartlog';
import * as smartlogDestinationLocal from '@pushrocks/smartlog-destination-local';
import * as smartshell from '@pushrocks/smartshell';

export { smartcli, smartfile, smartlog, smartlogDestinationLocal, smartshell };

// third party scope
import * as typedoc from 'typedoc';

export { typedoc };
14
tsconfig.json
Normal file
@@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
17
tslint.json
@@ -1,17 +0,0 @@
{
  "extends": ["tslint:latest", "tslint-config-prettier"],
  "rules": {
    "semicolon": [true, "always"],
    "no-console": false,
    "ordered-imports": false,
    "object-literal-sort-keys": false,
    "member-ordering": {
      "options": {
        "order": [
          "static-method"
        ]
      }
    }
  },
  "defaultSeverity": "warning"
}