Compare commits
207 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 4e2f938d71 | |||
| abdc2f2534 | |||
| a2b596e5db | |||
| a3a0537ddc | |||
| fc85f28f69 | |||
| 6b2957b272 | |||
| 883985dbc0 | |||
| 21006b41d0 | |||
| 5d0411a5ba | |||
| 39f5410b76 | |||
| 1a517fdd1b | |||
| 90af6eb1b1 | |||
| 3485392979 | |||
| 89adae2cff | |||
| 3451ab7456 | |||
| bcded1eafa | |||
| 9cae46e2fe | |||
| 65c1df30da | |||
| e8f2add812 | |||
| 8fcc304ee3 | |||
| 69802b46b6 | |||
| e500455557 | |||
| 4029691ccd | |||
| 3b1c84d7e8 | |||
| f8d0895aab | |||
| d7ec2220a1 | |||
| c24ce31b1f | |||
| fec2017cc6 | |||
| 88fac91c79 | |||
| ce4da89da9 | |||
| 6524adea18 | |||
| 4bf0c02618 | |||
| f84a65217d | |||
| 3f22fc91ae | |||
| 11e65b92ec | |||
| 0a3080518f | |||
| d0a4ddbb4b | |||
| 481339d3cb | |||
| ebc3d760af | |||
| a6d678e36c | |||
| 8c3e16a4f2 | |||
| 2276fb0c0c | |||
| 0a9d535df4 | |||
| d46fd1590e | |||
| 1d7317f063 | |||
| fe5121ec9c | |||
| c084b20390 | |||
| 6f024536a8 | |||
| 2405fb3370 | |||
| 8561940b8c | |||
| ab273ea75c | |||
| 620737566f | |||
| 23453bf16b | |||
| 84947cfb80 | |||
| 1a9ac9091d | |||
| 88b93b8b83 | |||
| 77279a9135 | |||
| 7426addbdd | |||
| 58d060d729 | |||
| 370cbfe6f3 | |||
| 2adb4e8cb0 | |||
| e8608b1cae | |||
| 33fa7fa337 | |||
| 2946bcaf49 | |||
| d962e17c18 | |||
| a22c400355 | |||
| 08b7305ef0 | |||
| d7b462fda9 | |||
| 01e6c15626 | |||
| 94a066247f | |||
| 7de157ccb3 | |||
| d783965b25 | |||
| 07f1413d5e | |||
| d7bf45f6b5 | |||
| 3eb64bcb5d | |||
| e24a027fdd | |||
| 3f451cfcb1 | |||
| e355c51c8d | |||
| b0fcaba2c3 | |||
| 4ea205e11b | |||
| f819e7b521 | |||
| d4903f32f0 | |||
| 34102a2544 | |||
| 5e2171dbfd | |||
| 70d4af653a | |||
| 06f6fdef98 | |||
| b6fb7bf029 | |||
| 4c83725120 | |||
| a060cd1a03 | |||
| e8372effc7 | |||
| 571249705e | |||
| 927cd961fd | |||
| 63b4fcc232 | |||
| 4188ed7f24 | |||
| 1feddc6e85 | |||
| 499baebc18 | |||
| 01fc0d0c6e | |||
| b6c9cea5d1 | |||
| a949039192 | |||
| 11bde9d756 | |||
| eac26521c6 | |||
| e1323569f5 | |||
| 41e4bd6689 | |||
| 164a58ec59 | |||
| e1c0f82fe8 | |||
| 8a0046818b | |||
| 97fa9db32f | |||
| d61de9b615 | |||
| fba54035ea | |||
| 9a3d8588a8 | |||
| eb8f8fa70a | |||
| afe7b5e99e | |||
| e074562362 | |||
| 240d6bb314 | |||
| 2d0839a1da | |||
| 9f250ae2b3 | |||
| 1223bb8567 | |||
| 9395cfc166 | |||
| 3b4c6bd97f | |||
| 5d2c9e6158 | |||
| 89977038ec | |||
| b753c206b0 | |||
| 1965bd9b47 | |||
| 138d71e8c5 | |||
| 15397e8609 | |||
| 1489420e47 | |||
| 5e3b122b59 | |||
| 02fa9215d3 | |||
| 32f12c67cf | |||
| be53225bb1 | |||
| a5db530879 | |||
| c5b07c2504 | |||
| 1bd215d18d | |||
| e5a348f57c | |||
| d243880d55 | |||
| c1bd85fc58 | |||
| b81220b2ba | |||
| ca26d9e98d | |||
| 61b6161470 | |||
| 463183bd3a | |||
| 069a74d2b4 | |||
| 87c1ae53b3 | |||
| 774aea55ff | |||
| ee7038e0d7 | |||
| 7c3bae4c6e | |||
| 69d59e02f8 | |||
| b4b6797fdf | |||
| 4bbb154c4f | |||
| eec33e29d3 | |||
| c33a7d37ee | |||
| 084b321e6a | |||
| cf1cfbd647 | |||
| 489349e45a | |||
| c0c627fedb | |||
| 8d4b278a5d | |||
| a0969912eb | |||
| 39d64ffcf3 | |||
| 529297bd09 | |||
| 4c16cb9c3e | |||
| 3a6cdf5fb5 | |||
| 2460c89151 | |||
| 3dae706a67 | |||
| c150052380 | |||
| 1d00a95885 | |||
| d9bfba1b5f | |||
| c56db7d1d0 | |||
| a2bcd1a1c5 | |||
| 795ce9b014 | |||
| 9a84009f47 | |||
| 6efe00abd9 | |||
| d81b9dd213 | |||
| 751a5b8630 | |||
| 3c9e421351 | |||
| fe05144a56 | |||
| ad7035e5e3 | |||
| 49601f3bac | |||
| 6c13622b33 | |||
| 9021e9ae39 | |||
| 7289b77398 | |||
| ae90b8297f | |||
| d75a65ee46 | |||
| c28ff5212e | |||
| 97bf1e3990 | |||
| 769a22057f | |||
| b5910b6557 | |||
| f4ea7f0d0a | |||
| 50eff3fbd5 | |||
| 68fb3ed643 | |||
| c9bfbadbf6 | |||
| b06005b949 | |||
| 5e4b42a920 | |||
| 066401c474 | |||
| 972681834f | |||
| 44b81bb478 | |||
| e1eb88e298 | |||
| 50da9a5ce7 | |||
| a2fd6998ee | |||
| d5908d4bc6 | |||
| a34b77f469 | |||
| 027b4ca26f | |||
| e9b8b3b6d9 | |||
| 8d2c9e8241 | |||
| b78b0ef599 | |||
| 5a046a7667 | |||
| b04f3be3db | |||
| d441f5b489 | |||
| 38f3ccb364 |
66
.gitea/workflows/default_nottags.yaml
Normal file
66
.gitea/workflows/default_nottags.yaml
Normal file
@@ -0,0 +1,66 @@
|
|||||||
|
name: Default (not tags)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags-ignore:
|
||||||
|
- '**'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci
|
||||||
|
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
|
||||||
|
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||||
|
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||||
|
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||||
|
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Install pnpm and npmci
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
|
||||||
|
- name: Run npm prepare
|
||||||
|
run: npmci npm prepare
|
||||||
|
|
||||||
|
- name: Audit production dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --prod
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Audit development dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --dev
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
test:
|
||||||
|
if: ${{ always() }}
|
||||||
|
needs: security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Test stable
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm test
|
||||||
|
|
||||||
|
- name: Test build
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm build
|
||||||
124
.gitea/workflows/default_tags.yaml
Normal file
124
.gitea/workflows/default_tags.yaml
Normal file
@@ -0,0 +1,124 @@
|
|||||||
|
name: Default (tags)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
tags:
|
||||||
|
- '*'
|
||||||
|
|
||||||
|
env:
|
||||||
|
IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci
|
||||||
|
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
|
||||||
|
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||||
|
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||||
|
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||||
|
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
security:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
continue-on-error: true
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Audit production dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --prod
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
- name: Audit development dependencies
|
||||||
|
run: |
|
||||||
|
npmci command npm config set registry https://registry.npmjs.org
|
||||||
|
npmci command pnpm audit --audit-level=high --dev
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
test:
|
||||||
|
if: ${{ always() }}
|
||||||
|
needs: security
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Test stable
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm test
|
||||||
|
|
||||||
|
- name: Test build
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
npmci npm build
|
||||||
|
|
||||||
|
release:
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Release
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm publish
|
||||||
|
|
||||||
|
metadata:
|
||||||
|
needs: test
|
||||||
|
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: ${{ env.IMAGE }}
|
||||||
|
continue-on-error: true
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Prepare
|
||||||
|
run: |
|
||||||
|
pnpm install -g pnpm
|
||||||
|
pnpm install -g @ship.zone/npmci
|
||||||
|
npmci npm prepare
|
||||||
|
|
||||||
|
- name: Code quality
|
||||||
|
run: |
|
||||||
|
npmci command npm install -g typescript
|
||||||
|
npmci npm install
|
||||||
|
|
||||||
|
- name: Trigger
|
||||||
|
run: npmci trigger
|
||||||
|
|
||||||
|
- name: Build docs and upload artifacts
|
||||||
|
run: |
|
||||||
|
npmci node install stable
|
||||||
|
npmci npm install
|
||||||
|
pnpm install -g @git.zone/tsdoc
|
||||||
|
npmci command tsdoc
|
||||||
|
continue-on-error: true
|
||||||
7
.gitignore
vendored
7
.gitignore
vendored
@@ -15,8 +15,7 @@ node_modules/
|
|||||||
|
|
||||||
# builds
|
# builds
|
||||||
dist/
|
dist/
|
||||||
dist_web/
|
dist_*/
|
||||||
dist_serve/
|
|
||||||
dist_ts_web/
|
|
||||||
|
|
||||||
# custom
|
# custom
|
||||||
|
**/.claude/settings.local.json
|
||||||
|
|||||||
125
.gitlab-ci.yml
125
.gitlab-ci.yml
@@ -1,125 +0,0 @@
|
|||||||
# gitzone standard
|
|
||||||
image: hosttoday/ht-docker-node:npmci
|
|
||||||
|
|
||||||
cache:
|
|
||||||
paths:
|
|
||||||
- .npmci_cache/
|
|
||||||
key: "$CI_BUILD_STAGE"
|
|
||||||
|
|
||||||
stages:
|
|
||||||
- security
|
|
||||||
- test
|
|
||||||
- release
|
|
||||||
- metadata
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# security stage
|
|
||||||
# ====================
|
|
||||||
mirror:
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci git mirror
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
snyk:
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install -g snyk
|
|
||||||
- npmci command npm install --ignore-scripts
|
|
||||||
- npmci command snyk test
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# test stage
|
|
||||||
# ====================
|
|
||||||
|
|
||||||
testLTS:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install lts
|
|
||||||
- npmci npm install
|
|
||||||
- npmci npm test
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
testSTABLE:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci npm test
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
release:
|
|
||||||
stage: release
|
|
||||||
script:
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm publish
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# metadata stage
|
|
||||||
# ====================
|
|
||||||
codequality:
|
|
||||||
stage: metadata
|
|
||||||
image: docker:stable
|
|
||||||
allow_failure: true
|
|
||||||
services:
|
|
||||||
- docker:stable-dind
|
|
||||||
script:
|
|
||||||
- export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
|
|
||||||
- docker run
|
|
||||||
--env SOURCE_CODE="$PWD"
|
|
||||||
--volume "$PWD":/code
|
|
||||||
--volume /var/run/docker.sock:/var/run/docker.sock
|
|
||||||
"registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
|
|
||||||
artifacts:
|
|
||||||
paths: [codeclimate.json]
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- priv
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci trigger
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
pages:
|
|
||||||
image: hosttoday/ht-docker-node:npmci
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci command npm install -g typedoc typescript
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command typedoc --module "commonjs" --target "ES2016" --out public/ ts/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
artifacts:
|
|
||||||
expire_in: 1 week
|
|
||||||
paths:
|
|
||||||
- public
|
|
||||||
allow_failure: true
|
|
||||||
44
.smartconfig.json
Normal file
44
.smartconfig.json
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
{
|
||||||
|
"gitzone": {
|
||||||
|
"projectType": "npm",
|
||||||
|
"module": {
|
||||||
|
"githost": "gitlab.com",
|
||||||
|
"gitscope": "gitzone",
|
||||||
|
"gitrepo": "tsdoc",
|
||||||
|
"shortDescription": "a tool for better documentation",
|
||||||
|
"npmPackagename": "@git.zone/tsdoc",
|
||||||
|
"license": "MIT",
|
||||||
|
"projectDomain": "git.zone",
|
||||||
|
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
|
||||||
|
"keywords": [
|
||||||
|
"TypeScript",
|
||||||
|
"documentation",
|
||||||
|
"AI",
|
||||||
|
"CLI",
|
||||||
|
"README",
|
||||||
|
"TypeDoc",
|
||||||
|
"commit messages",
|
||||||
|
"automation",
|
||||||
|
"code analysis",
|
||||||
|
"context trimming",
|
||||||
|
"developer tools"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"npmci": {
|
||||||
|
"npmGlobalTools": [],
|
||||||
|
"npmAccessLevel": "public"
|
||||||
|
},
|
||||||
|
"tsdoc": {
|
||||||
|
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./license) file.\n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.\n\nUse of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District Court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||||
|
},
|
||||||
|
"@git.zone/cli": {
|
||||||
|
"release": {
|
||||||
|
"registries": [
|
||||||
|
"https://verdaccio.lossless.digital",
|
||||||
|
"https://registry.npmjs.org"
|
||||||
|
],
|
||||||
|
"accessLevel": "public"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
13
.snyk
13
.snyk
@@ -1,13 +0,0 @@
|
|||||||
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
|
|
||||||
version: v1.13.5
|
|
||||||
# ignores vulnerabilities until expiry date; change duration by modifying expiry date
|
|
||||||
ignore:
|
|
||||||
SNYK-JS-MARKED-174116:
|
|
||||||
- typedoc > marked:
|
|
||||||
reason: None given
|
|
||||||
expires: '2019-06-13T06:50:33.594Z'
|
|
||||||
'npm:shelljs:20140723':
|
|
||||||
- typedoc > shelljs:
|
|
||||||
reason: None given
|
|
||||||
expires: '2019-06-13T06:50:33.594Z'
|
|
||||||
patch: {}
|
|
||||||
11
.vscode/launch.json
vendored
Normal file
11
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
{
|
||||||
|
"version": "0.2.0",
|
||||||
|
"configurations": [
|
||||||
|
{
|
||||||
|
"command": "npm test",
|
||||||
|
"name": "Run npm test",
|
||||||
|
"request": "launch",
|
||||||
|
"type": "node-terminal"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
26
.vscode/settings.json
vendored
Normal file
26
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
{
|
||||||
|
"json.schemas": [
|
||||||
|
{
|
||||||
|
"fileMatch": ["/.smartconfig.json"],
|
||||||
|
"schema": {
|
||||||
|
"type": "object",
|
||||||
|
"properties": {
|
||||||
|
"npmci": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "settings for npmci"
|
||||||
|
},
|
||||||
|
"gitzone": {
|
||||||
|
"type": "object",
|
||||||
|
"description": "settings for gitzone",
|
||||||
|
"properties": {
|
||||||
|
"projectType": {
|
||||||
|
"type": "string",
|
||||||
|
"enum": ["website", "element", "service", "npm", "wcc"]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
0
assets/.gitkeep
Normal file
0
assets/.gitkeep
Normal file
280
changelog.md
Normal file
280
changelog.md
Normal file
@@ -0,0 +1,280 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2026-03-24 - 2.0.2 - fix(smartconfig)
|
||||||
|
migrate project metadata and config handling to .smartconfig.json
|
||||||
|
|
||||||
|
- replace npmextra.json with .smartconfig.json across packaging, project context, README generation, and description prompts
|
||||||
|
- fix description updates to write metadata under gitzone.module and use smartconfig KeyValueStore for persisted settings
|
||||||
|
- refresh documentation and dependency versions to reflect the new config location and storage path
|
||||||
|
|
||||||
|
## 2026-03-24 - 2.0.1 - fix(aidocs, config)
|
||||||
|
migrate aidocs configuration handling from npmextra to smartconfig
|
||||||
|
|
||||||
|
- replace the @push.rocks/npmextra dependency with @push.rocks/smartconfig
|
||||||
|
- load project metadata from smartconfig.json instead of npmextra.json in aidocs workflows
|
||||||
|
- move user key-value store paths from ~/.npmextra to ~/.smartconfig and preserve existing OPENAI_TOKEN data through migration
|
||||||
|
|
||||||
|
## 2026-03-11 - 2.0.0 - BREAKING CHANGE(aidoc)
|
||||||
|
migrate agent orchestration to new runAgent API and filesystem tools; refactor model handling and update README and tests
|
||||||
|
|
||||||
|
- Replace DualAgentOrchestrator with plugins.smartagent.runAgent and scoped filesystem tools
|
||||||
|
- Introduce smartagentTools export and use filesystemTool for agents
|
||||||
|
- Replace smartAiInstance with model via plugins.smartai.getModel() and remove previous lifecycle methods (breaking API change)
|
||||||
|
- Normalize agent output property from result to text and standardize log messages (removed emojis)
|
||||||
|
- Update changelog/README/description generation flows to use new agent interface
|
||||||
|
- Bump several devDependencies and dependencies (tsbuild, tstest, @types/node, tspublish, push.rocks packages, typedoc, typescript)
|
||||||
|
- Change test entry to export default tap.start()
|
||||||
|
- Revise README content and structure
|
||||||
|
|
||||||
|
## 2026-01-04 - 1.12.0 - feat(commit)
|
||||||
|
add token budgeting and dynamic diff token calculation to avoid OpenAI context limit issues
|
||||||
|
|
||||||
|
- Introduce TOKEN_BUDGET constants and calculateMaxDiffTokens() in ts/aidocs_classes/commit.ts
|
||||||
|
- Use dynamic maxDiffTokens for DiffProcessor and validate/log warnings when estimated tokens approach limits
|
||||||
|
- Add token budgeting notes to readme.hints.md (guidance for splitting large commits and adjusting overhead)
|
||||||
|
- Bump dependencies/devDependencies: @git.zone/tstest ^3.1.4, @types/node ^25.0.3, @git.zone/tspublish ^1.11.0, @push.rocks/smartfs ^1.3.1
|
||||||
|
|
||||||
|
## 2025-12-16 - 1.11.4 - fix(aidocs_classes)
|
||||||
|
clarify recommendedNextVersionMessage field to require only the description body without the type(scope) prefix
|
||||||
|
|
||||||
|
- Updated inline documentation in ts/aidocs_classes/commit.ts to explicitly state that recommendedNextVersionMessage must be only the description body (example: 'bump dependency to ^1.2.6') and not include the type(scope) prefix.
|
||||||
|
- Removes ambiguity in the example text and improves guidance for commit message generation.
|
||||||
|
|
||||||
|
## 2025-12-15 - 1.11.0 - feat(commit)
|
||||||
|
Integrate DualAgentOrchestrator for commit message generation and improve diff/context handling
|
||||||
|
|
||||||
|
- Add @push.rocks/smartagent dependency and export it from plugins
|
||||||
|
- Use DualAgentOrchestrator to generate and guardian-validate commit messages
|
||||||
|
- Use DualAgentOrchestrator for changelog generation with guardian validation
|
||||||
|
- Switch commit flow to TaskContextFactory and DiffProcessor for token-efficient context
|
||||||
|
- Expose getOpenaiToken() and wire orchestrator with the project OpenAI token
|
||||||
|
- Enhance iterative context builder and context components to better manage token budgets and sampling
|
||||||
|
- Update npmextra.json with release config for @git.zone/cli and reference local smartagent package in package.json
|
||||||
|
|
||||||
|
## 2025-12-02 - 1.10.0 - feat(diff-processor)
|
||||||
|
Improve diff sampling and file prioritization: increase inclusion thresholds, expand sampled context, and boost priority for interface/type and entry-point files
|
||||||
|
|
||||||
|
- Raise small/medium file thresholds used by DiffProcessor (smallFileLines 50 -> 300, mediumFileLines 200 -> 800) so more source files are included fully or summarized rather than treated as large metadata-only files
|
||||||
|
- Increase sample window for medium files (sampleHeadLines/sampleTailLines 20 -> 75) to provide more context when summarizing diffs
|
||||||
|
- Boost importance scoring for interfaces/type files and entry points (adds +20 for interfaces/.types and +15 for index/mod entry files) to prioritize critical API surface in diff processing
|
||||||
|
- Keep other prioritization rules intact (source/test/config/docs/build heuristics), and align the aidoc commit DiffProcessor usage with the new defaults
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.9.2 - fix(deps)
|
||||||
|
Update dependencies and devDependencies to newer versions (bump multiple packages)
|
||||||
|
|
||||||
|
- Bumped devDependencies: @git.zone/tsbuild 2.6.8 -> 2.7.1, @git.zone/tsrun 1.2.46 -> 1.6.2, @git.zone/tstest 2.3.6 -> 2.7.0
|
||||||
|
- Bumped runtime dependencies: @push.rocks/smartai 0.5.11 -> 0.8.0, @push.rocks/smartcli 4.0.11 -> 4.0.19, @push.rocks/smartgit 3.2.1 -> 3.3.1, @push.rocks/smartlog 3.1.9 -> 3.1.10, gpt-tokenizer 3.0.1 -> 3.2.0, typedoc 0.28.12 -> 0.28.14, typescript 5.9.2 -> 5.9.3
|
||||||
|
- No source code changes in this commit; dependency-only updates. Run the test suite and CI to verify compatibility.
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.9.1 - fix(iterative-context-builder)
|
||||||
|
Rely on DiffProcessor for git diff pre-processing; remove raw char truncation, raise diff token safety, and improve logging
|
||||||
|
|
||||||
|
- Removed raw character-based truncation of additionalContext — diffs are expected to be pre-processed by DiffProcessor instead of blind substring truncation.
|
||||||
|
- Now validates pre-processed diff token count only and treats DiffProcessor as the primary sampler (DiffProcessor typically uses a ~100k token budget).
|
||||||
|
- Increased MAX_DIFF_TOKENS safety net to 200,000 to cover edge cases and avoid false positives; updated logs to reflect pre-processed diffs.
|
||||||
|
- Improved error messaging to indicate a likely DiffProcessor misconfiguration when pre-processed diffs exceed the safety limit.
|
||||||
|
- Updated informational logs to state that a pre-processed git diff was added to context.
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.9.0 - feat(context)
|
||||||
|
Add intelligent DiffProcessor to summarize and prioritize git diffs and integrate it into the commit context pipeline
|
||||||
|
|
||||||
|
- Add DiffProcessor (ts/context/diff-processor.ts) to intelligently process git diffs: include small files fully, summarize medium files (head/tail sampling), and mark very large files as metadata-only to stay within token budgets.
|
||||||
|
- Integrate DiffProcessor into commit workflow (ts/aidocs_classes/commit.ts): preprocess raw diffs, emit processed diff statistics, and pass a token-efficient diff section into the TaskContextFactory for commit context generation.
|
||||||
|
- Export DiffProcessor and its types through the context index and types (ts/context/index.ts, ts/context/types.ts) so other context components can reuse it.
|
||||||
|
- Add comprehensive tests for the DiffProcessor behavior and integration (test/test.diffprocessor.node.ts) covering small/medium/large diffs, added/deleted files, prioritization, token budgets, and formatting for context.
|
||||||
|
- Minor adjustments across context/task factories and builders to accept and propagate processed diff strings rather than raw diffs, reducing risk of token overflows during iterative context building.
|
||||||
|
|
||||||
|
## 2025-11-04 - 1.8.3 - fix(context)
|
||||||
|
Prevent enormous git diffs and OOM during context building by adding exclusion patterns, truncation, and diagnostic logging
|
||||||
|
|
||||||
|
- Add comprehensive git diff exclusion globs (locks, build artifacts, maps, bundles, IDE folders, logs, caches) when collecting uncommitted diffs to avoid noisy/huge diffs
|
||||||
|
- Pass glob patterns directly to smartgit.getUncommittedDiff for efficient server-side matching
|
||||||
|
- Emit diagnostic statistics for diffs (files changed, total characters, estimated tokens, number of exclusion patterns) and warn on unusually large diffs
|
||||||
|
- Introduce pre-tokenization safety checks in iterative context builder: truncate raw diff text if it exceeds MAX_DIFF_CHARS and throw a clear error if token count still exceeds MAX_DIFF_TOKENS
|
||||||
|
- Format and log token counts using locale-aware formatting for clarity
|
||||||
|
- Improve robustness of commit context generation to reduce risk of OOM / model-limit overruns
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.8.0 - feat(context)
|
||||||
|
Wire OpenAI provider through task context factory and add git-diff support to iterative context builder
|
||||||
|
|
||||||
|
- Pass AiDoc.openaiInstance through TaskContextFactory into IterativeContextBuilder to reuse the same OpenAI provider and avoid reinitialization.
|
||||||
|
- IterativeContextBuilder now accepts an optional OpenAiProvider and an additionalContext string; when provided, git diffs (or other extra context) are prepended to the AI context and token counts are updated.
|
||||||
|
- createContextForCommit now forwards the git diff into the iterative builder so commit-specific context includes the diff.
|
||||||
|
- Updated aidocs_classes (commit, description, readme) to supply the existing openaiInstance when creating the TaskContextFactory.
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.7.0 - feat(IterativeContextBuilder)
|
||||||
|
Add iterative AI-driven context builder and integrate into task factory; add tests and iterative configuration
|
||||||
|
|
||||||
|
- Introduce IterativeContextBuilder: iterative, token-aware context construction that asks the AI which files to load and evaluates context sufficiency.
|
||||||
|
- Switch TaskContextFactory to use IterativeContextBuilder for readme, description and commit tasks (replaces earlier EnhancedContext flow for these tasks).
|
||||||
|
- Add iterative configuration options (maxIterations, firstPassFileLimit, subsequentPassFileLimit, temperature, model) in types and ConfigManager and merge support for user config.
|
||||||
|
- Update CLI (tokens and aidoc flows) to use the iterative context factory and improve task handling and messaging.
|
||||||
|
- Add test coverage: test/test.iterativecontextbuilder.node.ts to validate initialization, iterative builds, token budget respect and multiple task types.
|
||||||
|
- Enhance ContextCache, LazyFileLoader, ContextAnalyzer and ContextTrimmer to support the iterative pipeline and smarter prioritization/prompts.
|
||||||
|
|
||||||
|
## 2025-11-03 - 1.6.1 - fix(context)
|
||||||
|
Improve context building, caching and test robustness
|
||||||
|
|
||||||
|
- EnhancedContext: refactored smart context building to use the analyzer and TaskContextFactory by default; taskType now defaults to 'description' and task-specific modes are applied.
|
||||||
|
- ConfigManager: simplified analyzer configuration (removed enabled flag) and fixed getAnalyzerConfig fallback shape.
|
||||||
|
- ContextCache: more robust mtime handling and persistence; tests updated to use real file mtimes so cache validation works reliably.
|
||||||
|
- LazyFileLoader: adjusted token estimation tolerance and improved metadata caching behavior.
|
||||||
|
- ContextAnalyzer & trimming pipeline: improved prioritization and trimming integration to better enforce token budgets.
|
||||||
|
- Tests: relaxed strict timing/boolean checks and made assertions more tolerant (toEqual vs toBe) to reduce false negatives.
|
||||||
|
|
||||||
|
## 2025-11-02 - 1.6.0 - feat(context)
|
||||||
|
Introduce smart context system: analyzer, lazy loader, cache and README/docs improvements
|
||||||
|
|
||||||
|
- Add ContextAnalyzer for dependency-based file scoring and prioritization (PageRank-like centrality, relevance, efficiency, recency)
|
||||||
|
- Add LazyFileLoader to scan metadata and load files in parallel with lightweight token estimates
|
||||||
|
- Add ContextCache for persistent file content/token caching with TTL and max-size eviction
|
||||||
|
- Enhance ContextTrimmer with tier-based trimming and configurable light/aggressive levels
|
||||||
|
- Integrate new components into EnhancedContext and TaskContextFactory to build task-aware, token-optimized contexts
|
||||||
|
- Extend ConfigManager and types to support cache, analyzer, prioritization weights and tier configs (npmextra.json driven)
|
||||||
|
- Add comprehensive unit tests for ContextAnalyzer, ContextCache and LazyFileLoader
|
||||||
|
- Update README with Smart Context Building docs, examples, configuration options and CI workflow snippet
|
||||||
|
|
||||||
|
## 2025-09-07 - 1.5.2 - fix(package)
|
||||||
|
Bump dependencies, refine test script and imports, and overhaul README and docs
|
||||||
|
|
||||||
|
- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
|
||||||
|
- Updated test script to run tstest with verbose, logfile and increased timeout; adjusted testCli script invocation.
|
||||||
|
- Fixed test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
|
||||||
|
- Large README rewrite: reorganized and expanded content, added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
|
||||||
|
- Minor clarification added to commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).
|
||||||
|
|
||||||
|
## 2025-08-16 - 1.5.1 - fix(aidoc)
|
||||||
|
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()
|
||||||
|
|
||||||
|
- Bumped multiple dependencies and devDependencies in package.json (notable upgrades: @git.zone/tsbuild, @git.zone/tspublish, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartai, @push.rocks/smartfile, @push.rocks/smartgit, @push.rocks/smartlog, @push.rocks/smartpath, @push.rocks/smartshell, typedoc, typescript).
|
||||||
|
- Added pnpm-workspace.yaml with onlyBuiltDependencies (esbuild, mongodb-memory-server, puppeteer, sharp).
|
||||||
|
- Added AiDoc.stop() to properly stop the OpenAI provider (resource/client shutdown).
|
||||||
|
- Updated packageManager field in package.json to a newer pnpm version/hash.
|
||||||
|
|
||||||
|
## 2025-05-14 - 1.5.0 - feat(docs)
|
||||||
|
Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions
|
||||||
|
|
||||||
|
- Revised descriptions in package.json and npmextra.json to emphasize comprehensive documentation capabilities
|
||||||
|
- Expanded README with detailed installation options and extended usage examples for both CLI and API-like integrations
|
||||||
|
- Added new dependency (gpt-tokenizer) to support token counting for AI context building
|
||||||
|
- Adjusted keywords to better reflect project functionalities such as commit message automation and context trimming
|
||||||
|
|
||||||
|
## 2025-05-13 - 1.4.5 - fix(dependencies)
|
||||||
|
Upgrade various dependency versions and update package manager configuration
|
||||||
|
|
||||||
|
- Bump @git.zone/tsbuild from ^2.1.80 to ^2.3.2
|
||||||
|
- Upgrade @push.rocks/tapbundle from ^5.0.23 to ^6.0.3
|
||||||
|
- Update @types/node from ^22.8.1 to ^22.15.17
|
||||||
|
- Bump @push.rocks/smartai from ^0.4.2 to ^0.5.4
|
||||||
|
- Upgrade @push.rocks/smartlog from ^3.0.7 to ^3.0.9
|
||||||
|
- Update typedoc from ^0.27.9 to ^0.28.4
|
||||||
|
- Bump typescript from ^5.5.2 to ^5.8.3
|
||||||
|
- Add packageManager field with pnpm@10.10.0 specification
|
||||||
|
|
||||||
|
## 2025-02-25 - 1.4.4 - fix(dependencies)
|
||||||
|
Update dependencies to latest versions
|
||||||
|
|
||||||
|
- Updated '@push.rocks/smartai' from '^0.0.17' to '^0.4.2'
|
||||||
|
- Updated 'typedoc' from '^0.26.1' to '^0.27.9'
|
||||||
|
|
||||||
|
## 2025-01-14 - 1.4.3 - fix(aidocs_classes)
|
||||||
|
Improve readme generation instructions to ensure correct markdown formatting.
|
||||||
|
|
||||||
|
- Added guidance to avoid using backticks at the beginning and end of readme generation to prevent markdown issues.
|
||||||
|
- Clarified that the output is directly written to readme.md and backticks should only be used for code blocks.
|
||||||
|
|
||||||
|
## 2024-10-28 - 1.4.2 - fix(cli)
|
||||||
|
Ensure async completion for aidoc readme and description generation
|
||||||
|
|
||||||
|
- Added await statements for asynchronous methods buildReadme and buildDescription in the aidoc command.
|
||||||
|
|
||||||
|
## 2024-10-28 - 1.4.1 - fix(readme)
|
||||||
|
Correct async call to getModuleSubDirs in readme generation.
|
||||||
|
|
||||||
|
- Fixed an issue with asynchronous handling in readme generation for submodules.
|
||||||
|
- Ensured that getModuleSubDirs function is called with await to handle promises properly.
|
||||||
|
|
||||||
|
## 2024-10-28 - 1.4.0 - feat(aidocs)
|
||||||
|
Added support for building readmes for sub-modules in aidocs
|
||||||
|
|
||||||
|
- Updated the `Readme` class to handle monorepo projects by generating readmes for sub-modules.
|
||||||
|
- Integrated `tspublish` to identify sub-modules for readme generation.
|
||||||
|
|
||||||
|
## 2024-06-24 - 1.3.12 - fix(aidocs)
|
||||||
|
Fix changelog generation by handling leading newlines
|
||||||
|
|
||||||
|
- Fixed handling of leading newlines in the changelog to ensure proper formatting.
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.11 - fix(core)
|
||||||
|
Fixed new changelog formatting issue to retain consistent spacing.
|
||||||
|
|
||||||
|
- Adjusted the new changelog generation to ensure consistent spacing for improved readability.
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.10 - fix(aidocs_classes)
|
||||||
|
Fix changelog format to remove extra newline
|
||||||
|
|
||||||
|
- Updated `ts/aidocs_classes/commit.ts` to fix the changelog format.
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.9 - fix(aidoc)
|
||||||
|
Fix changelog generation by properly stripping markdown code fences
|
||||||
|
|
||||||
|
- Corrected the changelog generation code to ensure markdown code fences are properly stripped.
|
||||||
|
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.8 - fix(changelog)
|
||||||
|
Fix changelog generation by properly stripping markdown code fences
|
||||||
|
|
||||||
|
- Corrected the changelog generation code to ensure markdown code fences are properly stripped.
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.7 - fix(aidoc)
|
||||||
|
Update to include package-lock.json in uncommitted changes check
|
||||||
|
|
||||||
|
- Modified the getUncommittedDiff method call in commit.ts to include package-lock.json along with pnpm-lock.yaml
|
||||||
|
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.6 - fix(commit)
|
||||||
|
Fixed issue with retrieving uncommitted diffs in git repository
|
||||||
|
|
||||||
|
- Revised logic to correctly handle uncommitted changes by using an array for `getUncommittedDiff` method
|
||||||
|
- Ensured proper handling and representation of uncommitted changes in the output
|
||||||
|
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.5 - fix(aidocs_classes)
|
||||||
|
Refactor and enhance changelog formatting
|
||||||
|
|
||||||
|
- Updated the `commit.ts` file to improve the changelog formatting and ensure consistency.
|
||||||
|
- Enhanced the changelog instructions to include summarizing messages for omitted commits.
|
||||||
|
- Removed unnecessary console logging in `projectcontext.ts`.
|
||||||
|
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
## 2024-06-23 - 1.3.3 - fix(aidocs_classes)
|
||||||
|
Fix changelog formatting issue in commit class
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.2 - fix(aidocs_classes)
|
||||||
|
Fix minor bugs and update dependencies in aidocs_classes
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.1 - fix(aidocs_classes)
|
||||||
|
Fix typo in INextCommitObject interface and update date format in changelog generation.
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.3.0 - fix(aidocs_classes)
|
||||||
|
Fix typo in INextCommitObject interface
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.2.4 - feat(core)
|
||||||
|
Added smarttime dependency and improved changelog generation
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.2.3 - fix(logging)
|
||||||
|
Refactor logger initialization to use commitinfo data
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.2.2 - fix(aidocs)
|
||||||
|
Fix bug in AiDoc class causing undefined token handling
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.2.0 - fix(core)
|
||||||
|
Fixed usage of plugins in project context and readme generation
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.1.42 - feat(aidocs_classes)
|
||||||
|
Enhance changelog generation by supporting complete generation in the absence of previous changelog files
|
||||||
|
|
||||||
|
## 2024-06-23 - 1.1.41 - fix(aidocs_classes)
|
||||||
|
Improve commit message generation by handling empty diffs and updating changelog instructions
|
||||||
|
```
|
||||||
4
cli.child.ts
Normal file
4
cli.child.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
process.env.CLI_CALL = 'true';
|
||||||
|
import * as cliTool from './ts/index.js';
|
||||||
|
cliTool.runCli();
|
||||||
3
cli.js
3
cli.js
@@ -1,3 +1,4 @@
|
|||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
process.env.CLI_CALL = 'true';
|
process.env.CLI_CALL = 'true';
|
||||||
require('./dist/index');
|
const cliTool = await import('./dist_ts/index.js');
|
||||||
|
cliTool.runCli();
|
||||||
|
|||||||
@@ -1,4 +1,5 @@
|
|||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
process.env.CLI_CALL = 'true';
|
process.env.CLI_CALL = 'true';
|
||||||
require('@gitzone/tsrun');
|
|
||||||
require('./ts/index');
|
import * as tsrun from '@git.zone/tsrun';
|
||||||
|
tsrun.runPath('./cli.child.js', import.meta.url);
|
||||||
|
|||||||
2
license
2
license
@@ -1,4 +1,4 @@
|
|||||||
Copyright (c) 2019 Lossless GmbH (hello@lossless.com)
|
Copyright (c) 2019 Task Venture Capital GmbH (hello@task.vc)
|
||||||
|
|
||||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
of this software and associated documentation files (the "Software"), to deal
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
|||||||
@@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"gitzone": {
|
|
||||||
"module": {
|
|
||||||
"githost": "gitlab.com",
|
|
||||||
"gitscope": "gitzone",
|
|
||||||
"gitrepo": "tsdoc",
|
|
||||||
"shortDescription": "a tool for better documentation",
|
|
||||||
"npmPackagename": "@gitzone/tsdoc",
|
|
||||||
"license": "MIT",
|
|
||||||
"projectDomain": "git.zone"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"npmci": {
|
|
||||||
"npmGlobalTools": [],
|
|
||||||
"npmAccessLevel": "public"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
1773
package-lock.json
generated
1773
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
98
package.json
98
package.json
@@ -1,37 +1,85 @@
|
|||||||
{
|
{
|
||||||
"name": "@gitzone/tsdoc",
|
"name": "@git.zone/tsdoc",
|
||||||
"version": "1.0.10",
|
"version": "2.0.2",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "a tool for better documentation",
|
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
|
||||||
"main": "dist/index.js",
|
"type": "module",
|
||||||
"typings": "dist/index.d.ts",
|
"exports": {
|
||||||
"author": "Lossless GmbH",
|
".": "./dist_ts/index.js"
|
||||||
|
},
|
||||||
|
"author": "Task Venture Capital GmbH",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"bin": {
|
"bin": {
|
||||||
"tsdoc": "cli.js"
|
"tsdoc": "cli.js"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "(tstest test/) && (node ./cli.ts.js) && rm -rf public/",
|
"test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
|
||||||
"build": "(tsbuild)",
|
"testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
|
||||||
"buildMkdocs": "(cd mkdocs/originalrepo && docker rmi -f mkdocs && docker build -t mkdocs .)",
|
"build": "(tsbuild --web --allowimplicitany)",
|
||||||
"format": "(gitzone format)"
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@gitzone/tsbuild": "^2.0.22",
|
"@git.zone/tsbuild": "^4.3.0",
|
||||||
"@gitzone/tstest": "^1.0.15",
|
"@git.zone/tsrun": "^2.0.1",
|
||||||
"@pushrocks/tapbundle": "^3.0.7",
|
"@git.zone/tstest": "^3.5.1",
|
||||||
"@types/node": "^12.0.0",
|
"@types/node": "^25.5.0"
|
||||||
"tslint": "^5.11.0",
|
|
||||||
"tslint-config-prettier": "^1.15.0"
|
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@pushrocks/early": "^3.0.3",
|
"@git.zone/tspublish": "^1.11.3",
|
||||||
"@pushrocks/smartcli": "^3.0.7",
|
"@push.rocks/early": "^4.0.4",
|
||||||
"@pushrocks/smartfile": "^7.0.2",
|
"@push.rocks/smartconfig": "^6.0.1",
|
||||||
"@pushrocks/smartlog": "^2.0.19",
|
"@push.rocks/qenv": "^6.1.3",
|
||||||
"@pushrocks/smartlog-destination-local": "^7.0.5",
|
"@push.rocks/smartagent": "^3.0.2",
|
||||||
"@pushrocks/smartshell": "^2.0.13",
|
"@push.rocks/smartai": "^2.0.0",
|
||||||
"typedoc": "^0.14.2",
|
"@push.rocks/smartcli": "^4.0.20",
|
||||||
"typescript": "^3.4.5"
|
"@push.rocks/smartdelay": "^3.0.5",
|
||||||
}
|
"@push.rocks/smartfile": "^13.1.2",
|
||||||
|
"@push.rocks/smartfs": "^1.5.0",
|
||||||
|
"@push.rocks/smartgit": "^3.3.1",
|
||||||
|
"@push.rocks/smartinteract": "^2.0.16",
|
||||||
|
"@push.rocks/smartlog": "^3.2.1",
|
||||||
|
"@push.rocks/smartlog-destination-local": "^9.0.2",
|
||||||
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
|
"@push.rocks/smartshell": "^3.3.8",
|
||||||
|
"@push.rocks/smarttime": "^4.2.3",
|
||||||
|
"typedoc": "^0.28.18",
|
||||||
|
"typescript": "^6.0.2"
|
||||||
|
},
|
||||||
|
"files": [
|
||||||
|
"ts/**/*",
|
||||||
|
"ts_web/**/*",
|
||||||
|
"dist/**/*",
|
||||||
|
"dist_*/**/*",
|
||||||
|
"dist_ts/**/*",
|
||||||
|
"dist_ts_web/**/*",
|
||||||
|
"assets/**/*",
|
||||||
|
"cli.js",
|
||||||
|
".smartconfig.json",
|
||||||
|
"readme.md"
|
||||||
|
],
|
||||||
|
"browserslist": [
|
||||||
|
"last 1 chrome versions"
|
||||||
|
],
|
||||||
|
"keywords": [
|
||||||
|
"TypeScript",
|
||||||
|
"documentation",
|
||||||
|
"AI",
|
||||||
|
"CLI",
|
||||||
|
"README",
|
||||||
|
"TypeDoc",
|
||||||
|
"commit messages",
|
||||||
|
"automation",
|
||||||
|
"code analysis",
|
||||||
|
"context trimming",
|
||||||
|
"developer tools"
|
||||||
|
],
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "git+https://gitlab.com/gitzone/tsdoc.git"
|
||||||
|
},
|
||||||
|
"bugs": {
|
||||||
|
"url": "https://gitlab.com/gitzone/tsdoc/issues"
|
||||||
|
},
|
||||||
|
"homepage": "https://gitlab.com/gitzone/tsdoc#readme",
|
||||||
|
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
|
||||||
}
|
}
|
||||||
|
|||||||
9768
pnpm-lock.yaml
generated
Normal file
9768
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
14
readme.hints.md
Normal file
14
readme.hints.md
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
* module needs to be installed globally
|
||||||
|
* alternatively can be used through npx, if installed locally
|
||||||
|
* cli parameters are concluded from ./ts/cli.ts
|
||||||
|
* this module is not intended for API use.
|
||||||
|
* Read carefully through the TypeScript files. Don't make stuff up.
|
||||||
|
|
||||||
|
## Token Budgeting (commit.ts)
|
||||||
|
* OpenAI has a 272,000 token context limit
|
||||||
|
* The smartagent infrastructure adds ~180,000 tokens of overhead (system messages, tool descriptions, conversation history)
|
||||||
|
* TOKEN_BUDGET constants in commit.ts control the available tokens for diff content
|
||||||
|
* Dynamic calculation: 272K - 10K (safety) - 180K (overhead) - 2K (prompt) = 80K tokens for diff
|
||||||
|
* If token limit errors occur, consider:
|
||||||
|
- Splitting large commits into smaller ones
|
||||||
|
- Adjusting SMARTAGENT_OVERHEAD if actual overhead is different
|
||||||
276
readme.md
276
readme.md
@@ -1,26 +1,264 @@
|
|||||||
# @gitzone/tsdoc
|
# @git.zone/tsdoc
|
||||||
a tool for better documentation
|
|
||||||
|
|
||||||
## Availabililty and Links
|
AI-Powered Documentation & Commit Intelligence for TypeScript Projects 🚀
|
||||||
* [npmjs.org (npm package)](https://www.npmjs.com/package/@gitzone/tsdoc)
|
|
||||||
* [gitlab.com (source)](https://gitlab.com/gitzone/tsdoc)
|
|
||||||
* [github.com (source mirror)](https://github.com/gitzone/tsdoc)
|
|
||||||
* [docs (typedoc)](https://gitzone.gitlab.io/tsdoc/)
|
|
||||||
|
|
||||||
## Status for master
|
## Issue Reporting and Security
|
||||||
[](https://gitlab.com/gitzone/tsdoc/commits/master)
|
|
||||||
[](https://gitlab.com/gitzone/tsdoc/commits/master)
|
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
|
||||||
[](https://www.npmjs.com/package/@gitzone/tsdoc)
|
|
||||||
[](https://snyk.io/test/npm/@gitzone/tsdoc)
|
## Install
|
||||||
[](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
|
||||||
[](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
```bash
|
||||||
[](https://prettier.io/)
|
# Global installation (recommended for CLI usage)
|
||||||
|
pnpm add -g @git.zone/tsdoc
|
||||||
|
|
||||||
|
# Or use with npx (no install needed)
|
||||||
|
npx @git.zone/tsdoc
|
||||||
|
|
||||||
|
# Or install locally as a project dependency
|
||||||
|
pnpm add @git.zone/tsdoc
|
||||||
|
```
|
||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
For further information read the linked docs at the top of this readme.
|
`@git.zone/tsdoc` is a TypeScript documentation powerhouse that combines traditional [TypeDoc](https://typedoc.org/) API docs with AI-powered documentation workflows. It uses OpenAI models via `@push.rocks/smartai` and autonomous agents via `@push.rocks/smartagent` to generate READMEs, project descriptions, keywords, and semantic commit messages — all by intelligently exploring your project's codebase with scoped filesystem tools.
|
||||||
|
|
||||||
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
|
### CLI Commands
|
||||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
|
|
||||||
|
|
||||||
[](https://maintainedby.lossless.com)
|
| Command | Description |
|
||||||
|
|---------|-------------|
|
||||||
|
| `tsdoc` | 🔍 Auto-detects project type and runs TypeDoc |
|
||||||
|
| `tsdoc aidoc` | 🤖 Generates AI-powered README + description/keywords |
|
||||||
|
| `tsdoc readme` | 📝 Generates AI-powered README only |
|
||||||
|
| `tsdoc description` | 🏷️ Generates AI-powered description and keywords only |
|
||||||
|
| `tsdoc commit` | 💬 Generates a semantic commit message from uncommitted changes |
|
||||||
|
| `tsdoc typedoc` | 📚 Generates traditional TypeDoc API documentation |
|
||||||
|
|
||||||
|
### 🤖 AI-Powered Documentation (`aidoc`)
|
||||||
|
|
||||||
|
The `aidoc` command is the all-in-one workflow that combines README generation and description/keyword generation:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# In your project root
|
||||||
|
tsdoc aidoc
|
||||||
|
```
|
||||||
|
|
||||||
|
This will:
|
||||||
|
|
||||||
|
1. Spin up an AI agent with read-only filesystem access scoped to your project
|
||||||
|
2. The agent autonomously explores your project structure, reads source files, and understands the API
|
||||||
|
3. Generate a comprehensive `readme.md` with install instructions, usage examples, and architecture overview
|
||||||
|
4. Update `package.json` and `.smartconfig.json` with an AI-generated description and keywords
|
||||||
|
|
||||||
|
You can also run these steps individually:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate only the README
|
||||||
|
tsdoc readme
|
||||||
|
|
||||||
|
# Generate only the description and keywords
|
||||||
|
tsdoc description
|
||||||
|
```
|
||||||
|
|
||||||
|
### 💬 Smart Commit Messages (`commit`)
|
||||||
|
|
||||||
|
The `commit` command analyzes your uncommitted changes and produces a structured commit object following [Conventional Commits](https://www.conventionalcommits.org/):
|
||||||
|
|
||||||
|
```bash
|
||||||
|
tsdoc commit
|
||||||
|
```
|
||||||
|
|
||||||
|
Output is a JSON object:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"recommendedNextVersionLevel": "feat",
|
||||||
|
"recommendedNextVersionScope": "core",
|
||||||
|
"recommendedNextVersionMessage": "add smart diff processing for large changesets",
|
||||||
|
"recommendedNextVersionDetails": [
|
||||||
|
"implemented intelligent diff sampling with head/tail extraction",
|
||||||
|
"added file prioritization by importance score"
|
||||||
|
],
|
||||||
|
"recommendedNextVersion": "1.13.0",
|
||||||
|
"changelog": "# Changelog\n\n## 2026-03-24 - 1.13.0 - core\n..."
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Under the hood, the commit flow:
|
||||||
|
|
||||||
|
- **Excludes noise**: Lock files, build artifacts (`dist/`, `dist_*/`), IDE directories, caches, and source maps are filtered out before processing
|
||||||
|
- **Prioritizes what matters**: Source files rank higher than test files, which rank higher than config, docs, and build artifacts
|
||||||
|
- **Handles large diffs gracefully**: The `DiffProcessor` categorizes files by size — small files (< 300 lines) are included in full, medium files (< 800 lines) get head/tail sampling, and large files are metadata-only
|
||||||
|
- **Respects token budgets**: Dynamically calculates available tokens based on the model's context limit minus overhead
|
||||||
|
- **Auto-generates changelogs**: If no `changelog.md` exists, one is created from the full git history
|
||||||
|
|
||||||
|
### 📚 TypeDoc Generation (`typedoc`)
|
||||||
|
|
||||||
|
For traditional API documentation:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Generate to default ./public directory
|
||||||
|
tsdoc typedoc
|
||||||
|
|
||||||
|
# Generate to a specific subdirectory
|
||||||
|
tsdoc typedoc --publicSubdir docs
|
||||||
|
```
|
||||||
|
|
||||||
|
TypeDoc generation auto-detects your source directories (`ts/` and `ts_web/`) and creates a temporary tsconfig for compilation.
|
||||||
|
|
||||||
|
### 🏗️ Monorepo Support
|
||||||
|
|
||||||
|
When generating READMEs, tsdoc automatically detects monorepo submodules via `@git.zone/tspublish` conventions. Each submodule directory containing a `tspublish.json` gets its own generated README with the legal section appended.
|
||||||
|
|
||||||
|
### Programmatic API
|
||||||
|
|
||||||
|
You can use tsdoc programmatically in your own tools:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { AiDoc } from '@git.zone/tsdoc';
|
||||||
|
|
||||||
|
const aidoc = new AiDoc();
|
||||||
|
|
||||||
|
// Initialize — prompts for OpenAI token on first run, then persists it
|
||||||
|
await aidoc.start();
|
||||||
|
|
||||||
|
// Generate a comprehensive README for a project
|
||||||
|
const readmeContent = await aidoc.buildReadme('/path/to/project');
|
||||||
|
|
||||||
|
// Generate description and keywords, updating package.json and .smartconfig.json
|
||||||
|
await aidoc.buildDescription('/path/to/project');
|
||||||
|
|
||||||
|
// Generate a structured commit message object from uncommitted changes
|
||||||
|
const commitObj = await aidoc.buildNextCommitObject('/path/to/project');
|
||||||
|
console.log(commitObj.recommendedNextVersionLevel); // 'fix' | 'feat' | 'BREAKING CHANGE'
|
||||||
|
console.log(commitObj.recommendedNextVersionMessage);
|
||||||
|
console.log(commitObj.changelog);
|
||||||
|
|
||||||
|
// Get gathered project files (package.json, source files, tests, config)
|
||||||
|
const context = await aidoc.getProjectContext('/path/to/project');
|
||||||
|
|
||||||
|
// Get token count for a project's context
|
||||||
|
const tokenCount = await aidoc.getProjectContextTokenCount('/path/to/project');
|
||||||
|
|
||||||
|
// Estimate tokens in arbitrary text
|
||||||
|
const tokens = aidoc.countTokens('some text here');
|
||||||
|
|
||||||
|
await aidoc.stop();
|
||||||
|
```
|
||||||
|
|
||||||
|
You can also pass the OpenAI token directly via the constructor:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const aidoc = new AiDoc({ OPENAI_TOKEN: 'sk-...' });
|
||||||
|
await aidoc.start();
|
||||||
|
```
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
### OpenAI Token
|
||||||
|
|
||||||
|
An OpenAI API key is required for all AI features. It can be provided in three ways (checked in order):
|
||||||
|
|
||||||
|
1. **Environment variable**: Set `OPENAI_TOKEN` in your environment or `.env` file
|
||||||
|
2. **Constructor argument**: Pass `{ OPENAI_TOKEN: 'sk-...' }` to `new AiDoc()`
|
||||||
|
3. **Interactive prompt**: On first run, tsdoc will prompt for the token and persist it
|
||||||
|
|
||||||
|
The token is persisted at `~/.smartconfig/kv/@git.zone/tsdoc.json` for subsequent runs.
|
||||||
|
|
||||||
|
### .smartconfig.json
|
||||||
|
|
||||||
|
tsdoc uses `.smartconfig.json` for project metadata. The `tsdoc` key holds legal information that gets appended to generated READMEs:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tsdoc": {
|
||||||
|
"legal": "\n## License and Legal Information\n\n..."
|
||||||
|
},
|
||||||
|
"gitzone": {
|
||||||
|
"module": {
|
||||||
|
"githost": "gitlab.com",
|
||||||
|
"gitscope": "gitzone",
|
||||||
|
"gitrepo": "tsdoc",
|
||||||
|
"npmPackagename": "@git.zone/tsdoc",
|
||||||
|
"description": "...",
|
||||||
|
"keywords": ["..."]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
The `description` command writes updated description/keywords to both `gitzone.module` in `.smartconfig.json` and to `package.json`.
|
||||||
|
|
||||||
|
## Architecture
|
||||||
|
|
||||||
|
### Core Components
|
||||||
|
|
||||||
|
```
|
||||||
|
@git.zone/tsdoc
|
||||||
|
├── AiDoc # Main orchestrator — manages AI model, delegates to task classes
|
||||||
|
├── TypeDoc # Traditional TypeDoc API documentation generation
|
||||||
|
├── ProjectContext # Gathers project files (package.json, source, tests, config)
|
||||||
|
├── DiffProcessor # Intelligent git diff processing with prioritization & sampling
|
||||||
|
├── Readme # AI agent-driven README generation with filesystem tools
|
||||||
|
├── Commit # AI agent-driven commit message generation with diff analysis
|
||||||
|
├── Description # AI agent-driven description and keyword generation
|
||||||
|
└── CLI # Command-line interface built on @push.rocks/smartcli
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🧠 AI Agent Architecture
|
||||||
|
|
||||||
|
Each documentation task (readme, commit, description) runs an autonomous AI agent via `@push.rocks/smartagent`'s `runAgent()`:
|
||||||
|
|
||||||
|
1. **System prompt** defines the agent's role, constraints, and output format
|
||||||
|
2. **Filesystem tools** give the agent scoped, read-only access to the project directory
|
||||||
|
3. **Autonomous exploration** — the agent decides which files to read, in what order
|
||||||
|
4. **Structured output** — README markdown, commit JSON, or description JSON
|
||||||
|
|
||||||
|
The agents use `@push.rocks/smartai`'s `getModel()` to create a language model instance backed by OpenAI.
|
||||||
|
|
||||||
|
### ⚡ Diff Processing Pipeline
|
||||||
|
|
||||||
|
The `DiffProcessor` handles large git diffs without blowing up token budgets:
|
||||||
|
|
||||||
|
| File Category | Threshold | Treatment |
|
||||||
|
|---------------|-----------|-----------|
|
||||||
|
| **Small** | < 300 lines changed | Included in full |
|
||||||
|
| **Medium** | < 800 lines changed | Head (75 lines) + tail (75 lines) sampling |
|
||||||
|
| **Large** | ≥ 800 lines changed | Metadata only (filepath + stats) |
|
||||||
|
|
||||||
|
Files are scored by importance:
|
||||||
|
- **100** — Source files (`src/`, `lib/`, `app/`, `components/`, `pages/`, `api/`)
|
||||||
|
- **80** — Test files (`test/`, `*.test.ts`, `*.spec.ts`)
|
||||||
|
- **70** — Interface/type files, entry points (`index.ts`, `mod.ts`)
|
||||||
|
- **60** — Configuration files (`.json`, `.yaml`, `.config.ts`)
|
||||||
|
- **40** — Documentation (`.md`, `.txt`)
|
||||||
|
- **10** — Build artifacts (`dist/`, `build/`, `.next/`)
|
||||||
|
|
||||||
|
Token budget is calculated dynamically: `context_limit - safety_margin - overhead - prompt_size`.
|
||||||
|
|
||||||
|
## Requirements
|
||||||
|
|
||||||
|
- **Node.js** >= 18
|
||||||
|
- **TypeScript** project with a `ts/` source directory
|
||||||
|
- **OpenAI API key** for AI features
|
||||||
|
|
||||||
|
## License and Legal Information
|
||||||
|
|
||||||
|
This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./license) file.
|
||||||
|
|
||||||
|
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
### Trademarks
|
||||||
|
|
||||||
|
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.
|
||||||
|
|
||||||
|
Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.
|
||||||
|
|
||||||
|
### Company Information
|
||||||
|
|
||||||
|
Task Venture Capital GmbH
|
||||||
|
Registered at District Court Bremen HRB 35230 HB, Germany
|
||||||
|
|
||||||
|
For any legal inquiries or further information, please contact us via email at hello@task.vc.
|
||||||
|
|
||||||
|
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
||||||
|
|||||||
314
readme.plan.md
Normal file
314
readme.plan.md
Normal file
@@ -0,0 +1,314 @@
|
|||||||
|
# TSDocs Context Optimization Plan
|
||||||
|
|
||||||
|
## Problem Statement
|
||||||
|
|
||||||
|
For large TypeScript projects, the context generated for AI-based documentation creation becomes too large, potentially exceeding even o4-mini's 200K token limit. This affects the ability to effectively generate:
|
||||||
|
|
||||||
|
- Project documentation (README.md)
|
||||||
|
- API descriptions and keywords
|
||||||
|
- Commit messages and changelogs
|
||||||
|
|
||||||
|
Current implementation simply includes all TypeScript files and key project files, but lacks intelligent selection, prioritization, or content reduction mechanisms.
|
||||||
|
|
||||||
|
## Analysis of Approaches
|
||||||
|
|
||||||
|
### 1. Smart Content Selection
|
||||||
|
|
||||||
|
**Description:** Intelligently select only files that are necessary for the specific task being performed, using heuristic rules.
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Simple to implement
|
||||||
|
- Predictable behavior
|
||||||
|
- Can be fine-tuned for different operations
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Requires manual tuning of rules
|
||||||
|
- May miss important context in complex projects
|
||||||
|
- Static approach lacks adaptability
|
||||||
|
|
||||||
|
**Implementation Complexity:** Medium
|
||||||
|
|
||||||
|
### 2. File Prioritization
|
||||||
|
|
||||||
|
**Description:** Rank files by relevance using git history, file size, import/export analysis, and relationship to the current task.
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Adaptively includes the most relevant files first
|
||||||
|
- Maintains context for frequently changed or central files
|
||||||
|
- Can leverage git history for additional signals
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Complexity in determining accurate relevance scores
|
||||||
|
- Requires analyzing project structure
|
||||||
|
- May require scanning imports/exports for dependency analysis
|
||||||
|
|
||||||
|
**Implementation Complexity:** High
|
||||||
|
|
||||||
|
### 3. Chunking Strategy
|
||||||
|
|
||||||
|
**Description:** Process the project in logical segments, generating intermediate results that are then combined to create the final output.
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Can handle projects of any size
|
||||||
|
- Focused context for each specific part
|
||||||
|
- May improve quality by focusing on specific areas deeply
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Complex orchestration of multiple AI calls
|
||||||
|
- Challenge in maintaining consistency across chunks
|
||||||
|
- May increase time and cost for processing
|
||||||
|
|
||||||
|
**Implementation Complexity:** High
|
||||||
|
|
||||||
|
### 4. Dynamic Context Trimming
|
||||||
|
|
||||||
|
**Description:** Automatically reduce context by removing non-essential code while preserving structure. Techniques include:
|
||||||
|
- Removing implementation details but keeping interfaces and type definitions
|
||||||
|
- Truncating large functions while keeping signatures
|
||||||
|
- Removing comments and whitespace (except JSDoc)
|
||||||
|
- Keeping only imports/exports for context files
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Preserves full project structure
|
||||||
|
- Flexible token usage based on importance
|
||||||
|
- Good balance between completeness and token efficiency
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Potential to remove important implementation details
|
||||||
|
- Risk of missing context needed for specific tasks
|
||||||
|
- Complex rules for what to trim vs keep
|
||||||
|
|
||||||
|
**Implementation Complexity:** Medium
|
||||||
|
|
||||||
|
### 5. Embeddings-Based Retrieval
|
||||||
|
|
||||||
|
**Description:** Create vector embeddings of project files and retrieve only the most relevant ones for a specific task using semantic similarity.
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Highly adaptive to different types of requests
|
||||||
|
- Leverages semantic understanding of content
|
||||||
|
- Can scale to extremely large projects
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Requires setting up and managing embeddings database
|
||||||
|
- Added complexity of running vector similarity searches
|
||||||
|
- Higher resource requirements for maintaining embeddings
|
||||||
|
|
||||||
|
**Implementation Complexity:** Very High
|
||||||
|
|
||||||
|
### 6. Task-Specific Contexts
|
||||||
|
|
||||||
|
**Description:** Create separate optimized contexts for different tasks (readme, commit messages, etc.) with distinct file selection and processing strategies.
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Highly optimized for each specific task
|
||||||
|
- Efficient token usage for each operation
|
||||||
|
- Improved quality through task-focused contexts
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Maintenance of multiple context building strategies
|
||||||
|
- More complex configuration
|
||||||
|
- Potential duplication in implementation
|
||||||
|
|
||||||
|
**Implementation Complexity:** Medium
|
||||||
|
|
||||||
|
### 7. Recursive Summarization
|
||||||
|
|
||||||
|
**Description:** Summarize larger files first, then include these summaries in the final context along with smaller files included in full.
|
||||||
|
|
||||||
|
**Advantages:**
|
||||||
|
- Can handle arbitrary project sizes
|
||||||
|
- Preserves essential information from all files
|
||||||
|
- Balanced approach to token usage
|
||||||
|
|
||||||
|
**Disadvantages:**
|
||||||
|
- Quality loss from summarization
|
||||||
|
- Increased processing time from multiple AI calls
|
||||||
|
- Complex orchestration logic
|
||||||
|
|
||||||
|
**Implementation Complexity:** High
|
||||||
|
|
||||||
|
## Implementation Strategy
|
||||||
|
|
||||||
|
We propose a phased implementation approach, starting with the most impactful and straightforward approaches, then building toward more complex solutions as needed:
|
||||||
|
|
||||||
|
### Phase 1: Foundation (1-2 weeks)
|
||||||
|
|
||||||
|
1. **Implement Dynamic Context Trimming**
|
||||||
|
- Create a `ContextProcessor` class that takes SmartFile objects and applies trimming rules
|
||||||
|
- Implement configurable trimming rules (remove implementations, keep signatures)
|
||||||
|
- Add a configuration option to control trimming aggressiveness
|
||||||
|
- Support preserving JSDoc comments while removing other comments
|
||||||
|
|
||||||
|
2. **Enhance Token Monitoring**
|
||||||
|
- Track token usage per file to identify problematic files
|
||||||
|
- Implement token budgeting to stay within limits
|
||||||
|
- Add detailed token reporting for optimization
|
||||||
|
|
||||||
|
### Phase 2: Smart Selection (2-3 weeks)
|
||||||
|
|
||||||
|
3. **Implement Task-Specific Contexts**
|
||||||
|
- Create specialized context builders for readme, commit messages, and descriptions
|
||||||
|
- Customize file selection rules for each task
|
||||||
|
- Add configuration options for task-specific settings
|
||||||
|
|
||||||
|
4. **Add Smart Content Selection**
|
||||||
|
- Implement heuristic rules for file importance
|
||||||
|
- Create configuration for inclusion/exclusion patterns
|
||||||
|
- Add ability to focus on specific directories or modules
|
||||||
|
|
||||||
|
### Phase 3: Advanced Techniques (3-4 weeks)
|
||||||
|
|
||||||
|
5. **Implement File Prioritization**
|
||||||
|
- Add git history analysis to identify frequently changed files
|
||||||
|
- Implement dependency analysis to identify central files
|
||||||
|
- Create a scoring system for file relevance
|
||||||
|
|
||||||
|
6. **Add Optional Recursive Summarization**
|
||||||
|
- Implement file summarization for large files
|
||||||
|
- Create a hybrid approach that mixes full files and summaries
|
||||||
|
- Add configuration to control summarization thresholds
|
||||||
|
|
||||||
|
### Phase 4: Research-Based Approaches (Future Consideration)
|
||||||
|
|
||||||
|
7. **Research and Evaluate Embeddings-Based Retrieval**
|
||||||
|
- Prototype embeddings creation for TypeScript files
|
||||||
|
- Evaluate performance and accuracy
|
||||||
|
- Implement if benefits justify the complexity
|
||||||
|
|
||||||
|
8. **Explore Chunking Strategies**
|
||||||
|
- Research effective chunking approaches for documentation
|
||||||
|
- Prototype and evaluate performance
|
||||||
|
- Implement if benefits justify the complexity
|
||||||
|
|
||||||
|
## Technical Design
|
||||||
|
|
||||||
|
### Core Components
|
||||||
|
|
||||||
|
1. **ContextBuilder** - Enhanced version of current ProjectContext
|
||||||
|
```typescript
|
||||||
|
interface IContextBuilder {
|
||||||
|
buildContext(): Promise<string>;
|
||||||
|
getTokenCount(): number;
|
||||||
|
setContextMode(mode: 'normal' | 'trimmed' | 'summarized'): void;
|
||||||
|
setTokenBudget(maxTokens: number): void;
|
||||||
|
setPrioritizationStrategy(strategy: IPrioritizationStrategy): void;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
2. **FileProcessor** - Handles per-file processing and trimming
|
||||||
|
```typescript
|
||||||
|
interface IFileProcessor {
|
||||||
|
processFile(file: SmartFile): Promise<string>;
|
||||||
|
setProcessingMode(mode: 'full' | 'trim' | 'summarize'): void;
|
||||||
|
getTokenCount(): number;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
3. **PrioritizationStrategy** - Ranks files by importance
|
||||||
|
```typescript
|
||||||
|
interface IPrioritizationStrategy {
|
||||||
|
rankFiles(files: SmartFile[], context: string): Promise<SmartFile[]>;
|
||||||
|
setImportanceMetrics(metrics: IImportanceMetrics): void;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
4. **TaskContextFactory** - Creates optimized contexts for specific tasks
|
||||||
|
```typescript
|
||||||
|
interface ITaskContextFactory {
|
||||||
|
createContextForReadme(projectDir: string): Promise<string>;
|
||||||
|
createContextForCommit(projectDir: string, diff: string): Promise<string>;
|
||||||
|
createContextForDescription(projectDir: string): Promise<string>;
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### Configuration Options
|
||||||
|
|
||||||
|
The system will support configuration via a new section in `npmextra.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"tsdoc": {
|
||||||
|
"context": {
|
||||||
|
"maxTokens": 190000,
|
||||||
|
"defaultMode": "dynamic",
|
||||||
|
"taskSpecificSettings": {
|
||||||
|
"readme": {
|
||||||
|
"mode": "full",
|
||||||
|
"includePaths": ["src/", "lib/"],
|
||||||
|
"excludePaths": ["test/", "examples/"]
|
||||||
|
},
|
||||||
|
"commit": {
|
||||||
|
"mode": "trimmed",
|
||||||
|
"focusOnChangedFiles": true
|
||||||
|
},
|
||||||
|
"description": {
|
||||||
|
"mode": "summarized",
|
||||||
|
"includePackageInfo": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"trimming": {
|
||||||
|
"removeImplementations": true,
|
||||||
|
"preserveInterfaces": true,
|
||||||
|
"preserveTypeDefs": true,
|
||||||
|
"preserveJSDoc": true,
|
||||||
|
"maxFunctionLines": 5
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
## Cost-Benefit Analysis
|
||||||
|
|
||||||
|
### Cost Considerations
|
||||||
|
|
||||||
|
1. **Development costs**
|
||||||
|
- Initial implementation of foundational components (~30-40 hours)
|
||||||
|
- Testing and validation across different project sizes (~10-15 hours)
|
||||||
|
- Documentation and configuration examples (~5 hours)
|
||||||
|
|
||||||
|
2. **Operational costs**
|
||||||
|
- Potential increased processing time for context preparation
|
||||||
|
- Additional API calls for summarization or embeddings approaches
|
||||||
|
- Monitoring and maintenance of the system
|
||||||
|
|
||||||
|
### Benefits
|
||||||
|
|
||||||
|
1. **Scalability**
|
||||||
|
- Support for projects of any size, up to and beyond o4-mini's 200K token limit
|
||||||
|
- Future-proof design that can adapt to different models and token limits
|
||||||
|
|
||||||
|
2. **Quality improvements**
|
||||||
|
- More focused contexts lead to better AI outputs
|
||||||
|
- Task-specific optimization improves relevance
|
||||||
|
- Consistent performance regardless of project size
|
||||||
|
|
||||||
|
3. **User experience**
|
||||||
|
- Predictable behavior for all project sizes
|
||||||
|
- Transparent token usage reporting
|
||||||
|
- Configuration options for different usage patterns
|
||||||
|
|
||||||
|
## First Deliverable
|
||||||
|
|
||||||
|
For immediate improvements, we recommend implementing Dynamic Context Trimming and Task-Specific Contexts first, as these offer the best balance of impact and implementation complexity.
|
||||||
|
|
||||||
|
### Implementation Plan for Dynamic Context Trimming
|
||||||
|
|
||||||
|
1. Create a basic `ContextTrimmer` class that processes TypeScript files:
|
||||||
|
- Remove function bodies but keep signatures
|
||||||
|
- Preserve interface and type definitions
|
||||||
|
- Keep imports and exports
|
||||||
|
- Preserve JSDoc comments
|
||||||
|
|
||||||
|
2. Integrate with the existing ProjectContext class:
|
||||||
|
- Add a trimming mode option
|
||||||
|
- Apply trimming during the context building process
|
||||||
|
- Track and report token savings
|
||||||
|
|
||||||
|
3. Modify the CLI to support trimming options:
|
||||||
|
- Add a `--trim` flag to enable trimming
|
||||||
|
- Add a `--trim-level` option for controlling aggressiveness
|
||||||
|
- Show token usage with and without trimming
|
||||||
|
|
||||||
|
This approach could reduce token usage by 40-70% while preserving the essential structure of the codebase, making it suitable for large projects while maintaining high-quality AI outputs.
|
||||||
42
test/test.aidoc.nonci.ts
Normal file
42
test/test.aidoc.nonci.ts
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as qenv from '@push.rocks/qenv';
|
||||||
|
let testQenv = new qenv.Qenv('./', '.nogit/');
|
||||||
|
|
||||||
|
import * as tsdocs from '../ts/index.js';
|
||||||
|
|
||||||
|
let aidocs: tsdocs.AiDoc;
|
||||||
|
|
||||||
|
tap.test('should create an AIdocs class', async () => {
|
||||||
|
aidocs = new tsdocs.AiDoc({
|
||||||
|
OPENAI_TOKEN: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
|
||||||
|
});
|
||||||
|
expect(aidocs).toBeInstanceOf(tsdocs.AiDoc);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should start AIdocs', async () => {
|
||||||
|
await aidocs.start();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.skip.test('should start AIdocs', async () => {
|
||||||
|
await aidocs.buildReadme('./');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.skip.test('should start AIdocs', async () => {
|
||||||
|
await aidocs.buildDescription('./');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should build commit object', async () => {
|
||||||
|
const commitObject = await aidocs.buildNextCommitObject('./');
|
||||||
|
console.log(commitObject);
|
||||||
|
expect(commitObject).not.toBeUndefined();
|
||||||
|
expect(commitObject).toHaveProperty('recommendedNextVersion');
|
||||||
|
expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
|
||||||
|
expect(commitObject).toHaveProperty('recommendedNextVersionScope');
|
||||||
|
expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should stop AIdocs', async () => {
|
||||||
|
await aidocs.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
304
test/test.diffprocessor.node.ts
Normal file
304
test/test.diffprocessor.node.ts
Normal file
@@ -0,0 +1,304 @@
|
|||||||
|
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { DiffProcessor } from '../ts/classes.diffprocessor.js';
|
||||||
|
|
||||||
|
// Sample diff strings for testing
|
||||||
|
const createSmallDiff = (filepath: string, addedLines = 5, removedLines = 3): string => {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`--- a/${filepath}`);
|
||||||
|
lines.push(`+++ b/${filepath}`);
|
||||||
|
lines.push(`@@ -1,10 +1,12 @@`);
|
||||||
|
|
||||||
|
for (let i = 0; i < removedLines; i++) {
|
||||||
|
lines.push(`-removed line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < addedLines; i++) {
|
||||||
|
lines.push(`+added line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
lines.push(' unchanged line');
|
||||||
|
|
||||||
|
return lines.join('\n');
|
||||||
|
};
|
||||||
|
|
||||||
|
const createMediumDiff = (filepath: string): string => {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`--- a/${filepath}`);
|
||||||
|
lines.push(`+++ b/${filepath}`);
|
||||||
|
lines.push(`@@ -1,100 +1,150 @@`);
|
||||||
|
|
||||||
|
// 150 lines of changes
|
||||||
|
for (let i = 0; i < 75; i++) {
|
||||||
|
lines.push(`+added line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < 75; i++) {
|
||||||
|
lines.push(`-removed line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join('\n');
|
||||||
|
};
|
||||||
|
|
||||||
|
const createLargeDiff = (filepath: string): string => {
|
||||||
|
const lines: string[] = [];
|
||||||
|
lines.push(`--- a/${filepath}`);
|
||||||
|
lines.push(`+++ b/${filepath}`);
|
||||||
|
lines.push(`@@ -1,1000 +1,1500 @@`);
|
||||||
|
|
||||||
|
// 2500 lines of changes
|
||||||
|
for (let i = 0; i < 1250; i++) {
|
||||||
|
lines.push(`+added line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
for (let i = 0; i < 1250; i++) {
|
||||||
|
lines.push(`-removed line ${i + 1}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
return lines.join('\n');
|
||||||
|
};
|
||||||
|
|
||||||
|
const createDeletedFileDiff = (filepath: string): string => {
|
||||||
|
return `--- a/${filepath}
|
||||||
|
+++ /dev/null
|
||||||
|
@@ -1,5 +0,0 @@
|
||||||
|
-deleted line 1
|
||||||
|
-deleted line 2
|
||||||
|
-deleted line 3
|
||||||
|
-deleted line 4
|
||||||
|
-deleted line 5`;
|
||||||
|
};
|
||||||
|
|
||||||
|
const createAddedFileDiff = (filepath: string): string => {
|
||||||
|
return `--- /dev/null
|
||||||
|
+++ b/${filepath}
|
||||||
|
@@ -0,0 +1,5 @@
|
||||||
|
+added line 1
|
||||||
|
+added line 2
|
||||||
|
+added line 3
|
||||||
|
+added line 4
|
||||||
|
+added line 5`;
|
||||||
|
};
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should parse small diff correctly', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const smallDiff = createSmallDiff('src/test.ts', 5, 3);
|
||||||
|
|
||||||
|
const result = processor.processDiffs([smallDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(1);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(0);
|
||||||
|
expect(result.metadataOnly.length).toEqual(0);
|
||||||
|
expect(result.totalTokens).toBeGreaterThan(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should summarize medium diff', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const mediumDiff = createMediumDiff('src/medium-file.ts');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([mediumDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(0);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(1);
|
||||||
|
expect(result.metadataOnly.length).toEqual(0);
|
||||||
|
|
||||||
|
// Verify the summarized diff contains the sample
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('SUMMARIZED DIFFS');
|
||||||
|
expect(formatted).toInclude('lines omitted');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle large diff as metadata only', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const largeDiff = createLargeDiff('dist/bundle.js');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([largeDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(0);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(0);
|
||||||
|
expect(result.metadataOnly.length).toEqual(1);
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('METADATA ONLY');
|
||||||
|
expect(formatted).toInclude('dist/bundle.js');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should prioritize source files over build artifacts', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('dist/bundle.js'),
|
||||||
|
createSmallDiff('src/important.ts'),
|
||||||
|
createSmallDiff('build/output.js'),
|
||||||
|
createSmallDiff('src/core.ts'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(4);
|
||||||
|
|
||||||
|
// Source files should be included fully first
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
const srcImportantIndex = formatted.indexOf('src/important.ts');
|
||||||
|
const srcCoreIndex = formatted.indexOf('src/core.ts');
|
||||||
|
const distBundleIndex = formatted.indexOf('dist/bundle.js');
|
||||||
|
const buildOutputIndex = formatted.indexOf('build/output.js');
|
||||||
|
|
||||||
|
// Source files should appear before build artifacts
|
||||||
|
expect(srcImportantIndex).toBeLessThan(distBundleIndex);
|
||||||
|
expect(srcCoreIndex).toBeLessThan(buildOutputIndex);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should respect token budget', async () => {
|
||||||
|
const processor = new DiffProcessor({
|
||||||
|
maxDiffTokens: 500, // Very small budget to force metadata-only
|
||||||
|
});
|
||||||
|
|
||||||
|
// Create multiple large diffs that will exceed budget
|
||||||
|
const diffs = [
|
||||||
|
createLargeDiff('src/file1.ts'),
|
||||||
|
createLargeDiff('src/file2.ts'),
|
||||||
|
createLargeDiff('src/file3.ts'),
|
||||||
|
createLargeDiff('src/file4.ts'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
|
||||||
|
expect(result.totalTokens).toBeLessThanOrEqual(500);
|
||||||
|
// With such a small budget and large files, most should be metadata only
|
||||||
|
expect(result.metadataOnly.length).toBeGreaterThanOrEqual(2);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle deleted files', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const deletedDiff = createDeletedFileDiff('src/old-file.ts');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([deletedDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
// Small deleted file should be included fully
|
||||||
|
expect(result.fullDiffs.length).toEqual(1);
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('src/old-file.ts');
|
||||||
|
// Verify the file appears in the output
|
||||||
|
expect(formatted).toInclude('FULL DIFFS');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle added files', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const addedDiff = createAddedFileDiff('src/new-file.ts');
|
||||||
|
|
||||||
|
const result = processor.processDiffs([addedDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
// Small added file should be included fully
|
||||||
|
expect(result.fullDiffs.length).toEqual(1);
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('src/new-file.ts');
|
||||||
|
// Verify the file appears in the output
|
||||||
|
expect(formatted).toInclude('FULL DIFFS');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle mixed file sizes', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('src/small.ts'),
|
||||||
|
createMediumDiff('src/medium.ts'),
|
||||||
|
createLargeDiff('dist/large.js'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(3);
|
||||||
|
expect(result.fullDiffs.length).toEqual(1); // small file
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(1); // medium file
|
||||||
|
expect(result.metadataOnly.length).toEqual(1); // large file
|
||||||
|
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
expect(formatted).toInclude('FULL DIFFS (1 files)');
|
||||||
|
expect(formatted).toInclude('SUMMARIZED DIFFS (1 files)');
|
||||||
|
expect(formatted).toInclude('METADATA ONLY (1 files)');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle empty diff array', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const result = processor.processDiffs([]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(0);
|
||||||
|
expect(result.fullDiffs.length).toEqual(0);
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(0);
|
||||||
|
expect(result.metadataOnly.length).toEqual(0);
|
||||||
|
expect(result.totalTokens).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should generate comprehensive summary', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('src/file1.ts'),
|
||||||
|
createSmallDiff('src/file2.ts'),
|
||||||
|
createMediumDiff('src/file3.ts'),
|
||||||
|
createLargeDiff('dist/bundle.js'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
|
||||||
|
expect(formatted).toInclude('GIT DIFF SUMMARY');
|
||||||
|
expect(formatted).toInclude('Files changed: 4 total');
|
||||||
|
expect(formatted).toInclude('included in full');
|
||||||
|
expect(formatted).toInclude('summarized');
|
||||||
|
expect(formatted).toInclude('metadata only');
|
||||||
|
expect(formatted).toInclude('Estimated tokens:');
|
||||||
|
expect(formatted).toInclude('END OF GIT DIFF');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle custom options', async () => {
|
||||||
|
const processor = new DiffProcessor({
|
||||||
|
maxDiffTokens: 50000,
|
||||||
|
smallFileLines: 30,
|
||||||
|
mediumFileLines: 150,
|
||||||
|
sampleHeadLines: 10,
|
||||||
|
sampleTailLines: 10,
|
||||||
|
});
|
||||||
|
|
||||||
|
const mediumDiff = createMediumDiff('src/file.ts'); // 150 lines
|
||||||
|
const result = processor.processDiffs([mediumDiff]);
|
||||||
|
|
||||||
|
// With custom settings, this should be summarized (exactly at the mediumFileLines threshold)
|
||||||
|
expect(result.summarizedDiffs.length).toEqual(1);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should prioritize test files appropriately', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const diffs = [
|
||||||
|
createSmallDiff('src/core.ts'),
|
||||||
|
createSmallDiff('test/core.test.ts'),
|
||||||
|
createSmallDiff('config.json'),
|
||||||
|
];
|
||||||
|
|
||||||
|
const result = processor.processDiffs(diffs);
|
||||||
|
const formatted = processor.formatForContext(result);
|
||||||
|
|
||||||
|
// Source files should come before test files
|
||||||
|
const srcIndex = formatted.indexOf('src/core.ts');
|
||||||
|
const testIndex = formatted.indexOf('test/core.test.ts');
|
||||||
|
|
||||||
|
expect(srcIndex).toBeLessThan(testIndex);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('DiffProcessor should handle files with no changes gracefully', async () => {
|
||||||
|
const processor = new DiffProcessor();
|
||||||
|
const emptyDiff = `--- a/src/file.ts
|
||||||
|
+++ b/src/file.ts
|
||||||
|
@@ -1,1 +1,1 @@`;
|
||||||
|
|
||||||
|
const result = processor.processDiffs([emptyDiff]);
|
||||||
|
|
||||||
|
expect(result.totalFiles).toEqual(1);
|
||||||
|
expect(result.fullDiffs.length).toEqual(1); // Still included as a small file
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -1,8 +0,0 @@
|
|||||||
import { expect, tap } from '@pushrocks/tapbundle';
|
|
||||||
import * as tsdoc from '../ts/index';
|
|
||||||
|
|
||||||
tap.test('first test', async () => {
|
|
||||||
console.log('test');
|
|
||||||
});
|
|
||||||
|
|
||||||
tap.start();
|
|
||||||
8
ts/00_commitinfo_data.ts
Normal file
8
ts/00_commitinfo_data.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
/**
|
||||||
|
* autocreated commitinfo by @push.rocks/commitinfo
|
||||||
|
*/
|
||||||
|
export const commitinfo = {
|
||||||
|
name: '@git.zone/tsdoc',
|
||||||
|
version: '2.0.2',
|
||||||
|
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
|
||||||
|
}
|
||||||
299
ts/aidocs_classes/commit.ts
Normal file
299
ts/aidocs_classes/commit.ts
Normal file
@@ -0,0 +1,299 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { AiDoc } from '../classes.aidoc.js';
|
||||||
|
import { ProjectContext } from './projectcontext.js';
|
||||||
|
import { DiffProcessor } from '../classes.diffprocessor.js';
|
||||||
|
import { logger } from '../logging.js';
|
||||||
|
|
||||||
|
// Token budget configuration for OpenAI API limits
|
||||||
|
const TOKEN_BUDGET = {
|
||||||
|
OPENAI_CONTEXT_LIMIT: 272000, // OpenAI's configured limit
|
||||||
|
SAFETY_MARGIN: 10000, // Buffer to avoid hitting exact limit
|
||||||
|
SMARTAGENT_OVERHEAD: 180000, // System msgs, tools, history, formatting
|
||||||
|
TASK_PROMPT_OVERHEAD: 2000, // Task prompt template size
|
||||||
|
} as const;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate max tokens available for diff content based on total budget
|
||||||
|
*/
|
||||||
|
function calculateMaxDiffTokens(): number {
|
||||||
|
const available = TOKEN_BUDGET.OPENAI_CONTEXT_LIMIT
|
||||||
|
- TOKEN_BUDGET.SAFETY_MARGIN
|
||||||
|
- TOKEN_BUDGET.SMARTAGENT_OVERHEAD
|
||||||
|
- TOKEN_BUDGET.TASK_PROMPT_OVERHEAD;
|
||||||
|
return Math.max(available, 30000);
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface INextCommitObject {
|
||||||
|
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
|
||||||
|
recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
|
||||||
|
recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
|
||||||
|
recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
|
||||||
|
recommendedNextVersion: string; // the recommended next version of the project, x.x.x
|
||||||
|
changelog?: string; // the changelog for the next version
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Commit {
|
||||||
|
private aiDocsRef: AiDoc;
|
||||||
|
private projectDir: string;
|
||||||
|
|
||||||
|
constructor(aiDocsRef: AiDoc, projectDirArg: string) {
|
||||||
|
this.aiDocsRef = aiDocsRef;
|
||||||
|
this.projectDir = projectDirArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async buildNextCommitObject(): Promise<INextCommitObject> {
|
||||||
|
const smartgitInstance = new plugins.smartgit.Smartgit();
|
||||||
|
await smartgitInstance.init();
|
||||||
|
const gitRepo = await plugins.smartgit.GitRepo.fromOpeningRepoDir(
|
||||||
|
smartgitInstance,
|
||||||
|
this.projectDir
|
||||||
|
);
|
||||||
|
|
||||||
|
// Define comprehensive exclusion patterns
|
||||||
|
// smartgit@3.3.0+ supports glob patterns natively
|
||||||
|
const excludePatterns = [
|
||||||
|
// Lock files
|
||||||
|
'pnpm-lock.yaml',
|
||||||
|
'package-lock.json',
|
||||||
|
'npm-shrinkwrap.json',
|
||||||
|
'yarn.lock',
|
||||||
|
'deno.lock',
|
||||||
|
'bun.lockb',
|
||||||
|
|
||||||
|
// Build artifacts (main culprit for large diffs!)
|
||||||
|
'dist/**',
|
||||||
|
'dist_*/**', // dist_ts, dist_web, etc.
|
||||||
|
'build/**',
|
||||||
|
'.next/**',
|
||||||
|
'out/**',
|
||||||
|
'public/dist/**',
|
||||||
|
|
||||||
|
// Compiled/bundled files
|
||||||
|
'**/*.js.map',
|
||||||
|
'**/*.d.ts.map',
|
||||||
|
'**/*.min.js',
|
||||||
|
'**/*.bundle.js',
|
||||||
|
'**/*.chunk.js',
|
||||||
|
|
||||||
|
// IDE/Editor directories
|
||||||
|
'.claude/**',
|
||||||
|
'.cursor/**',
|
||||||
|
'.vscode/**',
|
||||||
|
'.idea/**',
|
||||||
|
'**/*.swp',
|
||||||
|
'**/*.swo',
|
||||||
|
|
||||||
|
// Logs and caches
|
||||||
|
'.nogit/**',
|
||||||
|
'**/*.log',
|
||||||
|
'.cache/**',
|
||||||
|
'.rpt2_cache/**',
|
||||||
|
'coverage/**',
|
||||||
|
'.nyc_output/**',
|
||||||
|
];
|
||||||
|
|
||||||
|
// Pass glob patterns directly to smartgit - it handles matching internally
|
||||||
|
const diffStringArray = await gitRepo.getUncommittedDiff(excludePatterns);
|
||||||
|
|
||||||
|
// Process diffs intelligently using DiffProcessor
|
||||||
|
let processedDiffString: string;
|
||||||
|
|
||||||
|
if (diffStringArray.length > 0) {
|
||||||
|
// Diagnostic logging for raw diff statistics
|
||||||
|
const totalChars = diffStringArray.join('\n\n').length;
|
||||||
|
const estimatedTokens = Math.ceil(totalChars / 4);
|
||||||
|
|
||||||
|
console.log(`Raw git diff statistics:`);
|
||||||
|
console.log(` Files changed: ${diffStringArray.length}`);
|
||||||
|
console.log(` Total characters: ${totalChars.toLocaleString()}`);
|
||||||
|
console.log(` Estimated tokens: ${estimatedTokens.toLocaleString()}`);
|
||||||
|
console.log(` Exclusion patterns: ${excludePatterns.length}`);
|
||||||
|
|
||||||
|
// Calculate available tokens for diff based on total budget
|
||||||
|
const maxDiffTokens = calculateMaxDiffTokens();
|
||||||
|
console.log(`Token budget: ${maxDiffTokens.toLocaleString()} tokens for diff (limit: ${TOKEN_BUDGET.OPENAI_CONTEXT_LIMIT.toLocaleString()}, overhead: ${(TOKEN_BUDGET.SMARTAGENT_OVERHEAD + TOKEN_BUDGET.TASK_PROMPT_OVERHEAD).toLocaleString()})`);
|
||||||
|
|
||||||
|
// Use DiffProcessor to intelligently handle large diffs
|
||||||
|
const diffProcessor = new DiffProcessor({
|
||||||
|
maxDiffTokens, // Dynamic based on total budget
|
||||||
|
smallFileLines: 300, // Most source files are under 300 lines
|
||||||
|
mediumFileLines: 800, // Only very large files get head/tail treatment
|
||||||
|
sampleHeadLines: 75, // When sampling, show more context
|
||||||
|
sampleTailLines: 75, // When sampling, show more context
|
||||||
|
});
|
||||||
|
|
||||||
|
const processedDiff = diffProcessor.processDiffs(diffStringArray);
|
||||||
|
processedDiffString = diffProcessor.formatForContext(processedDiff);
|
||||||
|
|
||||||
|
console.log(`Processed diff statistics:`);
|
||||||
|
console.log(` Full diffs: ${processedDiff.fullDiffs.length} files`);
|
||||||
|
console.log(` Summarized: ${processedDiff.summarizedDiffs.length} files`);
|
||||||
|
console.log(` Metadata only: ${processedDiff.metadataOnly.length} files`);
|
||||||
|
console.log(` Final tokens: ${processedDiff.totalTokens.toLocaleString()}`);
|
||||||
|
|
||||||
|
if (estimatedTokens > 50000) {
|
||||||
|
console.log(`DiffProcessor reduced token usage: ${estimatedTokens.toLocaleString()} -> ${processedDiff.totalTokens.toLocaleString()}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate total tokens won't exceed limit
|
||||||
|
const totalEstimatedTokens = processedDiff.totalTokens
|
||||||
|
+ TOKEN_BUDGET.SMARTAGENT_OVERHEAD
|
||||||
|
+ TOKEN_BUDGET.TASK_PROMPT_OVERHEAD;
|
||||||
|
|
||||||
|
if (totalEstimatedTokens > TOKEN_BUDGET.OPENAI_CONTEXT_LIMIT - TOKEN_BUDGET.SAFETY_MARGIN) {
|
||||||
|
console.log(`Warning: Estimated tokens (${totalEstimatedTokens.toLocaleString()}) approaching limit`);
|
||||||
|
console.log(` Consider splitting into smaller commits`);
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
processedDiffString = 'No changes.';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use runAgent for commit message generation with filesystem tool
|
||||||
|
const fsTools = plugins.smartagentTools.filesystemTool({ rootDir: this.projectDir });
|
||||||
|
|
||||||
|
const commitSystemPrompt = `
|
||||||
|
You create commit messages for git commits following semantic versioning conventions.
|
||||||
|
|
||||||
|
You have access to filesystem tools to explore the project if needed.
|
||||||
|
|
||||||
|
IMPORTANT RULES:
|
||||||
|
- Only READ files (package.json, source files) for context
|
||||||
|
- Do NOT write, delete, or modify any files
|
||||||
|
- Version level (fix/feat/BREAKING CHANGE) must match the scope of changes
|
||||||
|
- Commit message must be clear, professional, and follow conventional commit conventions
|
||||||
|
- Do NOT include personal information, licensing details, or AI mentions (Claude/Codex)
|
||||||
|
- JSON structure must be valid with all required fields
|
||||||
|
- Scope must accurately reflect the changed modules/files
|
||||||
|
`;
|
||||||
|
|
||||||
|
const commitTaskPrompt = `
|
||||||
|
Project directory: ${this.projectDir}
|
||||||
|
|
||||||
|
You have access to filesystem tools to explore the project if needed:
|
||||||
|
- Use list_directory to see project structure
|
||||||
|
- Use read_file to read package.json or source files for context
|
||||||
|
|
||||||
|
Analyze the git diff below to understand what changed and generate a commit message.
|
||||||
|
|
||||||
|
You should not include any licensing information or personal information.
|
||||||
|
Never mention CLAUDE code, or codex.
|
||||||
|
|
||||||
|
Your final response must be ONLY valid JSON - the raw JSON object, nothing else.
|
||||||
|
No explanations, no summaries, no markdown - just the JSON object that can be parsed with JSON.parse().
|
||||||
|
|
||||||
|
Here is the structure of the JSON you must return:
|
||||||
|
|
||||||
|
{
|
||||||
|
"recommendedNextVersionLevel": "fix" | "feat" | "BREAKING CHANGE",
|
||||||
|
"recommendedNextVersionScope": "string",
|
||||||
|
"recommendedNextVersionMessage": "string (ONLY the description body WITHOUT the type(scope): prefix - e.g. 'bump dependency to ^1.2.6' NOT 'fix(deps): bump dependency to ^1.2.6')",
|
||||||
|
"recommendedNextVersionDetails": ["string"],
|
||||||
|
"recommendedNextVersion": "x.x.x"
|
||||||
|
}
|
||||||
|
|
||||||
|
For recommendedNextVersionDetails, only add entries that have obvious value to the reader.
|
||||||
|
|
||||||
|
Here is the git diff showing what changed:
|
||||||
|
|
||||||
|
${processedDiffString}
|
||||||
|
|
||||||
|
Analyze these changes and output the JSON commit message object.
|
||||||
|
`;
|
||||||
|
|
||||||
|
logger.log('info', 'Starting commit message generation with agent...');
|
||||||
|
|
||||||
|
const commitResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: commitTaskPrompt,
|
||||||
|
system: commitSystemPrompt,
|
||||||
|
tools: fsTools,
|
||||||
|
maxSteps: 10,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[Commit] Tool call: ${toolName}`),
|
||||||
|
});
|
||||||
|
|
||||||
|
// Extract JSON from result - handle cases where AI adds text around it
|
||||||
|
let jsonString = commitResult.text
|
||||||
|
.replace(/```json\n?/gi, '')
|
||||||
|
.replace(/```\n?/gi, '');
|
||||||
|
|
||||||
|
// Try to find JSON object in the result
|
||||||
|
const jsonMatch = jsonString.match(/\{[\s\S]*\}/);
|
||||||
|
if (!jsonMatch) {
|
||||||
|
throw new Error(`Could not find JSON object in result: ${jsonString.substring(0, 100)}...`);
|
||||||
|
}
|
||||||
|
jsonString = jsonMatch[0];
|
||||||
|
|
||||||
|
const resultObject: INextCommitObject = JSON.parse(jsonString);
|
||||||
|
|
||||||
|
const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
|
||||||
|
let previousChangelog: plugins.smartfile.SmartFile;
|
||||||
|
if (await plugins.fsInstance.file(previousChangelogPath).exists()) {
|
||||||
|
previousChangelog = await plugins.smartfileFactory.fromFilePath(previousChangelogPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!previousChangelog) {
|
||||||
|
// lets build the changelog based on that
|
||||||
|
const commitMessages = await gitRepo.getAllCommitMessages();
|
||||||
|
console.log(JSON.stringify(commitMessages, null, 2));
|
||||||
|
|
||||||
|
const changelogSystemPrompt = `
|
||||||
|
You generate changelog.md files for software projects.
|
||||||
|
|
||||||
|
RULES:
|
||||||
|
- Changelog must follow proper markdown format with ## headers for each version
|
||||||
|
- Entries must be chronologically ordered (newest first)
|
||||||
|
- Version ranges for trivial commits should be properly summarized
|
||||||
|
- No duplicate or empty entries
|
||||||
|
- Format: ## yyyy-mm-dd - x.x.x - scope
|
||||||
|
`;
|
||||||
|
|
||||||
|
const changelogTaskPrompt = `
|
||||||
|
You are building a changelog.md file for the project.
|
||||||
|
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.
|
||||||
|
|
||||||
|
A changelog entry should look like this:
|
||||||
|
|
||||||
|
## yyyy-mm-dd - x.x.x - scope here
|
||||||
|
main description here
|
||||||
|
|
||||||
|
- detailed bullet points follow
|
||||||
|
|
||||||
|
You are given:
|
||||||
|
* the commit messages of the project
|
||||||
|
|
||||||
|
Only return the changelog file content, so it can be written directly to changelog.md.
|
||||||
|
|
||||||
|
Here are the commit messages:
|
||||||
|
|
||||||
|
${JSON.stringify(commitMessages, null, 2)}
|
||||||
|
`;
|
||||||
|
|
||||||
|
const changelogResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: changelogTaskPrompt,
|
||||||
|
system: changelogSystemPrompt,
|
||||||
|
maxSteps: 1,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[Changelog] Tool call: ${toolName}`),
|
||||||
|
});
|
||||||
|
|
||||||
|
previousChangelog = plugins.smartfileFactory.fromString(
|
||||||
|
previousChangelogPath,
|
||||||
|
changelogResult.text.replaceAll('```markdown', '').replaceAll('```', ''),
|
||||||
|
'utf8'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
let oldChangelog = previousChangelog.contents.toString().replace('# Changelog\n\n', '');
|
||||||
|
if (oldChangelog.startsWith('\n')) {
|
||||||
|
oldChangelog = oldChangelog.replace('\n', '');
|
||||||
|
}
|
||||||
|
let newDateString = new plugins.smarttime.ExtendedDate().exportToHyphedSortableDate();
|
||||||
|
let newChangelog = `# Changelog\n\n${`## ${newDateString} - {{nextVersion}} - {{nextVersionScope}}
|
||||||
|
{{nextVersionMessage}}
|
||||||
|
|
||||||
|
{{nextVersionDetails}}`}\n\n${oldChangelog}`;
|
||||||
|
resultObject.changelog = newChangelog;
|
||||||
|
|
||||||
|
return resultObject;
|
||||||
|
}
|
||||||
|
}
|
||||||
105
ts/aidocs_classes/description.ts
Normal file
105
ts/aidocs_classes/description.ts
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
import type { AiDoc } from '../classes.aidoc.js';
|
||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { ProjectContext } from './projectcontext.js';
|
||||||
|
import { logger } from '../logging.js';
|
||||||
|
|
||||||
|
interface IDescriptionInterface {
|
||||||
|
description: string;
|
||||||
|
keywords: string[];
|
||||||
|
}
|
||||||
|
|
||||||
|
export class Description {
|
||||||
|
// INSTANCE
|
||||||
|
private aiDocsRef: AiDoc;
|
||||||
|
private projectDir: string;
|
||||||
|
|
||||||
|
constructor(aiDocsRef: AiDoc, projectDirArg: string) {
|
||||||
|
this.aiDocsRef = aiDocsRef;
|
||||||
|
this.projectDir = projectDirArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async build() {
|
||||||
|
// Use runAgent with filesystem tool for agent-driven exploration
|
||||||
|
const fsTools = plugins.smartagentTools.filesystemTool({ rootDir: this.projectDir });
|
||||||
|
|
||||||
|
const descriptionSystemPrompt = `
|
||||||
|
You create project descriptions and keywords for npm packages.
|
||||||
|
|
||||||
|
You have access to filesystem tools to explore the project.
|
||||||
|
|
||||||
|
IMPORTANT RULES:
|
||||||
|
- Only READ files (package.json, .smartconfig.json, source files in ts/)
|
||||||
|
- Do NOT write, delete, or modify any files
|
||||||
|
- Your final response must be valid JSON only
|
||||||
|
- Description must be a clear, concise one-sentence summary
|
||||||
|
- Keywords must be relevant to the project's use cases
|
||||||
|
- Both description and keywords fields must be present
|
||||||
|
- Do NOT wrap JSON in markdown code blocks
|
||||||
|
`;
|
||||||
|
|
||||||
|
const descriptionTaskPrompt = `
|
||||||
|
PROJECT DIRECTORY: ${this.projectDir}
|
||||||
|
|
||||||
|
Use the filesystem tools to explore the project and understand what it does:
|
||||||
|
1. First, use list_directory to see the project structure
|
||||||
|
2. Read package.json to understand the package name and current description
|
||||||
|
3. Read .smartconfig.json if it exists for additional metadata
|
||||||
|
4. Read key source files in ts/ directory to understand the implementation
|
||||||
|
|
||||||
|
Then generate a description and keywords based on your exploration.
|
||||||
|
|
||||||
|
Your FINAL response must be valid JSON adhering to this interface:
|
||||||
|
{
|
||||||
|
description: string; // a sensible short, one sentence description of the project
|
||||||
|
keywords: string[]; // an array of tags that describe the project based on use cases
|
||||||
|
}
|
||||||
|
|
||||||
|
Important: Answer only in valid JSON.
|
||||||
|
Your answer should be parseable with JSON.parse() without modifying anything.
|
||||||
|
Don't wrap the JSON in \`\`\`json\`\`\` - just return the raw JSON object.
|
||||||
|
`;
|
||||||
|
|
||||||
|
logger.log('info', 'Starting description generation with agent...');
|
||||||
|
|
||||||
|
const descriptionResult = await plugins.smartagent.runAgent({
|
||||||
|
model: this.aiDocsRef.model,
|
||||||
|
prompt: descriptionTaskPrompt,
|
||||||
|
system: descriptionSystemPrompt,
|
||||||
|
tools: fsTools,
|
||||||
|
maxSteps: 15,
|
||||||
|
onToolCall: (toolName) => logger.log('info', `[Description] Tool call: ${toolName}`),
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log(descriptionResult.text);
|
||||||
|
const resultObject: IDescriptionInterface = JSON.parse(
|
||||||
|
descriptionResult.text.replace('```json', '').replace('```', ''),
|
||||||
|
);
|
||||||
|
|
||||||
|
// Use ProjectContext to get file handles for writing
|
||||||
|
const projectContext = new ProjectContext(this.projectDir);
|
||||||
|
const files = await projectContext.gatherFiles();
|
||||||
|
|
||||||
|
// Update smartconfig.json
|
||||||
|
const smartconfigJson = files.smartfilesNpmextraJSON;
|
||||||
|
const smartconfigJsonContent = JSON.parse(smartconfigJson.contents.toString());
|
||||||
|
|
||||||
|
smartconfigJsonContent['gitzone'].module.description = resultObject.description;
|
||||||
|
smartconfigJsonContent['gitzone'].module.keywords = resultObject.keywords;
|
||||||
|
|
||||||
|
smartconfigJson.contents = Buffer.from(JSON.stringify(smartconfigJsonContent, null, 2));
|
||||||
|
await smartconfigJson.write();
|
||||||
|
|
||||||
|
// Update package.json
|
||||||
|
const packageJson = files.smartfilePackageJSON;
|
||||||
|
const packageJsonContent = JSON.parse(packageJson.contents.toString());
|
||||||
|
packageJsonContent.description = resultObject.description;
|
||||||
|
packageJsonContent.keywords = resultObject.keywords;
|
||||||
|
packageJson.contents = Buffer.from(JSON.stringify(packageJsonContent, null, 2));
|
||||||
|
await packageJson.write();
|
||||||
|
|
||||||
|
console.log(`\n======================\n`);
|
||||||
|
console.log(JSON.stringify(resultObject, null, 2));
|
||||||
|
console.log(`\n======================\n`);
|
||||||
|
return descriptionResult.text;
|
||||||
|
}
|
||||||
|
}
|
||||||
4
ts/aidocs_classes/index.ts
Normal file
4
ts/aidocs_classes/index.ts
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
export * from './commit.js';
|
||||||
|
export * from './description.js';
|
||||||
|
export * from './projectcontext.js';
|
||||||
|
export * from './readme.js';
|
||||||
118
ts/aidocs_classes/projectcontext.ts
Normal file
118
ts/aidocs_classes/projectcontext.ts
Normal file
@@ -0,0 +1,118 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
|
||||||
|
/**
 * Gathers a project's key files (package.json, readmes, config, TypeScript
 * sources) and flattens them into a single delimited context string for AI
 * consumption, tracking an estimated token count.
 */
export class ProjectContext {
  // NOTE(review): empty stub — currently resolves to undefined. Presumably
  // intended as a factory constructing a ProjectContext from a directory;
  // TODO confirm intent or implement.
  public static async fromDir(dirArg: string) {}

  // INSTANCE
  public projectDir: string;
  // Estimated token count of the most recently built context (see countTokens()).
  private tokenCount: number = 0;
  // The most recently built context string (set by buildContext()).
  private contextString: string = '';

  /**
   * @param projectDirArg path of the project directory to gather context from
   */
  constructor(projectDirArg: string) {
    this.projectDir = projectDirArg;
  }

  /**
   * Loads handles for the project's metadata files plus its TypeScript sources.
   * Returns package.json, readme.md, readme.hints.md and .smartconfig.json as
   * single files, and two file lists: sources whose relative path starts with
   * 'ts' and tests under 'test/' (both filtered to '.ts' extensions).
   * NOTE(review): the 'ts' prefix filter also matches sibling dirs like
   * 'tstest/' — TODO confirm that is intended.
   */
  public async gatherFiles() {
    const smartfilePackageJSON = await plugins.smartfileFactory.fromFilePath(
      plugins.path.join(this.projectDir, 'package.json'),
      this.projectDir,
    );
    const smartfilesReadme = await plugins.smartfileFactory.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.md'),
      this.projectDir,
    );

    const smartfilesReadmeHints = await plugins.smartfileFactory.fromFilePath(
      plugins.path.join(this.projectDir, 'readme.hints.md'),
      this.projectDir,
    );
    const smartfilesNpmextraJSON = await plugins.smartfileFactory.fromFilePath(
      plugins.path.join(this.projectDir, '.smartconfig.json'),
      this.projectDir,
    );
    const smartfilesMod = await plugins.smartfileFactory.virtualDirectoryFromPath(
      this.projectDir,
    ).then(vd => vd.filter(f => f.relative.startsWith('ts') && f.relative.endsWith('.ts')).listFiles());
    const smartfilesTest = await plugins.smartfileFactory.virtualDirectoryFromPath(
      this.projectDir,
    ).then(vd => vd.filter(f => f.relative.startsWith('test/') && f.relative.endsWith('.ts')).listFiles());
    return {
      smartfilePackageJSON,
      smartfilesReadme,
      smartfilesReadmeHints,
      smartfilesNpmextraJSON,
      smartfilesMod,
      smartfilesTest,
    };
  }

  /**
   * Serializes the given files into one string, each wrapped in
   * START OF FILE / END OF FILE delimiters labeled with its relative path.
   * @param filesArg files to serialize
   * @returns the concatenated, delimiter-wrapped context string
   */
  public async convertFilesToContext(filesArg: plugins.smartfile.SmartFile[]) {
    // NOTE(review): this map performs no work (debug logging is commented out)
    // and its result is discarded — dead code kept as-is.
    filesArg.map((fileArg) => {
      // console.log(` -> ${fileArg.relative}`);
    });
    return filesArg
      .map((smartfile) => {
        return `
====== START OF FILE ${smartfile.relative} ======

${smartfile.contents.toString()}

====== END OF FILE ${smartfile.relative} ======
`;
      })
      .join('\n');
  }

  /**
   * Estimate token count for a string
   * Uses a rough estimate of 4 characters per token
   * @param text The text to estimate tokens for
   * @returns Estimated number of tokens
   */
  public countTokens(text: string): number {
    // Rough estimate: ~4 characters per token for English text
    return Math.ceil(text.length / 4);
  }

  /**
   * Gathers all project files and flattens them into the cached context
   * string, updating the cached token count as a side effect.
   * NOTE(review): dirArg is unused — the method always reads this.projectDir.
   * TODO confirm whether the parameter should be honored or removed.
   */
  private async buildContext(dirArg: string) {
    const files = await this.gatherFiles();
    let context = await this.convertFilesToContext([
      files.smartfilePackageJSON,
      files.smartfilesReadme,
      files.smartfilesReadmeHints,
      files.smartfilesNpmextraJSON,
      ...files.smartfilesMod,
      ...files.smartfilesTest,
    ]);
    // Count tokens in the context
    this.contextString = context;
    this.tokenCount = this.countTokens(context);

    // console.log(context);
    return context;
  }

  /**
   * Get the token count for the current context
   * @returns The number of tokens in the context (0 until update() has run)
   */
  public getTokenCount(): number {
    return this.tokenCount;
  }

  /**
   * Get both the context string and its token count
   * @returns An object containing the context string and token count
   */
  public getContextWithTokenCount(): { context: string; tokenCount: number } {
    return {
      context: this.contextString,
      tokenCount: this.tokenCount
    };
  }

  /**
   * Rebuilds the cached context from the project directory.
   * @returns the freshly built context string
   */
  public async update() {
    const result = await this.buildContext(this.projectDir);
    return result;
  }
}
|
||||||
191
ts/aidocs_classes/readme.ts
Normal file
191
ts/aidocs_classes/readme.ts
Normal file
@@ -0,0 +1,191 @@
|
|||||||
|
import type { AiDoc } from '../classes.aidoc.js';
|
||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import * as paths from '../paths.js';
|
||||||
|
import { ProjectContext } from './projectcontext.js';
|
||||||
|
import { logger } from '../logging.js';
|
||||||
|
|
||||||
|
/**
 * Generates the project's readme.md (and each monorepo submodule's readme)
 * by letting an AI agent explore the repository through filesystem tools.
 */
export class Readme {
  // INSTANCE
  private aiDocsRef: AiDoc;
  private projectDir: string;

  /**
   * @param aiDocsRef parent AiDoc instance supplying the language model
   * @param projectDirArg directory of the project to document
   */
  constructor(aiDocsRef: AiDoc, projectDirArg: string) {
    this.aiDocsRef = aiDocsRef;
    this.projectDir = projectDirArg;
  }

  /**
   * Builds and writes readme.md for the main project, then iterates over
   * tspublish submodules and writes a readme.md for each.
   * @returns the generated markdown for the main project readme
   */
  public async build() {
    let finalReadmeString = ``;

    // First check legal info before introducing any cost
    const projectContext = new ProjectContext(this.projectDir);
    const smartconfigJson = JSON.parse(
      (await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
    );
    const legalInfo = smartconfigJson?.['tsdoc']?.legal;
    // NOTE(review): a missing legal section is only logged, not thrown; below,
    // an undefined legalInfo is still concatenated into the readme, producing
    // the literal text "undefined" — TODO confirm whether this should abort.
    if (!legalInfo) {
      const error = new Error(`No legal information found in .smartconfig.json`);
      console.log(error);
    }

    // Use runAgent with filesystem tool for agent-driven exploration
    const fsTools = plugins.smartagentTools.filesystemTool({ rootDir: this.projectDir });

    const readmeSystemPrompt = `
You create markdown READMEs for npm projects. You only output the markdown readme.

You have access to filesystem tools to explore the project. Use them to understand the codebase.

IMPORTANT RULES:
- Only READ files within the project directory
- Do NOT write, delete, or modify any files
- README must follow proper markdown format
- Must contain Install and Usage sections
- Code examples must use correct TypeScript/ESM syntax
- Documentation must be comprehensive and helpful
- Do NOT include licensing information (added separately)
- Do NOT use CommonJS syntax - only ESM
- Do NOT include "in conclusion" or similar filler
`;

    const readmeTaskPrompt = `
PROJECT DIRECTORY: ${this.projectDir}

Use the filesystem tools to explore the project and understand what it does:
1. First, use list_directory to see the project structure
2. Read package.json to understand the package name, description, and dependencies
3. Read the existing readme.md if it exists (use it as a base, improve and expand)
4. Read readme.hints.md if it exists (contains hints for documentation)
5. Read key source files in ts/ directory to understand the API and implementation
6. Focus on exported classes, interfaces, and functions

Then generate a comprehensive README following this template:

# Project Name
[The name from package.json and description]

## Install
[Short text on how to install the project]

## Usage
[
Give code examples here.
Construct sensible scenarios for the user.
Make sure to show a complete set of features of the module.
Don't omit use cases.
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, expand and improve.
Check for completeness.
Don't include any licensing information. This will be added later.
Avoid "in conclusion" statements.
]
`;

    logger.log('info', 'Starting README generation with agent...');

    // Agent explores the project with read-only filesystem tools, then emits markdown.
    const readmeResult = await plugins.smartagent.runAgent({
      model: this.aiDocsRef.model,
      prompt: readmeTaskPrompt,
      system: readmeSystemPrompt,
      tools: fsTools,
      maxSteps: 25,
      onToolCall: (toolName) => logger.log('info', `[README] Tool call: ${toolName}`),
    });

    // Clean up markdown formatting if wrapped in code blocks
    let resultMessage = readmeResult.text
      .replace(/^```markdown\n?/i, '')
      .replace(/\n?```$/i, '');

    finalReadmeString += resultMessage + '\n' + legalInfo;

    console.log(`\n======================\n`);
    console.log(resultMessage);
    console.log(`\n======================\n`);

    // Persist the generated readme back to the project's readme.md.
    const readme = (await projectContext.gatherFiles()).smartfilesReadme;
    readme.contents = Buffer.from(finalReadmeString);
    await readme.write();

    // lets care about monorepo aspects
    const tsPublishInstance = new plugins.tspublish.TsPublish();
    const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
    logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);

    // Generate one readme per published submodule, sequentially.
    for (const subModule of Object.keys(subModules)) {
      logger.log('info', `Building readme for ${subModule}`);

      const subModulePath = plugins.path.join(paths.cwd, subModule);
      const tspublishData = await plugins.fsInstance
        .file(plugins.path.join(subModulePath, 'tspublish.json'))
        .encoding('utf8')
        .read();

      // Tools are scoped to the submodule directory only.
      const subModuleFsTools = plugins.smartagentTools.filesystemTool({ rootDir: subModulePath });

      const subModuleSystemPrompt = `
You create markdown READMEs for npm projects. You only output the markdown readme.

IMPORTANT RULES:
- Only READ files within the submodule directory
- Do NOT write, delete, or modify any files
- README must be comprehensive, well-formatted markdown with ESM TypeScript examples
- Do NOT include licensing information (added separately)
`;

      const subModulePrompt = `
SUB MODULE: ${subModule}
SUB MODULE DIRECTORY: ${subModulePath}

IMPORTANT: YOU ARE CREATING THE README FOR THIS SUB MODULE: ${subModule}
The Sub Module will be published with:
${JSON.stringify(tspublishData, null, 2)}

Use the filesystem tools to explore the submodule:
1. Use list_directory to see the submodule structure
2. Read package.json to understand the submodule
3. Read source files in ts/ directory to understand the implementation

Generate a README following the template:

# Project Name
[name and description from package.json]

## Install
[installation instructions]

## Usage
[
Code examples with complete features.
ESM TypeScript syntax only.
Write at least 4000 words.
No licensing information.
No "in conclusion".
]

Don't use \`\`\` at the beginning or end. Only for code blocks.
`;

      const subModuleResult = await plugins.smartagent.runAgent({
        model: this.aiDocsRef.model,
        prompt: subModulePrompt,
        system: subModuleSystemPrompt,
        tools: subModuleFsTools,
        maxSteps: 20,
        onToolCall: (toolName) => logger.log('info', `[README:${subModule}] Tool call: ${toolName}`),
      });

      // Strip a possible ```markdown fence and append the shared legal footer.
      const subModuleReadmeString = subModuleResult.text
        .replace(/^```markdown\n?/i, '')
        .replace(/\n?```$/i, '') + '\n' + legalInfo;
      await plugins.fsInstance
        .file(plugins.path.join(subModulePath, 'readme.md'))
        .encoding('utf8')
        .write(subModuleReadmeString);
      logger.log('success', `Built readme for ${subModule}`);
    }

    return resultMessage;
  }
}
|
||||||
154
ts/classes.aidoc.ts
Normal file
154
ts/classes.aidoc.ts
Normal file
@@ -0,0 +1,154 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
import * as aiDocsClasses from './aidocs_classes/index.js';
|
||||||
|
|
||||||
|
export class AiDoc {
|
||||||
|
private openaiToken: string;
|
||||||
|
|
||||||
|
public smartconfigKV: plugins.smartconfig.KeyValueStore;
|
||||||
|
public qenvInstance: plugins.qenv.Qenv;
|
||||||
|
public aidocInteract: plugins.smartinteract.SmartInteract;
|
||||||
|
public model: plugins.smartai.LanguageModelV3;
|
||||||
|
|
||||||
|
argvArg: any;
|
||||||
|
|
||||||
|
constructor(argvArg?: any) {
|
||||||
|
this.argvArg = argvArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
private printSanitizedToken() {
|
||||||
|
// Check if the token length is greater than the sum of startLength and endLength
|
||||||
|
let printToken: string;
|
||||||
|
if (this.openaiToken.length > 6) {
|
||||||
|
// Extract the beginning and end parts of the token
|
||||||
|
const start = this.openaiToken.substring(0, 3);
|
||||||
|
const end = this.openaiToken.substring(this.openaiToken.length - 3);
|
||||||
|
printToken = `${start}...${end}`;
|
||||||
|
} else {
|
||||||
|
// If the token is not long enough, return it as is
|
||||||
|
printToken = this.openaiToken;
|
||||||
|
}
|
||||||
|
console.log(`OpenAI Token on record: ${printToken}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async start() {
|
||||||
|
// lets care about prerequisites
|
||||||
|
this.aidocInteract = new plugins.smartinteract.SmartInteract();
|
||||||
|
this.qenvInstance = new plugins.qenv.Qenv();
|
||||||
|
if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
|
||||||
|
// Migrate old KV store path to new path if needed
|
||||||
|
const homeDir = plugins.smartpath.get.home();
|
||||||
|
const oldKvPath = plugins.path.join(homeDir, '.smartconfig/kv/tsdoc.json');
|
||||||
|
const newKvDir = plugins.path.join(homeDir, '.smartconfig/kv/@git.zone');
|
||||||
|
const newKvPath = plugins.path.join(newKvDir, 'tsdoc.json');
|
||||||
|
if (
|
||||||
|
await plugins.fsInstance.file(oldKvPath).exists() &&
|
||||||
|
!(await plugins.fsInstance.file(newKvPath).exists())
|
||||||
|
) {
|
||||||
|
console.log('Migrating tsdoc KeyValueStore to @git.zone/tsdoc...');
|
||||||
|
await plugins.fsInstance.directory(newKvDir).recursive().create();
|
||||||
|
await plugins.fsInstance.file(oldKvPath).copy(newKvPath);
|
||||||
|
await plugins.fsInstance.file(oldKvPath).delete();
|
||||||
|
console.log('Migration complete: tsdoc.json -> @git.zone/tsdoc.json');
|
||||||
|
}
|
||||||
|
|
||||||
|
this.smartconfigKV = new plugins.smartconfig.KeyValueStore({
|
||||||
|
typeArg: 'userHomeDir',
|
||||||
|
identityArg: '@git.zone/tsdoc',
|
||||||
|
mandatoryKeys: ['OPENAI_TOKEN'],
|
||||||
|
});
|
||||||
|
|
||||||
|
const missingKeys = await this.smartconfigKV.getMissingMandatoryKeys();
|
||||||
|
if (missingKeys.length > 0) {
|
||||||
|
// lets try argv
|
||||||
|
if (this.argvArg?.OPENAI_TOKEN) {
|
||||||
|
this.openaiToken = this.argvArg.OPENAI_TOKEN;
|
||||||
|
} else {
|
||||||
|
// lets try smartinteract
|
||||||
|
// wait for a second until OpenAI fixes punycode problem...
|
||||||
|
await plugins.smartdelay.delayFor(1000);
|
||||||
|
const answerObject = await this.aidocInteract.askQuestion({
|
||||||
|
type: 'input',
|
||||||
|
message: `Please provide your OpenAI token. This will be persisted in your home directory.`,
|
||||||
|
name: 'OPENAI_TOKEN',
|
||||||
|
default: '',
|
||||||
|
});
|
||||||
|
this.openaiToken = answerObject.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
this.printSanitizedToken();
|
||||||
|
await this.smartconfigKV.writeKey('OPENAI_TOKEN', this.openaiToken);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (!this.openaiToken && this.smartconfigKV) {
|
||||||
|
this.openaiToken = await this.smartconfigKV.readKey('OPENAI_TOKEN');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create model using getModel()
|
||||||
|
this.model = plugins.smartai.getModel({
|
||||||
|
provider: 'openai',
|
||||||
|
model: 'gpt-5.4',
|
||||||
|
apiKey: this.openaiToken,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async stop() {
|
||||||
|
// No lifecycle management needed with getModel() API
|
||||||
|
}
|
||||||
|
|
||||||
|
public getOpenaiToken(): string {
|
||||||
|
return this.openaiToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async buildReadme(projectDirArg: string) {
|
||||||
|
const readmeInstance = new aiDocsClasses.Readme(this, projectDirArg);
|
||||||
|
return await readmeInstance.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async buildDescription(projectDirArg: string) {
|
||||||
|
const descriptionInstance = new aiDocsClasses.Description(this, projectDirArg);
|
||||||
|
return await descriptionInstance.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async buildNextCommitObject(projectDirArg: string) {
|
||||||
|
const commitInstance = new aiDocsClasses.Commit(this, projectDirArg);
|
||||||
|
return await commitInstance.buildNextCommitObject();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getProjectContext(projectDirArg: string) {
|
||||||
|
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
|
||||||
|
return await projectContextInstance.gatherFiles();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the context with token count information
|
||||||
|
* @param projectDirArg The path to the project directory
|
||||||
|
* @returns An object containing the context string and its token count
|
||||||
|
*/
|
||||||
|
public async getProjectContextWithTokenCount(projectDirArg: string) {
|
||||||
|
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
|
||||||
|
await projectContextInstance.update();
|
||||||
|
return projectContextInstance.getContextWithTokenCount();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get just the token count for a project's context
|
||||||
|
* @param projectDirArg The path to the project directory
|
||||||
|
* @returns The number of tokens in the project context
|
||||||
|
*/
|
||||||
|
public async getProjectContextTokenCount(projectDirArg: string) {
|
||||||
|
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
|
||||||
|
await projectContextInstance.update();
|
||||||
|
return projectContextInstance.getTokenCount();
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Estimate token count in a text string
|
||||||
|
* @param text The text to estimate tokens for
|
||||||
|
* @returns Estimated number of tokens
|
||||||
|
*/
|
||||||
|
public countTokens(text: string): number {
|
||||||
|
const projectContextInstance = new aiDocsClasses.ProjectContext('');
|
||||||
|
return projectContextInstance.countTokens(text);
|
||||||
|
}
|
||||||
|
}
|
||||||
353
ts/classes.diffprocessor.ts
Normal file
353
ts/classes.diffprocessor.ts
Normal file
@@ -0,0 +1,353 @@
|
|||||||
|
/**
|
||||||
|
* Intelligent git diff processor that handles large diffs by sampling and prioritization
|
||||||
|
* instead of blind truncation.
|
||||||
|
*/
|
||||||
|
|
||||||
|
export interface IDiffFileInfo {
|
||||||
|
filepath: string;
|
||||||
|
status: 'added' | 'modified' | 'deleted';
|
||||||
|
linesAdded: number;
|
||||||
|
linesRemoved: number;
|
||||||
|
totalLines: number;
|
||||||
|
estimatedTokens: number;
|
||||||
|
diffContent: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IProcessedDiff {
|
||||||
|
summary: string; // Human-readable overview
|
||||||
|
fullDiffs: string[]; // Small files included fully
|
||||||
|
summarizedDiffs: string[]; // Medium files with head/tail
|
||||||
|
metadataOnly: string[]; // Large files, just stats
|
||||||
|
totalFiles: number;
|
||||||
|
totalTokens: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface IDiffProcessorOptions {
|
||||||
|
maxDiffTokens?: number; // Maximum tokens for entire diff section (default: 100000)
|
||||||
|
smallFileLines?: number; // Files <= this are included fully (default: 50)
|
||||||
|
mediumFileLines?: number; // Files <= this are summarized (default: 200)
|
||||||
|
sampleHeadLines?: number; // Lines to show at start of medium files (default: 20)
|
||||||
|
sampleTailLines?: number; // Lines to show at end of medium files (default: 20)
|
||||||
|
}
|
||||||
|
|
||||||
|
export class DiffProcessor {
|
||||||
|
private options: Required<IDiffProcessorOptions>;
|
||||||
|
|
||||||
|
constructor(options: IDiffProcessorOptions = {}) {
|
||||||
|
this.options = {
|
||||||
|
maxDiffTokens: options.maxDiffTokens ?? 100000,
|
||||||
|
smallFileLines: options.smallFileLines ?? 50,
|
||||||
|
mediumFileLines: options.mediumFileLines ?? 200,
|
||||||
|
sampleHeadLines: options.sampleHeadLines ?? 20,
|
||||||
|
sampleTailLines: options.sampleTailLines ?? 20,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Process an array of git diffs into a structured, token-efficient format
|
||||||
|
*/
|
||||||
|
public processDiffs(diffStringArray: string[]): IProcessedDiff {
|
||||||
|
// Parse all diffs into file info objects
|
||||||
|
const fileInfos: IDiffFileInfo[] = diffStringArray
|
||||||
|
.map(diffString => this.parseDiffFile(diffString))
|
||||||
|
.filter(info => info !== null) as IDiffFileInfo[];
|
||||||
|
|
||||||
|
// Prioritize files (source files first, build artifacts last)
|
||||||
|
const prioritized = this.prioritizeFiles(fileInfos);
|
||||||
|
|
||||||
|
const result: IProcessedDiff = {
|
||||||
|
summary: '',
|
||||||
|
fullDiffs: [],
|
||||||
|
summarizedDiffs: [],
|
||||||
|
metadataOnly: [],
|
||||||
|
totalFiles: prioritized.length,
|
||||||
|
totalTokens: 0,
|
||||||
|
};
|
||||||
|
|
||||||
|
let tokensUsed = 0;
|
||||||
|
const tokenBudget = this.options.maxDiffTokens;
|
||||||
|
|
||||||
|
// Categorize and include files based on size and token budget
|
||||||
|
for (const fileInfo of prioritized) {
|
||||||
|
const remainingBudget = tokenBudget - tokensUsed;
|
||||||
|
|
||||||
|
if (remainingBudget <= 0) {
|
||||||
|
// Budget exhausted - rest are metadata only
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (fileInfo.totalLines <= this.options.smallFileLines) {
|
||||||
|
// Small file - include fully if budget allows
|
||||||
|
if (fileInfo.estimatedTokens <= remainingBudget) {
|
||||||
|
const statusPrefix = this.getFileStatusPrefix(fileInfo);
|
||||||
|
result.fullDiffs.push(`${statusPrefix}${fileInfo.diffContent}`);
|
||||||
|
tokensUsed += fileInfo.estimatedTokens;
|
||||||
|
} else {
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
}
|
||||||
|
} else if (fileInfo.totalLines <= this.options.mediumFileLines) {
|
||||||
|
// Medium file - try to include summary with head/tail
|
||||||
|
const summary = this.extractDiffSample(
|
||||||
|
fileInfo,
|
||||||
|
this.options.sampleHeadLines,
|
||||||
|
this.options.sampleTailLines
|
||||||
|
);
|
||||||
|
const summaryTokens = Math.ceil(summary.length / 4); // Rough estimate
|
||||||
|
|
||||||
|
if (summaryTokens <= remainingBudget) {
|
||||||
|
result.summarizedDiffs.push(summary);
|
||||||
|
tokensUsed += summaryTokens;
|
||||||
|
} else {
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// Large file - metadata only
|
||||||
|
result.metadataOnly.push(this.formatMetadataOnly(fileInfo));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
result.totalTokens = tokensUsed;
|
||||||
|
result.summary = this.generateSummary(result);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format the processed diff for inclusion in context
|
||||||
|
*/
|
||||||
|
public formatForContext(processed: IProcessedDiff): string {
|
||||||
|
const sections: string[] = [];
|
||||||
|
|
||||||
|
// Summary section
|
||||||
|
sections.push('====== GIT DIFF SUMMARY ======');
|
||||||
|
sections.push(processed.summary);
|
||||||
|
sections.push('');
|
||||||
|
|
||||||
|
// Full diffs section
|
||||||
|
if (processed.fullDiffs.length > 0) {
|
||||||
|
sections.push(`====== FULL DIFFS (${processed.fullDiffs.length} files) ======`);
|
||||||
|
sections.push(processed.fullDiffs.join('\n\n'));
|
||||||
|
sections.push('');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Summarized diffs section
|
||||||
|
if (processed.summarizedDiffs.length > 0) {
|
||||||
|
sections.push(`====== SUMMARIZED DIFFS (${processed.summarizedDiffs.length} files) ======`);
|
||||||
|
sections.push(processed.summarizedDiffs.join('\n\n'));
|
||||||
|
sections.push('');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Metadata only section
|
||||||
|
if (processed.metadataOnly.length > 0) {
|
||||||
|
sections.push(`====== METADATA ONLY (${processed.metadataOnly.length} files) ======`);
|
||||||
|
sections.push(processed.metadataOnly.join('\n'));
|
||||||
|
sections.push('');
|
||||||
|
}
|
||||||
|
|
||||||
|
sections.push('====== END OF GIT DIFF ======');
|
||||||
|
|
||||||
|
return sections.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse a single git diff string into file information
|
||||||
|
*/
|
||||||
|
private parseDiffFile(diffString: string): IDiffFileInfo | null {
|
||||||
|
if (!diffString || diffString.trim().length === 0) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const lines = diffString.split('\n');
|
||||||
|
let filepath = '';
|
||||||
|
let status: 'added' | 'modified' | 'deleted' = 'modified';
|
||||||
|
let linesAdded = 0;
|
||||||
|
let linesRemoved = 0;
|
||||||
|
|
||||||
|
// Parse diff header to extract filepath and status
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith('--- a/')) {
|
||||||
|
filepath = line.substring(6);
|
||||||
|
} else if (line.startsWith('+++ b/')) {
|
||||||
|
const newPath = line.substring(6);
|
||||||
|
if (newPath === '/dev/null') {
|
||||||
|
status = 'deleted';
|
||||||
|
} else if (filepath === '/dev/null') {
|
||||||
|
status = 'added';
|
||||||
|
filepath = newPath;
|
||||||
|
} else {
|
||||||
|
filepath = newPath;
|
||||||
|
}
|
||||||
|
} else if (line.startsWith('+') && !line.startsWith('+++')) {
|
||||||
|
linesAdded++;
|
||||||
|
} else if (line.startsWith('-') && !line.startsWith('---')) {
|
||||||
|
linesRemoved++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const totalLines = linesAdded + linesRemoved;
|
||||||
|
const estimatedTokens = Math.ceil(diffString.length / 4);
|
||||||
|
|
||||||
|
return {
|
||||||
|
filepath,
|
||||||
|
status,
|
||||||
|
linesAdded,
|
||||||
|
linesRemoved,
|
||||||
|
totalLines,
|
||||||
|
estimatedTokens,
|
||||||
|
diffContent: diffString,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Prioritize files by importance (source files before build artifacts)
|
||||||
|
*/
|
||||||
|
private prioritizeFiles(files: IDiffFileInfo[]): IDiffFileInfo[] {
|
||||||
|
return files.sort((a, b) => {
|
||||||
|
const scoreA = this.getFileImportanceScore(a.filepath);
|
||||||
|
const scoreB = this.getFileImportanceScore(b.filepath);
|
||||||
|
return scoreB - scoreA; // Higher score first
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Calculate importance score for a file path
|
||||||
|
*/
|
||||||
|
private getFileImportanceScore(filepath: string): number {
|
||||||
|
// Source files - highest priority
|
||||||
|
if (filepath.match(/^(src|lib|app|components|pages|api)\//)) {
|
||||||
|
return 100;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Test files - high priority
|
||||||
|
if (filepath.match(/\.(test|spec)\.(ts|js|tsx|jsx)$/) || filepath.startsWith('test/')) {
|
||||||
|
return 80;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Configuration files - medium-high priority
|
||||||
|
if (filepath.match(/\.(json|yaml|yml|toml|config\.(ts|js))$/)) {
|
||||||
|
return 60;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Documentation - medium priority
|
||||||
|
if (filepath.match(/\.(md|txt|rst)$/)) {
|
||||||
|
return 40;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Build artifacts - low priority
|
||||||
|
if (filepath.match(/^(dist|build|out|\.next|public\/dist)\//)) {
|
||||||
|
return 10;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Start with default priority
|
||||||
|
let score = 50;
|
||||||
|
|
||||||
|
// Boost interface/type files - they're usually small but critical
|
||||||
|
if (filepath.includes('interfaces/') || filepath.includes('.types.')) {
|
||||||
|
score += 20;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Boost entry points
|
||||||
|
if (filepath.endsWith('index.ts') || filepath.endsWith('mod.ts')) {
|
||||||
|
score += 15;
|
||||||
|
}
|
||||||
|
|
||||||
|
return score;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract head and tail lines from a diff, omitting the middle
|
||||||
|
*/
|
||||||
|
private extractDiffSample(fileInfo: IDiffFileInfo, headLines: number, tailLines: number): string {
|
||||||
|
const lines = fileInfo.diffContent.split('\n');
|
||||||
|
const totalLines = lines.length;
|
||||||
|
|
||||||
|
if (totalLines <= headLines + tailLines) {
|
||||||
|
// File is small enough to include fully
|
||||||
|
return fileInfo.diffContent;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract file metadata from diff header
|
||||||
|
const headerLines: string[] = [];
|
||||||
|
let bodyStartIndex = 0;
|
||||||
|
for (let i = 0; i < lines.length; i++) {
|
||||||
|
if (lines[i].startsWith('@@')) {
|
||||||
|
headerLines.push(...lines.slice(0, i + 1));
|
||||||
|
bodyStartIndex = i + 1;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const bodyLines = lines.slice(bodyStartIndex);
|
||||||
|
const head = bodyLines.slice(0, headLines);
|
||||||
|
const tail = bodyLines.slice(-tailLines);
|
||||||
|
const omittedLines = bodyLines.length - headLines - tailLines;
|
||||||
|
|
||||||
|
const statusEmoji = fileInfo.status === 'added' ? '➕' :
|
||||||
|
fileInfo.status === 'deleted' ? '➖' : '📝';
|
||||||
|
|
||||||
|
const parts: string[] = [];
|
||||||
|
parts.push(`${statusEmoji} FILE: ${fileInfo.filepath}`);
|
||||||
|
parts.push(`CHANGES: +${fileInfo.linesAdded} lines, -${fileInfo.linesRemoved} lines (${fileInfo.totalLines} total)`);
|
||||||
|
parts.push('');
|
||||||
|
parts.push(...headerLines);
|
||||||
|
parts.push(...head);
|
||||||
|
parts.push('');
|
||||||
|
parts.push(`[... ${omittedLines} lines omitted - use Read tool to see full file ...]`);
|
||||||
|
parts.push('');
|
||||||
|
parts.push(...tail);
|
||||||
|
|
||||||
|
return parts.join('\n');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get file status prefix with emoji
|
||||||
|
*/
|
||||||
|
private getFileStatusPrefix(fileInfo: IDiffFileInfo): string {
|
||||||
|
const statusEmoji = fileInfo.status === 'added' ? '➕' :
|
||||||
|
fileInfo.status === 'deleted' ? '➖' : '📝';
|
||||||
|
return `${statusEmoji} `;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Extract filepath from diff content
|
||||||
|
*/
|
||||||
|
private extractFilepathFromDiff(diffContent: string): string {
|
||||||
|
const lines = diffContent.split('\n');
|
||||||
|
for (const line of lines) {
|
||||||
|
if (line.startsWith('+++ b/')) {
|
||||||
|
return line.substring(6);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return 'unknown';
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Format file info as metadata only
|
||||||
|
*/
|
||||||
|
private formatMetadataOnly(fileInfo: IDiffFileInfo): string {
|
||||||
|
const statusEmoji = fileInfo.status === 'added' ? '➕' :
|
||||||
|
fileInfo.status === 'deleted' ? '➖' : '📝';
|
||||||
|
return `${statusEmoji} ${fileInfo.filepath} (+${fileInfo.linesAdded}, -${fileInfo.linesRemoved})`;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Generate human-readable summary of processed diff
|
||||||
|
*/
|
||||||
|
private generateSummary(result: IProcessedDiff): string {
|
||||||
|
const parts: string[] = [];
|
||||||
|
parts.push(`Files changed: ${result.totalFiles} total`);
|
||||||
|
parts.push(`- ${result.fullDiffs.length} included in full`);
|
||||||
|
parts.push(`- ${result.summarizedDiffs.length} summarized (head/tail shown)`);
|
||||||
|
parts.push(`- ${result.metadataOnly.length} metadata only`);
|
||||||
|
parts.push(`Estimated tokens: ~${result.totalTokens.toLocaleString()}`);
|
||||||
|
|
||||||
|
if (result.metadataOnly.length > 0) {
|
||||||
|
parts.push('');
|
||||||
|
parts.push('NOTE: Some files excluded to stay within token budget.');
|
||||||
|
parts.push('Use Read tool with specific file paths to see full content.');
|
||||||
|
}
|
||||||
|
|
||||||
|
return parts.join('\n');
|
||||||
|
}
|
||||||
|
}
|
||||||
58
ts/classes.typedoc.ts
Normal file
58
ts/classes.typedoc.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as paths from './paths.js';
|
||||||
|
|
||||||
|
/**
 * Wrapper around the `typedoc` CLI: writes a temporary tsconfig covering
 * the project's ts/ (and ts_web/) sources, runs typedoc via a bash shell,
 * then deletes the temporary tsconfig.
 */
export class TypeDoc {
  // Shell runner with node_modules/.bin on PATH so the local typedoc binary resolves.
  public smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    pathDirectories: [paths.binDir],
  });

  // Static
  // NOTE(review): always returns true — looks like a placeholder check; confirm
  // whether a real directory-format detection was intended here.
  public static async isTypeDocDir(dirPathArg: string): Promise<boolean> {
    return true;
  }

  // Instance
  // Directory the docs are generated for.
  public typedocDirectory: string;
  // NOTE(review): dirPathArg is untyped (implicit any) — presumably a string path.
  constructor(dirPathArg) {
    this.typedocDirectory = dirPathArg;
  }

  /**
   * Compile the project's docs with typedoc.
   * @param options.publicSubdir - optional subdirectory of public/ to emit into.
   */
  public async compile(options?: { publicSubdir?: string }) {
    // Temporary tsconfig content; `include` is filled below based on which
    // source directories exist.
    // NOTE(review): `include: []` may be inferred as never[] under strict TS —
    // the push() calls below suggest the project compiles without that check.
    const data = {
      compilerOptions: {
        experimentalDecorators: true,
        useDefineForClassFields: false,
        target: 'ES2022',
        module: 'NodeNext',
        moduleResolution: 'NodeNext',
        esModuleInterop: true,
        verbatimModuleSyntax: true,
        skipLibCheck: true,
      },
      include: [],
    };
    // Entry-point directory: ts/ wins over ts_web/ when both exist.
    let startDirectory = '';
    if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts')).exists()) {
      data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
      startDirectory = 'ts';
    }

    if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts_web')).exists()) {
      data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
      if (!startDirectory) {
        startDirectory = 'ts_web';
      }
    }

    // Persist the temporary tsconfig where the typedoc invocation expects it.
    await plugins.fsInstance.file(paths.tsconfigFile).encoding('utf8').write(JSON.stringify(data));
    let targetDir = paths.publicDir;
    if (options?.publicSubdir) {
      targetDir = plugins.path.join(targetDir, options.publicSubdir);
    }
    await this.smartshellInstance.exec(
      `typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
    );
    // Clean up the temporary tsconfig regardless of output location.
    await plugins.fsInstance.file(paths.tsconfigFile).delete();
  }
}
|
||||||
80
ts/cli.ts
Normal file
80
ts/cli.ts
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as paths from './paths.js';
|
||||||
|
import { logger } from './logging.js';
|
||||||
|
|
||||||
|
import { TypeDoc } from './classes.typedoc.js';
|
||||||
|
import { AiDoc } from './classes.aidoc.js';
|
||||||
|
|
||||||
|
/**
 * Entry point for the tsdoc CLI: registers all commands on a Smartcli
 * instance and starts argument parsing.
 */
export const run = async () => {
  const tsdocCli = new plugins.smartcli.Smartcli();

  // Default (no subcommand): auto-detect the docs format and dispatch.
  tsdocCli.standardCommand().subscribe(async (argvArg) => {
    logger.log('warn', `Auto detecting environment!`);
    switch (true) {
      case await TypeDoc.isTypeDocDir(paths.cwd):
        logger.log('ok', `Detected TypeDoc compliant directory at ${paths.cwd}`);
        tsdocCli.triggerCommand('typedoc', argvArg);
        break;
      default:
        logger.log('error', `Cannot determine docs format at ${paths.cwd}`);
    }
  });

  // `typedoc`: compile API docs; --publicSubdir selects a subfolder of public/.
  tsdocCli.addCommand('typedoc').subscribe(async (argvArg) => {
    const typeDocInstance = new TypeDoc(paths.cwd);
    await typeDocInstance.compile({
      publicSubdir: argvArg.publicSubdir,
    });
  });

  // `aidoc`: AI-generate both the readme and the description/keywords.
  tsdocCli.addCommand('aidoc').subscribe(async (argvArg) => {
    const aidocInstance = new AiDoc();
    await aidocInstance.start();

    logger.log('info', `Generating new readme...`);
    logger.log('info', `This may take some time...`);
    await aidocInstance.buildReadme(paths.cwd);
    logger.log('info', `Generating new keywords...`);
    logger.log('info', `This may take some time...`);
    await aidocInstance.buildDescription(paths.cwd);
  });

  // `readme`: AI-generate only the readme.
  tsdocCli.addCommand('readme').subscribe(async (argvArg) => {
    const aidocInstance = new AiDoc();
    await aidocInstance.start();

    logger.log('info', `Generating new readme...`);
    logger.log('info', `This may take some time...`);
    await aidocInstance.buildReadme(paths.cwd);
  });

  // `description`: AI-generate only the description and keywords.
  tsdocCli.addCommand('description').subscribe(async (argvArg) => {
    const aidocInstance = new AiDoc();
    await aidocInstance.start();

    logger.log('info', `Generating new description and keywords...`);
    logger.log('info', `This may take some time...`);
    await aidocInstance.buildDescription(paths.cwd);
  });

  // `commit`: AI-generate a commit message object and print it as JSON.
  tsdocCli.addCommand('commit').subscribe(async (argvArg) => {
    const aidocInstance = new AiDoc();
    await aidocInstance.start();

    logger.log('info', `Generating commit message...`);
    logger.log('info', `This may take some time...`);
    const commitObject = await aidocInstance.buildNextCommitObject(paths.cwd);

    logger.log('ok', `Commit message generated:`);
    console.log(JSON.stringify(commitObject, null, 2));
  });

  // `test`: run typedoc, then remove public/ when the process exits.
  tsdocCli.addCommand('test').subscribe((argvArg) => {
    tsdocCli.triggerCommand('typedoc', argvArg);
    // NOTE(review): async work in a process 'exit' handler may not complete —
    // Node only runs synchronous code on 'exit'; confirm the cleanup actually runs.
    process.on('exit', async () => {
      await plugins.fsInstance.directory(paths.publicDir).recursive().delete();
    });
  });

  tsdocCli.startParse();
};
|
||||||
14
ts/index.ts
14
ts/index.ts
@@ -1,6 +1,12 @@
|
|||||||
import * as early from '@pushrocks/early';
|
import * as early from '@push.rocks/early';
|
||||||
early.start('tsdoc');
|
early.start('tsdoc');
|
||||||
import * as plugins from './tsdoc.plugins';
|
import * as plugins from './plugins.js';
|
||||||
import * as cli from './tsdoc.cli';
|
import * as cli from './cli.js';
|
||||||
early.stop();
|
early.stop();
|
||||||
cli.run();
|
|
||||||
|
export const runCli = async () => {
|
||||||
|
await cli.run();
|
||||||
|
};
|
||||||
|
|
||||||
|
// exports
|
||||||
|
export * from './classes.aidoc.js';
|
||||||
|
|||||||
6
ts/logging.ts
Normal file
6
ts/logging.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import { commitinfo } from './00_commitinfo_data.js';
import * as plugins from './plugins.js';

// Shared logger for the whole package, initialized from commit metadata.
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);

// Send log output to the local (console) destination.
logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());
|
||||||
16
ts/paths.ts
Normal file
16
ts/paths.ts
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
import * as plugins from './plugins.js';

// dirs
// Root of this package (one level above this compiled module).
export const packageDir = plugins.path.join(
  plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
  '../',
);
// Working directory of the project being documented.
export const cwd = process.cwd();
// Locally installed CLI binaries (e.g. typedoc) of this package.
export const binDir = plugins.path.join(packageDir, './node_modules/.bin');
// Bundled assets shipped with this package.
export const assetsDir = plugins.path.join(packageDir, './assets');
// Docs output directory inside the target project.
export const publicDir = plugins.path.join(cwd, './public');
// TypeScript source directory of the target project.
export const tsDir = plugins.path.join(cwd, './ts');

// files
// Temporary tsconfig written during compilation and deleted afterwards.
export const tsconfigFile = plugins.path.join(assetsDir, './tsconfig.json');
// TypeDoc options file shipped in assets.
export const typedocOptionsFile = plugins.path.join(assetsDir, './typedoc.json');
|
||||||
58
ts/plugins.ts
Normal file
58
ts/plugins.ts
Normal file
@@ -0,0 +1,58 @@
|
|||||||
|
// Central dependency hub: every external module is imported here and
// re-exported, so the rest of the package references `plugins.*`.

// node native
import * as path from 'path';

export { path };

// pushrocks scope
import * as smartconfig from '@push.rocks/smartconfig';
import * as qenv from '@push.rocks/qenv';
import * as smartagent from '@push.rocks/smartagent';
import * as smartagentTools from '@push.rocks/smartagent/tools';
import * as smartai from '@push.rocks/smartai';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartfs from '@push.rocks/smartfs';
import * as smartgit from '@push.rocks/smartgit';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartlog from '@push.rocks/smartlog';
import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local';
import * as smartpath from '@push.rocks/smartpath';
import * as smartshell from '@push.rocks/smartshell';
import * as smarttime from '@push.rocks/smarttime';

export {
  smartconfig,
  qenv,
  smartagent,
  smartagentTools,
  smartai,
  smartcli,
  smartdelay,
  smartfile,
  smartfs,
  smartgit,
  smartinteract,
  smartlog,
  smartlogDestinationLocal,
  smartpath,
  smartshell,
  smarttime,
};

// Create a shared SmartFs instance for filesystem operations
const smartFsNodeProvider = new smartfs.SmartFsProviderNode();
export const fsInstance = new smartfs.SmartFs(smartFsNodeProvider);

// Create a shared SmartFileFactory for in-memory file operations
export const smartfileFactory = smartfile.SmartFileFactory.nodeFs();

// @git.zone scope
import * as tspublish from '@git.zone/tspublish';

export { tspublish };

// third party scope
import * as typedoc from 'typedoc';

export { typedoc };
|
||||||
@@ -1,67 +0,0 @@
|
|||||||
import * as plugins from './tsdoc.plugins';
|
|
||||||
import * as paths from './tsdoc.paths';
|
|
||||||
|
|
||||||
export class MkDocs {
|
|
||||||
public smartshellInstance = new plugins.smartshell.Smartshell({
|
|
||||||
executor: 'bash',
|
|
||||||
pathDirectories: [paths.binDir]
|
|
||||||
});
|
|
||||||
|
|
||||||
public static async isMkDocsDir(dirPathArg: string): Promise<boolean> {
|
|
||||||
const result = await plugins.smartfile.fs.fileExists(
|
|
||||||
plugins.path.join(dirPathArg, 'mkdocs.yml')
|
|
||||||
);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async handleCommand(argvArg) {
|
|
||||||
const mkdocsInstance = new MkDocs(paths.cwd);
|
|
||||||
switch (true) {
|
|
||||||
case argvArg.serve:
|
|
||||||
await mkdocsInstance.serve();
|
|
||||||
break;
|
|
||||||
case argvArg.publish:
|
|
||||||
await mkdocsInstance.publish();
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
await mkdocsInstance.compile();
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Instance
|
|
||||||
public typedocDirectory: string;
|
|
||||||
constructor(dirPathArg) {
|
|
||||||
this.typedocDirectory = dirPathArg;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async update() {
|
|
||||||
await this.smartshellInstance.exec(
|
|
||||||
`docker pull registry.gitlab.com/hosttoday/ht-docker-mkdocs`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async compile() {
|
|
||||||
await this.update();
|
|
||||||
await this.smartshellInstance.exec(`rm -rf public/`);
|
|
||||||
await this.smartshellInstance.exec(
|
|
||||||
`docker run --rm -it -p 8000:8000 -v ${
|
|
||||||
paths.cwd
|
|
||||||
}:/docs registry.gitlab.com/hosttoday/ht-docker-mkdocs build`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async serve() {
|
|
||||||
await this.update();
|
|
||||||
await this.smartshellInstance.exec(
|
|
||||||
`docker run --rm -it -p 8000:8000 -v ${
|
|
||||||
paths.cwd
|
|
||||||
}:/docs registry.gitlab.com/hosttoday/ht-docker-mkdocs`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async publish() {
|
|
||||||
await this.compile();
|
|
||||||
await this.smartshellInstance.exec(`gitzone commit`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
import * as plugins from './tsdoc.plugins';
|
|
||||||
import * as paths from './tsdoc.paths';
|
|
||||||
|
|
||||||
export class TypeDoc {
|
|
||||||
public smartshellInstance = new plugins.smartshell.Smartshell({
|
|
||||||
executor: 'bash',
|
|
||||||
pathDirectories: [paths.binDir]
|
|
||||||
});
|
|
||||||
|
|
||||||
// Static
|
|
||||||
public static async isTypeDocDir(dirPathArg: string): Promise<boolean> {
|
|
||||||
const result = await plugins.smartfile.fs.fileExists(
|
|
||||||
plugins.path.join(dirPathArg, 'mkdocs.yml')
|
|
||||||
);
|
|
||||||
return !result;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Instance
|
|
||||||
public typedocDirectory: string;
|
|
||||||
constructor(dirPathArg) {
|
|
||||||
this.typedocDirectory = dirPathArg;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async compile() {
|
|
||||||
await this.smartshellInstance.exec(
|
|
||||||
`typedoc --module "commonjs" --target "ES2016" --out public/ ts/`
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
import * as plugins from './tsdoc.plugins';
|
|
||||||
import * as paths from './tsdoc.paths';
|
|
||||||
import { logger } from './tsdoc.logging';
|
|
||||||
|
|
||||||
import { TypeDoc } from './tsdoc.classes.typedoc';
|
|
||||||
import { MkDocs } from './tsdoc.classes.mkdocs';
|
|
||||||
|
|
||||||
export const run = async () => {
|
|
||||||
const tsdocCli = new plugins.smartcli.Smartcli();
|
|
||||||
tsdocCli.addCommand('typedoc').subscribe(async argvArg => {
|
|
||||||
const typeDocInstance = new TypeDoc(paths.cwd);
|
|
||||||
await typeDocInstance.compile();
|
|
||||||
});
|
|
||||||
|
|
||||||
tsdocCli.addCommand('mkdocs').subscribe(async argvArg => {
|
|
||||||
await MkDocs.handleCommand(argvArg);
|
|
||||||
});
|
|
||||||
|
|
||||||
tsdocCli.standardTask().subscribe(async argvArg => {
|
|
||||||
logger.log('warn', `Auto detecting environment!`);
|
|
||||||
switch (true) {
|
|
||||||
case await TypeDoc.isTypeDocDir(paths.cwd):
|
|
||||||
logger.log('ok', `Detected TypeDoc compliant directory at ${paths.cwd}`);
|
|
||||||
tsdocCli.trigger('typedoc');
|
|
||||||
break;
|
|
||||||
case await MkDocs.isMkDocsDir(paths.cwd):
|
|
||||||
logger.log('ok', `Detected MkDocs compliant directory at ${paths.cwd}`);
|
|
||||||
tsdocCli.trigger('mkdocs');
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
logger.log('error', `Cannot determine docs format at ${paths.cwd}`);
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
tsdocCli.startParse();
|
|
||||||
};
|
|
||||||
@@ -1,15 +0,0 @@
|
|||||||
import * as plugins from './tsdoc.plugins';
|
|
||||||
|
|
||||||
export const logger = new plugins.smartlog.Smartlog({
|
|
||||||
logContext: {
|
|
||||||
company: 'Some Company',
|
|
||||||
companyunit: 'Some CompanyUnit',
|
|
||||||
containerName: 'Some Containername',
|
|
||||||
environment: 'local',
|
|
||||||
runtime: 'node',
|
|
||||||
zone: 'gitzone'
|
|
||||||
},
|
|
||||||
minimumLogLevel: 'silly'
|
|
||||||
});
|
|
||||||
|
|
||||||
logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
import * as plugins from './tsdoc.plugins';
|
|
||||||
|
|
||||||
export const packageDir = plugins.path.join(__dirname, '../');
|
|
||||||
export const cwd = process.cwd();
|
|
||||||
export const binDir = plugins.path.join(packageDir, './node_modules/.bin');
|
|
||||||
@@ -1,18 +0,0 @@
|
|||||||
// node native
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
export { path };
|
|
||||||
|
|
||||||
// pushrocks scope
|
|
||||||
import * as smartcli from '@pushrocks/smartcli';
|
|
||||||
import * as smartfile from '@pushrocks/smartfile';
|
|
||||||
import * as smartlog from '@pushrocks/smartlog';
|
|
||||||
import * as smartlogDestinationLocal from '@pushrocks/smartlog-destination-local';
|
|
||||||
import * as smartshell from '@pushrocks/smartshell';
|
|
||||||
|
|
||||||
export { smartcli, smartfile, smartlog, smartlogDestinationLocal, smartshell };
|
|
||||||
|
|
||||||
// third party scope
|
|
||||||
import * as typedoc from 'typedoc';
|
|
||||||
|
|
||||||
export { typedoc };
|
|
||||||
14
tsconfig.json
Normal file
14
tsconfig.json
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
{
|
||||||
|
"compilerOptions": {
|
||||||
|
"experimentalDecorators": true,
|
||||||
|
"useDefineForClassFields": false,
|
||||||
|
"target": "ES2022",
|
||||||
|
"module": "NodeNext",
|
||||||
|
"moduleResolution": "NodeNext",
|
||||||
|
"esModuleInterop": true,
|
||||||
|
"verbatimModuleSyntax": true
|
||||||
|
},
|
||||||
|
"exclude": [
|
||||||
|
"dist_*/**/*.d.ts"
|
||||||
|
]
|
||||||
|
}
|
||||||
17
tslint.json
17
tslint.json
@@ -1,17 +0,0 @@
|
|||||||
{
|
|
||||||
"extends": ["tslint:latest", "tslint-config-prettier"],
|
|
||||||
"rules": {
|
|
||||||
"semicolon": [true, "always"],
|
|
||||||
"no-console": false,
|
|
||||||
"ordered-imports": false,
|
|
||||||
"object-literal-sort-keys": false,
|
|
||||||
"member-ordering": {
|
|
||||||
"options":{
|
|
||||||
"order": [
|
|
||||||
"static-method"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"defaultSeverity": "warning"
|
|
||||||
}
|
|
||||||
Reference in New Issue
Block a user