Compare commits

...

142 Commits

Author SHA1 Message Date
fe5121ec9c 1.5.2 2025-09-07 07:54:04 +00:00
c084b20390 fix(package): Bump dependencies, refine test script and imports, and overhaul README and docs 2025-09-07 07:54:04 +00:00
6f024536a8 1.5.1 2025-08-16 11:20:39 +00:00
2405fb3370 fix(aidoc): Bump dependencies, add pnpm workspace config, and add AiDoc.stop() 2025-08-16 11:20:39 +00:00
8561940b8c 1.5.0 2025-05-14 11:27:38 +00:00
ab273ea75c feat(docs): Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions 2025-05-14 11:27:38 +00:00
620737566f 1.4.5 2025-05-13 21:20:10 +00:00
23453bf16b fix(dependencies): Upgrade various dependency versions and update package manager configuration 2025-05-13 21:20:10 +00:00
84947cfb80 1.4.4 2025-02-25 18:33:51 +00:00
1a9ac9091d fix(dependencies): Update dependencies to latest versions 2025-02-25 18:33:51 +00:00
88b93b8b83 1.4.3 2025-01-14 17:42:19 +01:00
77279a9135 fix(aidocs_classes): Improve readme generation instructions to ensure correct markdown formatting. 2025-01-14 17:42:19 +01:00
7426addbdd 1.4.2 2024-10-28 21:45:28 +01:00
58d060d729 fix(cli): Ensure async completion for aidoc readme and description generation 2024-10-28 21:45:28 +01:00
370cbfe6f3 1.4.1 2024-10-28 21:20:51 +01:00
2adb4e8cb0 fix(readme): Correct async call to getModuleSubDirs in readme generation. 2024-10-28 21:20:50 +01:00
e8608b1cae 1.4.0 2024-10-28 21:16:00 +01:00
33fa7fa337 feat(aidocs): Added support for building readmes for sub-modules in aidocs 2024-10-28 21:15:59 +01:00
2946bcaf49 1.3.12 2024-06-24 00:01:37 +02:00
d962e17c18 fix(aidocs): Fix changelog generation by handling leading newlines 2024-06-24 00:01:36 +02:00
a22c400355 1.3.11 2024-06-23 23:59:44 +02:00
08b7305ef0 fix(core): Fixed new changelog formatting issue to retain consistent spacing. 2024-06-23 23:59:43 +02:00
d7b462fda9 1.3.10 2024-06-23 23:58:32 +02:00
01e6c15626 fix(aidocs_classes): Fix changelog format to remove extra newline 2024-06-23 23:58:31 +02:00
94a066247f 1.3.9 2024-06-23 23:50:51 +02:00
7de157ccb3 fix(aidoc): Fix changelog generation by properly stripping markdown code fences 2024-06-23 23:50:51 +02:00
d783965b25 1.3.8 2024-06-23 23:29:37 +02:00
07f1413d5e fix(changelog): Fix changelog generation by properly stripping markdown code fences 2024-06-23 23:29:36 +02:00
d7bf45f6b5 1.3.7 2024-06-23 23:24:09 +02:00
3eb64bcb5d fix(aidoc): Update to include package-lock.json in uncommitted changes check 2024-06-23 23:24:09 +02:00
e24a027fdd 1.3.6 2024-06-23 23:20:02 +02:00
3f451cfcb1 fix(commit): Fixed issue with retrieving uncommitted diffs in git repository 2024-06-23 23:20:01 +02:00
e355c51c8d 1.3.5 2024-06-23 23:05:47 +02:00
b0fcaba2c3 fix(aidocs_classes): Refactor and enhance changelog formatting 2024-06-23 23:05:47 +02:00
4ea205e11b 1.3.4 2024-06-23 22:48:06 +02:00
f819e7b521 fix(aidocs_classes): Fix changelog formatting issue in commit class 2024-06-23 22:48:05 +02:00
d4903f32f0 1.3.3 2024-06-23 22:47:28 +02:00
34102a2544 fix(aidocs_classes): Fix minor bugs and update dependencies in aidocs_classes 2024-06-23 22:47:27 +02:00
5e2171dbfd 1.3.2 2024-06-23 19:55:40 +02:00
70d4af653a fix(aidocs_classes): Fix typo in INextCommitObject interface and update date format in changelog generation. 2024-06-23 19:55:39 +02:00
06f6fdef98 1.3.1 2024-06-23 19:47:44 +02:00
b6fb7bf029 fix(aidocs_classes): Fix typo in INextCommitObject interface 2024-06-23 19:47:43 +02:00
4c83725120 1.3.0 2024-06-23 18:38:35 +02:00
a060cd1a03 feat(core): Added smarttime dependency and improved changelog generation 2024-06-23 18:38:34 +02:00
e8372effc7 1.2.4 2024-06-23 17:37:20 +02:00
571249705e fix(logging): Refactor logger initialization to use commitinfo data 2024-06-23 17:37:19 +02:00
927cd961fd 1.2.3 2024-06-23 17:36:03 +02:00
63b4fcc232 fix(aidocs): Fix bug in AiDoc class causing undefined token handling 2024-06-23 17:36:02 +02:00
4188ed7f24 1.2.2 2024-06-23 16:46:59 +02:00
1feddc6e85 fix(aidocs): Fix bug in AiDoc class causing undefined token handling 2024-06-23 16:46:58 +02:00
499baebc18 1.2.1 2024-06-23 16:46:11 +02:00
01fc0d0c6e fix(core): Fixed usage of plugins in project context and readme generation 2024-06-23 16:46:10 +02:00
b6c9cea5d1 1.2.0 2024-06-23 16:45:21 +02:00
a949039192 feat(aidocs_classes): Enhance changelog generation by supporting complete generation in the absence of previous changelog files 2024-06-23 16:45:20 +02:00
11bde9d756 1.1.42 2024-06-23 16:43:24 +02:00
eac26521c6 fix(aidoc_classes): Improve commit message generation by handling empty diffs and updating changelog instructions 2024-06-23 16:43:23 +02:00
e1323569f5 1.1.41 2024-06-23 13:49:15 +02:00
41e4bd6689 fix(aidoc_classes): Improve commit message generation by handling empty diffs and updating changelog instructions 2024-06-23 13:49:14 +02:00
164a58ec59 1.1.40 2024-06-23 13:04:48 +02:00
e1c0f82fe8 fix(core): update 2024-06-23 13:04:47 +02:00
8a0046818b 1.1.39 2024-06-23 12:38:58 +02:00
97fa9db32f fix(core): update 2024-06-23 12:38:58 +02:00
d61de9b615 1.1.38 2024-06-23 12:27:27 +02:00
fba54035ea fix(core): update 2024-06-23 12:27:26 +02:00
9a3d8588a8 1.1.37 2024-06-23 12:20:07 +02:00
eb8f8fa70a fix(core): update 2024-06-23 12:20:06 +02:00
afe7b5e99e 1.1.36 2024-06-23 12:11:07 +02:00
e074562362 fix(core): update 2024-06-23 12:11:06 +02:00
240d6bb314 1.1.35 2024-06-23 12:03:26 +02:00
2d0839a1da fix(core): update 2024-06-23 12:03:25 +02:00
9f250ae2b3 1.1.34 2024-06-23 11:59:39 +02:00
1223bb8567 fix(core): update 2024-06-23 11:59:38 +02:00
9395cfc166 1.1.33 2024-06-22 21:21:53 +02:00
3b4c6bd97f fix(core): update 2024-06-22 21:21:52 +02:00
5d2c9e6158 1.1.32 2024-06-22 19:13:58 +02:00
89977038ec fix(core): update 2024-06-22 19:13:57 +02:00
b753c206b0 1.1.31 2024-06-22 13:20:56 +02:00
1965bd9b47 fix(core): update 2024-06-22 13:20:55 +02:00
138d71e8c5 1.1.30 2024-06-22 13:11:23 +02:00
15397e8609 fix(core): update 2024-06-22 13:11:22 +02:00
1489420e47 1.1.29 2024-05-17 17:41:50 +02:00
5e3b122b59 fix(core): update 2024-05-17 17:41:49 +02:00
02fa9215d3 1.1.28 2024-05-17 17:38:35 +02:00
32f12c67cf fix(core): update 2024-05-17 17:38:35 +02:00
be53225bb1 1.1.27 2024-04-20 23:14:14 +02:00
a5db530879 fix(core): update 2024-04-20 23:14:13 +02:00
c5b07c2504 1.1.26 2024-04-14 02:23:56 +02:00
1bd215d18d fix(core): update 2024-04-14 02:23:56 +02:00
e5a348f57c 1.1.25 2024-04-14 02:11:39 +02:00
d243880d55 fix(core): update 2024-04-14 02:11:38 +02:00
c1bd85fc58 1.1.24 2024-04-14 00:59:25 +02:00
b81220b2ba fix(core): update 2024-04-14 00:59:24 +02:00
ca26d9e98d 1.1.23 2024-04-14 00:52:04 +02:00
61b6161470 fix(core): update 2024-04-14 00:52:04 +02:00
463183bd3a 1.1.22 2024-04-14 00:47:50 +02:00
069a74d2b4 fix(core): update 2024-04-14 00:47:49 +02:00
87c1ae53b3 1.1.21 2024-04-14 00:40:57 +02:00
774aea55ff fix(core): update 2024-04-14 00:40:57 +02:00
ee7038e0d7 1.1.20 2024-04-13 16:22:34 +02:00
7c3bae4c6e fix(core): update 2024-04-13 16:22:33 +02:00
69d59e02f8 1.1.19 2024-04-13 16:19:14 +02:00
b4b6797fdf fix(core): update 2024-04-13 16:19:14 +02:00
4bbb154c4f 1.1.18 2024-04-12 15:35:10 +02:00
eec33e29d3 fix(core): update 2024-04-12 15:35:09 +02:00
c33a7d37ee 1.1.17 2024-04-12 15:28:55 +02:00
084b321e6a fix(core): update 2024-04-12 15:28:55 +02:00
cf1cfbd647 1.1.16 2024-04-12 15:07:56 +02:00
489349e45a fix(core): update 2024-04-12 15:07:56 +02:00
c0c627fedb 1.1.15 2024-04-03 13:34:26 +02:00
8d4b278a5d fix(core): update 2024-04-03 13:34:26 +02:00
a0969912eb 1.1.14 2024-03-31 15:09:31 +02:00
39d64ffcf3 fix(core): update 2024-03-31 15:09:30 +02:00
529297bd09 1.1.13 2022-09-16 08:24:42 +02:00
4c16cb9c3e fix(core): update 2022-09-16 08:24:41 +02:00
3a6cdf5fb5 1.1.12 2022-09-16 08:22:57 +02:00
2460c89151 fix(core): update 2022-09-16 08:22:57 +02:00
3dae706a67 1.1.11 2022-09-16 08:17:29 +02:00
c150052380 fix(core): update 2022-09-16 08:17:28 +02:00
1d00a95885 1.1.10 2022-06-07 18:01:35 +02:00
d9bfba1b5f fix(core): update 2022-06-07 18:01:34 +02:00
c56db7d1d0 1.1.9 2022-06-07 17:54:00 +02:00
a2bcd1a1c5 fix(core): update 2022-06-07 17:54:00 +02:00
795ce9b014 1.1.8 2021-03-08 01:26:43 +00:00
9a84009f47 fix(core): update 2021-03-08 01:26:42 +00:00
6efe00abd9 1.1.7 2021-03-08 01:06:19 +00:00
d81b9dd213 fix(core): update 2021-03-08 01:06:18 +00:00
751a5b8630 1.1.6 2021-03-06 19:21:14 +00:00
3c9e421351 fix(core): update 2021-03-06 19:21:13 +00:00
fe05144a56 1.1.5 2021-01-29 20:50:18 +00:00
ad7035e5e3 fix(core): update 2021-01-29 20:50:18 +00:00
49601f3bac 1.1.4 2020-11-27 12:55:24 +00:00
6c13622b33 fix(core): update 2020-11-27 12:55:23 +00:00
9021e9ae39 1.1.3 2020-11-27 12:37:24 +00:00
7289b77398 fix(core): update 2020-11-27 12:37:23 +00:00
ae90b8297f 1.1.2 2020-11-27 12:02:58 +00:00
d75a65ee46 fix(core): update 2020-11-27 12:02:57 +00:00
c28ff5212e 1.1.1 2020-11-24 20:27:51 +00:00
97bf1e3990 fix(core): update 2020-11-24 20:27:51 +00:00
769a22057f 1.0.22 2020-11-24 20:24:31 +00:00
b5910b6557 fix(core): update 2020-11-24 20:24:30 +00:00
f4ea7f0d0a 1.0.21 2019-09-23 16:26:38 +02:00
50eff3fbd5 fix(core): update 2019-09-23 16:26:38 +02:00
50 changed files with 15070 additions and 2207 deletions


@@ -0,0 +1,66 @@
name: Default (not tags)

on:
  push:
    tags-ignore:
      - '**'

env:
  IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Install pnpm and npmci
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
      - name: Run npm prepare
        run: npmci npm prepare
      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true
      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test
      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build


@@ -0,0 +1,124 @@
name: Default (tags)

on:
  push:
    tags:
      - '*'

env:
  IMAGE: code.foss.global/hosttoday/ht-docker-node:npmci
  NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
  NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
  NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
  NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
  NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}

jobs:
  security:
    runs-on: ubuntu-latest
    continue-on-error: true
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare
      - name: Audit production dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --prod
        continue-on-error: true
      - name: Audit development dependencies
        run: |
          npmci command npm config set registry https://registry.npmjs.org
          npmci command pnpm audit --audit-level=high --dev
        continue-on-error: true

  test:
    if: ${{ always() }}
    needs: security
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare
      - name: Test stable
        run: |
          npmci node install stable
          npmci npm install
          npmci npm test
      - name: Test build
        run: |
          npmci node install stable
          npmci npm install
          npmci npm build

  release:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare
      - name: Release
        run: |
          npmci node install stable
          npmci npm publish

  metadata:
    needs: test
    if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
    runs-on: ubuntu-latest
    container:
      image: ${{ env.IMAGE }}
    continue-on-error: true
    steps:
      - uses: actions/checkout@v3
      - name: Prepare
        run: |
          pnpm install -g pnpm
          pnpm install -g @ship.zone/npmci
          npmci npm prepare
      - name: Code quality
        run: |
          npmci command npm install -g typescript
          npmci npm install
      - name: Trigger
        run: npmci trigger
      - name: Build docs and upload artifacts
        run: |
          npmci node install stable
          npmci npm install
          pnpm install -g @git.zone/tsdoc
          npmci command tsdoc
        continue-on-error: true

7 .gitignore vendored

@@ -15,8 +15,7 @@ node_modules/
# builds
dist/
dist_web/
dist_serve/
dist_ts_web/
dist_*/
# custom
# custom
**/.claude/settings.local.json


@@ -1,125 +0,0 @@
# gitzone standard
image: hosttoday/ht-docker-node:npmci

cache:
  paths:
    - .npmci_cache/
  key: "$CI_BUILD_STAGE"

stages:
  - security
  - test
  - release
  - metadata

# ====================
# security stage
# ====================
mirror:
  stage: security
  script:
    - npmci git mirror
  tags:
    - docker
    - notpriv

snyk:
  stage: security
  script:
    - npmci npm prepare
    - npmci command npm install -g snyk
    - npmci command npm install --ignore-scripts
    - npmci command snyk test
  tags:
    - docker
    - notpriv

# ====================
# test stage
# ====================
testLTS:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install lts
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker
    - notpriv

testSTABLE:
  stage: test
  script:
    - npmci npm prepare
    - npmci node install stable
    - npmci npm install
    - npmci npm test
  coverage: /\d+.?\d+?\%\s*coverage/
  tags:
    - docker
    - notpriv

release:
  stage: release
  script:
    - npmci node install stable
    - npmci npm publish
  only:
    - tags
  tags:
    - docker
    - notpriv

# ====================
# metadata stage
# ====================
codequality:
  stage: metadata
  image: docker:stable
  allow_failure: true
  services:
    - docker:stable-dind
  script:
    - export SP_VERSION=$(echo "$CI_SERVER_VERSION" | sed 's/^\([0-9]*\)\.\([0-9]*\).*/\1-\2-stable/')
    - docker run
      --env SOURCE_CODE="$PWD"
      --volume "$PWD":/code
      --volume /var/run/docker.sock:/var/run/docker.sock
      "registry.gitlab.com/gitlab-org/security-products/codequality:$SP_VERSION" /code
  artifacts:
    paths: [codeclimate.json]
  tags:
    - docker
    - priv

trigger:
  stage: metadata
  script:
    - npmci trigger
  only:
    - tags
  tags:
    - docker
    - notpriv

pages:
  image: hosttoday/ht-docker-node:npmci
  stage: metadata
  script:
    - npmci command npm install -g @gitzone/tsdoc
    - npmci npm prepare
    - npmci npm install
    - npmci command tsdoc
  tags:
    - docker
    - notpriv
  only:
    - tags
  artifacts:
    expire_in: 1 week
    paths:
      - public
  allow_failure: true

13 .snyk

@@ -1,13 +0,0 @@
# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities.
version: v1.13.5
# ignores vulnerabilities until expiry date; change duration by modifying expiry date
ignore:
  SNYK-JS-MARKED-174116:
    - typedoc > marked:
        reason: None given
        expires: '2019-06-13T06:50:33.594Z'
  'npm:shelljs:20140723':
    - typedoc > shelljs:
        reason: None given
        expires: '2019-06-13T06:50:33.594Z'
patch: {}

11 .vscode/launch.json vendored Normal file

@@ -0,0 +1,11 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "command": "npm test",
      "name": "Run npm test",
      "request": "launch",
      "type": "node-terminal"
    }
  ]
}

26 .vscode/settings.json vendored Normal file

@@ -0,0 +1,26 @@
{
  "json.schemas": [
    {
      "fileMatch": ["/npmextra.json"],
      "schema": {
        "type": "object",
        "properties": {
          "npmci": {
            "type": "object",
            "description": "settings for npmci"
          },
          "gitzone": {
            "type": "object",
            "description": "settings for gitzone",
            "properties": {
              "projectType": {
                "type": "string",
                "enum": ["website", "element", "service", "npm", "wcc"]
              }
            }
          }
        }
      }
    }
  ]
}

0 assets/.gitkeep Normal file


@@ -1,7 +0,0 @@
{
  "compilerOptions": {
    "target": "es2017",
    "module": "commonjs",
    "esModuleInterop": true
  }
}

146 changelog.md Normal file

@@ -0,0 +1,146 @@
# Changelog
## 2025-09-07 - 1.5.2 - fix(package)
Bump dependencies, refine test script and imports, and overhaul README and docs
- Bumped multiple dependencies and devDependencies (including @git.zone/tspublish, @git.zone/tsbuild, @git.zone/tstest, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartfile, @push.rocks/smartlog, @push.rocks/smartshell, gpt-tokenizer, typedoc, etc.).
- Updated test script to run tstest with verbose, logfile and increased timeout; adjusted testCli script invocation.
- Fixed test import in test/test.aidoc.nonci.ts to use @git.zone/tstest tapbundle.
- Large README rewrite: reorganized and expanded content, added quick start, CLI commands, examples, configuration, troubleshooting and usage sections.
- Minor clarification added to commit prompt in ts/aidocs_classes/commit.ts (text cleanup and guidance).
## 2025-08-16 - 1.5.1 - fix(aidoc)
Bump dependencies, add pnpm workspace config, and add AiDoc.stop()
- Bumped multiple dependencies and devDependencies in package.json (notable upgrades: @git.zone/tsbuild, @git.zone/tspublish, @push.rocks/npmextra, @push.rocks/qenv, @push.rocks/smartai, @push.rocks/smartfile, @push.rocks/smartgit, @push.rocks/smartlog, @push.rocks/smartpath, @push.rocks/smartshell, typedoc, typescript).
- Added pnpm-workspace.yaml with onlyBuiltDependencies (esbuild, mongodb-memory-server, puppeteer, sharp).
- Added AiDoc.stop() to properly stop the OpenAI provider (resource/client shutdown).
- Updated packageManager field in package.json to a newer pnpm version/hash.
## 2025-05-14 - 1.5.0 - feat(docs)
Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions
- Revised descriptions in package.json and npmextra.json to emphasize comprehensive documentation capabilities
- Expanded README with detailed installation options and extended usage examples for both CLI and API-like integrations
- Added new dependency (gpt-tokenizer) to support token counting for AI context building
- Adjusted keywords to better reflect project functionalities such as commit message automation and context trimming
## 2025-05-13 - 1.4.5 - fix(dependencies)
Upgrade various dependency versions and update package manager configuration
- Bump @git.zone/tsbuild from ^2.1.80 to ^2.3.2
- Upgrade @push.rocks/tapbundle from ^5.0.23 to ^6.0.3
- Update @types/node from ^22.8.1 to ^22.15.17
- Bump @push.rocks/smartai from ^0.4.2 to ^0.5.4
- Upgrade @push.rocks/smartlog from ^3.0.7 to ^3.0.9
- Update typedoc from ^0.27.9 to ^0.28.4
- Bump typescript from ^5.5.2 to ^5.8.3
- Add packageManager field with pnpm@10.10.0 specification
## 2025-02-25 - 1.4.4 - fix(dependencies)
Update dependencies to latest versions
- Updated '@push.rocks/smartai' from '^0.0.17' to '^0.4.2'
- Updated 'typedoc' from '^0.26.1' to '^0.27.9'
## 2025-01-14 - 1.4.3 - fix(aidocs_classes)
Improve readme generation instructions to ensure correct markdown formatting.
- Added guidance to avoid using backticks at the beginning and end of readme generation to prevent markdown issues.
- Clarified that the output is directly written to readme.md and backticks should only be used for code blocks.
## 2024-10-28 - 1.4.2 - fix(cli)
Ensure async completion for aidoc readme and description generation
- Added await statements for asynchronous methods buildReadme and buildDescription in the aidoc command.
## 2024-10-28 - 1.4.1 - fix(readme)
Correct async call to getModuleSubDirs in readme generation.
- Fixed an issue with asynchronous handling in readme generation for submodules.
- Ensured that getModuleSubDirs function is called with await to handle promises properly.
## 2024-10-28 - 1.4.0 - feat(aidocs)
Added support for building readmes for sub-modules in aidocs
- Updated the `Readme` class to handle monorepo projects by generating readmes for sub-modules.
- Integrated `tspublish` to identify sub-modules for readme generation.
## 2024-06-24 - 1.3.12 - fix(aidocs)
Fix changelog generation by handling leading newlines
- Fixed handling of leading newlines in the changelog to ensure proper formatting.
## 2024-06-23 - 1.3.11 - fix(core)
Fixed new changelog formatting issue to retain consistent spacing.
- Adjusted the new changelog generation to ensure consistent spacing for improved readability.
## 2024-06-23 - 1.3.10 - fix(aidocs_classes)
Fix changelog format to remove extra newline
- Updated `ts/aidocs_classes/commit.ts` to fix the changelog format.
## 2024-06-23 - 1.3.9 - fix(aidoc)
Fix changelog generation by properly stripping markdown code fences
- Corrected the changelog generation code to ensure markdown code fences are properly stripped.
## 2024-06-23 - 1.3.8 - fix(changelog)
Fix changelog generation by properly stripping markdown code fences
- Corrected the changelog generation code to ensure markdown code fences are properly stripped.
## 2024-06-23 - 1.3.7 - fix(aidoc)
Update to include package-lock.json in uncommitted changes check
- Modified the getUncommittedDiff method call in commit.ts to include package-lock.json along with pnpm-lock.yaml
## 2024-06-23 - 1.3.6 - fix(commit)
Fixed issue with retrieving uncommitted diffs in git repository
- Revised logic to correctly handle uncommitted changes by using an array for `getUncommittedDiff` method
- Ensured proper handling and representation of uncommitted changes in the output
## 2024-06-23 - 1.3.5 - fix(aidocs_classes)
Refactor and enhance changelog formatting
- Updated the `commit.ts` file to improve the changelog formatting and ensure consistency.
- Enhanced the changelog instructions to include summarizing messages for omitted commits.
- Removed unnecessary console logging in `projectcontext.ts`.
```markdown
## 2024-06-23 - 1.3.3 - fix(aidocs_classes)
Fix changelog formatting issue in commit class
## 2024-06-23 - 1.3.2 - fix(aidocs_classes)
Fix minor bugs and update dependencies in aidocs_classes
## 2024-06-23 - 1.3.1 - fix(aidocs_classes)
Fix typo in INextCommitObject interface and update date format in changelog generation.
## 2024-06-23 - 1.3.0 - fix(aidocs_classes)
Fix typo in INextCommitObject interface
## 2024-06-23 - 1.2.4 - feat(core)
Added smarttime dependency and improved changelog generation
## 2024-06-23 - 1.2.3 - fix(logging)
Refactor logger initialization to use commitinfo data
## 2024-06-23 - 1.2.2 - fix(aidocs)
Fix bug in AiDoc class causing undefined token handling
## 2024-06-23 - 1.2.0 - fix(core)
Fixed usage of plugins in project context and readme generation
## 2024-06-23 - 1.1.42 - feat(aidocs_classes)
Enhance changelog generation by supporting complete generation in the absence of previous changelog files
## 2024-06-23 - 1.1.41 - fix(aidocs_classes)
Improve commit message generation by handling empty diffs and updating changelog instructions
```

4 cli.child.ts Normal file

@@ -0,0 +1,4 @@
#!/usr/bin/env node
process.env.CLI_CALL = 'true';
import * as cliTool from './ts/index.js';
cliTool.runCli();

3 cli.js

@@ -1,3 +1,4 @@
#!/usr/bin/env node
process.env.CLI_CALL = 'true';
require('./dist/index');
const cliTool = await import('./dist_ts/index.js');
cliTool.runCli();


@@ -1,4 +1,5 @@
#!/usr/bin/env node
process.env.CLI_CALL = 'true';
require('@gitzone/tsrun');
require('./ts/index');
import * as tsrun from '@git.zone/tsrun';
tsrun.runPath('./cli.child.js', import.meta.url);


@@ -1,17 +1,35 @@
{
"gitzone": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
"gitscope": "gitzone",
"gitrepo": "tsdoc",
"shortDescription": "a tool for better documentation",
"npmPackagename": "@gitzone/tsdoc",
"npmPackagename": "@git.zone/tsdoc",
"license": "MIT",
"projectDomain": "git.zone"
"projectDomain": "git.zone",
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
"keywords": [
"TypeScript",
"documentation",
"AI",
"CLI",
"README",
"TypeDoc",
"commit messages",
"automation",
"code analysis",
"context trimming",
"developer tools"
]
}
},
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
}
}

1797 package-lock.json generated
File diff suppressed because it is too large


@@ -1,47 +1,84 @@
{
"name": "@gitzone/tsdoc",
"version": "1.0.20",
"name": "@git.zone/tsdoc",
"version": "1.5.2",
"private": false,
"description": "a tool for better documentation",
"main": "dist/index.js",
"typings": "dist/index.d.ts",
"author": "Lossless GmbH",
"description": "A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.",
"type": "module",
"exports": {
".": "./dist_ts/index.js"
},
"author": "Task Venture Capital GmbH",
"license": "MIT",
"bin": {
"tsdoc": "cli.js"
},
"scripts": {
"test": "(tstest test/) && (node ./cli.ts.js) && rm -rf public/",
"build": "(tsbuild)",
"buildMkdocs": "(cd mkdocs/originalrepo && docker rmi -f mkdocs && docker build -t mkdocs .)",
"format": "(gitzone format)"
"test": "(tstest test/ --verbose --logfile --timeout 600) && npm run testCli",
"testCli": "(node ./cli.ts.js) && (node ./cli.ts.js aidocs)",
"build": "(tsbuild --web --allowimplicitany)",
"buildDocs": "tsdoc"
},
"devDependencies": {
"@gitzone/tsbuild": "^2.0.22",
"@gitzone/tstest": "^1.0.24",
"@pushrocks/tapbundle": "^3.0.11",
"@types/node": "^12.6.8",
"tslint": "^5.18.0",
"tslint-config-prettier": "^1.15.0"
"@git.zone/tsbuild": "^2.6.8",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tstest": "^2.3.6",
"@types/node": "^22.15.17"
},
"dependencies": {
"@pushrocks/early": "^3.0.3",
"@pushrocks/smartcli": "^3.0.7",
"@pushrocks/smartfile": "^7.0.4",
"@pushrocks/smartlog": "^2.0.19",
"@pushrocks/smartlog-destination-local": "^8.0.2",
"@pushrocks/smartshell": "^2.0.23",
"typedoc": "^0.14.2",
"typescript": "^3.5.3"
"@git.zone/tspublish": "^1.10.3",
"@push.rocks/early": "^4.0.3",
"@push.rocks/npmextra": "^5.3.3",
"@push.rocks/qenv": "^6.1.3",
"@push.rocks/smartai": "^0.5.11",
"@push.rocks/smartcli": "^4.0.11",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.2.7",
"@push.rocks/smartgit": "^3.2.1",
"@push.rocks/smartinteract": "^2.0.15",
"@push.rocks/smartlog": "^3.1.9",
"@push.rocks/smartlog-destination-local": "^9.0.2",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartshell": "^3.3.0",
"@push.rocks/smarttime": "^4.0.6",
"gpt-tokenizer": "^3.0.1",
"typedoc": "^0.28.12",
"typescript": "^5.9.2"
},
"files": [
"ts/*",
"ts_web/*",
"dist/*",
"dist_web/*",
"assets/*",
"ts/**/*",
"ts_web/**/*",
"dist/**/*",
"dist_*/**/*",
"dist_ts/**/*",
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
"npmextra.json",
"readme.md"
]
],
"browserslist": [
"last 1 chrome versions"
],
"keywords": [
"TypeScript",
"documentation",
"AI",
"CLI",
"README",
"TypeDoc",
"commit messages",
"automation",
"code analysis",
"context trimming",
"developer tools"
],
"repository": {
"type": "git",
"url": "git+https://gitlab.com/gitzone/tsdoc.git"
},
"bugs": {
"url": "https://gitlab.com/gitzone/tsdoc/issues"
},
"homepage": "https://gitlab.com/gitzone/tsdoc#readme",
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
}

11893 pnpm-lock.yaml generated Normal file
File diff suppressed because it is too large

5 pnpm-workspace.yaml Normal file

@@ -0,0 +1,5 @@
onlyBuiltDependencies:
- esbuild
- mongodb-memory-server
- puppeteer
- sharp

5 readme.hints.md Normal file

@@ -0,0 +1,5 @@
* module needs to be installed globally
* alternatively can be used through npx, if installed locally
* cli parameters are derived from ./ts/cli.ts
* this module is not intended for API use.
* Read carefully through the TypeScript files. Don't make stuff up.

315 readme.md

@@ -1,26 +1,301 @@
# @gitzone/tsdoc
a tool for better documentation
# @git.zone/tsdoc 🚀
**AI-Powered Documentation for TypeScript Projects**
## Availability and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@gitzone/tsdoc)
* [gitlab.com (source)](https://gitlab.com/gitzone/tsdoc)
* [github.com (source mirror)](https://github.com/gitzone/tsdoc)
* [docs (typedoc)](https://gitzone.gitlab.io/tsdoc/)
> Stop writing documentation. Let AI understand your code and do it for you.
## Status for master
[![build status](https://gitlab.com/gitzone/tsdoc/badges/master/build.svg)](https://gitlab.com/gitzone/tsdoc/commits/master)
[![coverage report](https://gitlab.com/gitzone/tsdoc/badges/master/coverage.svg)](https://gitlab.com/gitzone/tsdoc/commits/master)
[![npm downloads per month](https://img.shields.io/npm/dm/@gitzone/tsdoc.svg)](https://www.npmjs.com/package/@gitzone/tsdoc)
[![Known Vulnerabilities](https://snyk.io/test/npm/@gitzone/tsdoc/badge.svg)](https://snyk.io/test/npm/@gitzone/tsdoc)
[![TypeScript](https://img.shields.io/badge/TypeScript->=%203.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
[![node](https://img.shields.io/badge/node->=%2010.x.x-blue.svg)](https://nodejs.org/dist/latest-v10.x/docs/api/)
[![JavaScript Style Guide](https://img.shields.io/badge/code%20style-prettier-ff69b4.svg)](https://prettier.io/)
## What is tsdoc?
## Usage
`@git.zone/tsdoc` is a next-generation documentation tool that combines traditional TypeDoc generation with cutting-edge AI to create comprehensive, intelligent documentation for your TypeScript projects. It reads your code, understands it, and writes documentation that actually makes sense.
For further information read the linked docs at the top of this readme.
### ✨ Key Features
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy.html)
- **🤖 AI-Enhanced Documentation** - Leverages GPT-5 and other models to generate contextual READMEs
- **📚 TypeDoc Integration** - Classic API documentation generation when you need it
- **💬 Smart Commit Messages** - AI analyzes your changes and suggests meaningful commit messages
- **🎯 Context Optimization** - Intelligent token management for efficient AI processing
- **📦 Zero Config** - Works out of the box with sensible defaults
- **🔧 Highly Configurable** - Customize every aspect when needed
[![repo-footer](https://gitzone.gitlab.io/assets/repo-footer.svg)](https://maintainedby.lossless.com)
## Installation
```bash
# Global installation (recommended)
npm install -g @git.zone/tsdoc
# Or use with npx
npx @git.zone/tsdoc
```
## Quick Start
### Generate AI-Powered Documentation
```bash
# In your project root
tsdoc aidoc
```
That's it! tsdoc will analyze your entire codebase and generate:
- A comprehensive README.md
- Updated package.json description and keywords
- Smart documentation based on your actual code
### Generate Traditional TypeDoc
```bash
tsdoc typedoc --publicSubdir docs
```
### Get Smart Commit Messages
```bash
tsdoc commit
```
## CLI Commands
| Command | Description |
|---------|-------------|
| `tsdoc` | Auto-detects and runs appropriate documentation |
| `tsdoc aidoc` | Generate AI-enhanced documentation |
| `tsdoc typedoc` | Generate TypeDoc documentation |
| `tsdoc commit` | Generate smart commit message |
| `tsdoc tokens` | Analyze token usage for AI context |
| `tsdoc context` | Display context information |
### Token Analysis
Understanding token usage helps optimize AI costs:
```bash
# Show token count for current project
tsdoc tokens
# Show detailed stats for all task types
tsdoc tokens --all
# Test with trimmed context
tsdoc tokens --trim
```
## Programmatic Usage
### Generate Documentation Programmatically
```typescript
import { AiDoc } from '@git.zone/tsdoc';
const generateDocs = async () => {
const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-token' });
await aiDoc.start();
// Generate README
await aiDoc.buildReadme('./');
// Update package.json description
await aiDoc.buildDescription('./');
// Get smart commit message
const commit = await aiDoc.buildNextCommitObject('./');
console.log(commit.recommendedNextVersionMessage);
};
```
### TypeDoc Generation
```typescript
import { TypeDoc } from '@git.zone/tsdoc';
const typeDoc = new TypeDoc(process.cwd());
await typeDoc.compile({ publicSubdir: 'docs' });
```
### Context Management
Control how tsdoc processes your codebase:
```typescript
import { EnhancedContext } from '@git.zone/tsdoc';
const context = new EnhancedContext('./');
await context.initialize();
// Set token budget
context.setTokenBudget(100000);
// Choose context mode
context.setContextMode('trimmed'); // 'full' | 'trimmed' | 'summarized'
// Build optimized context
const result = await context.buildContext('readme');
console.log(`Tokens used: ${result.tokenCount}`);
```
## Configuration
Configure tsdoc via `npmextra.json`:
```json
{
"tsdoc": {
"context": {
"maxTokens": 150000,
"contextMode": "trimmed",
"includePatterns": ["**/*.ts"],
"excludePatterns": ["**/*.test.ts"],
"trimming": {
"removeImplementations": true,
"preserveJSDoc": true,
"removeComments": true
}
}
}
}
```
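These settings live under the `tsdoc` key of `npmextra.json`. As a rough sketch of how they could be read at runtime via `@push.rocks/npmextra` (illustrative only; the default values and the exact wiring inside tsdoc may differ):
```typescript
import { Npmextra } from '@push.rocks/npmextra';

// Hypothetical defaults for illustration; tsdoc's real defaults may differ.
const npmextra = new Npmextra('./');
const tsdocConfig = npmextra.dataFor('tsdoc', {
  context: {
    maxTokens: 190000,
    contextMode: 'trimmed',
  },
});

console.log(`Token budget: ${tsdocConfig.context.maxTokens}`);
```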
## How It Works
1. **🔍 Code Analysis** - Scans your TypeScript files, package.json, and existing documentation
2. **✂️ Smart Trimming** - Optimizes code context to fit within AI token limits
3. **🧠 AI Processing** - Sends optimized context to AI for analysis
4. **📝 Generation** - Creates documentation that understands your code's purpose and structure
### Context Optimization
tsdoc employs sophisticated strategies to maximize the value of every token:
- **Intelligent Trimming** - Removes implementation details while preserving signatures
- **Priority Sorting** - Most important files first
- **Smart Summarization** - Condenses large files while maintaining context
- **Token Budgeting** - Ensures optimal use of AI context windows
## Environment Variables
| Variable | Description |
|----------|-------------|
| `OPENAI_TOKEN` | Your OpenAI API key for AI features |
## Use Cases
### 🚀 Continuous Integration
```yaml
# .github/workflows/docs.yml
- name: Generate Documentation
run: |
npm install -g @git.zone/tsdoc
tsdoc aidoc
```
### 🔄 Pre-Commit Hooks
```bash
# Generate commit message before each commit
tsdoc commit
```
### 📦 Package Publishing
```javascript
// Ensure docs are updated before publish
{
"scripts": {
"prepublishOnly": "tsdoc aidoc"
}
}
```
## Advanced Features
### Multi-Module Projects
tsdoc automatically detects and documents multi-module projects:
```typescript
const aiDoc = new AiDoc();
await aiDoc.start();
// Process main project
await aiDoc.buildReadme('./');
// Process submodules
for (const module of ['packages/core', 'packages/cli']) {
await aiDoc.buildReadme(module);
}
```
### Custom Context Building
Fine-tune what gets sent to AI:
```typescript
import { TaskContextFactory } from '@git.zone/tsdoc';
const factory = new TaskContextFactory('./');
await factory.initialize();
// Get optimized context for specific tasks
const readmeContext = await factory.getContext('readme');
const commitContext = await factory.getContext('commit');
```
## Performance
- ⚡ **Fast** - Parallel file processing and smart caching
- 💾 **Efficient** - Minimal memory footprint with streaming
- 🎯 **Accurate** - Context optimization ensures AI gets the most relevant code
- 💰 **Cost-Effective** - Token optimization reduces AI API costs
## Requirements
- Node.js >= 18.0.0
- TypeScript project
- OpenAI API key (for AI features)
## Troubleshooting
### Token Limit Exceeded
If you hit token limits, try:
```bash
# Use trimmed mode
tsdoc aidoc --trim
# Check token usage
tsdoc tokens --all
```
### Missing API Key
Set your OpenAI key:
```bash
export OPENAI_TOKEN="your-key-here"
tsdoc aidoc
```
## Why tsdoc?
- **🎯 Actually Understands Your Code** - Not just parsing, but comprehension
- **⏱️ Saves Hours** - Generate complete documentation in seconds
- **🔄 Always Up-to-Date** - Regenerate documentation with every change
- **🎨 Beautiful Output** - Clean, professional documentation every time
- **🛠️ Developer-Friendly** - Built by developers, for developers
## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
### Trademarks
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
### Company Information
Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

314 readme.plan.md Normal file

@@ -0,0 +1,314 @@
# TSDocs Context Optimization Plan
## Problem Statement
For large TypeScript projects, the context generated for AI-based documentation creation becomes too large, potentially exceeding even o4-mini's 200K token limit. This affects the ability to effectively generate:
- Project documentation (README.md)
- API descriptions and keywords
- Commit messages and changelogs
Current implementation simply includes all TypeScript files and key project files, but lacks intelligent selection, prioritization, or content reduction mechanisms.
## Analysis of Approaches
### 1. Smart Content Selection
**Description:** Intelligently select only files that are necessary for the specific task being performed, using heuristic rules.
**Advantages:**
- Simple to implement
- Predictable behavior
- Can be fine-tuned for different operations
**Disadvantages:**
- Requires manual tuning of rules
- May miss important context in complex projects
- Static approach lacks adaptability
**Implementation Complexity:** Medium
### 2. File Prioritization
**Description:** Rank files by relevance using git history, file size, import/export analysis, and relationship to the current task.
**Advantages:**
- Adaptively includes the most relevant files first
- Maintains context for frequently changed or central files
- Can leverage git history for additional signals
**Disadvantages:**
- Complexity in determining accurate relevance scores
- Requires analyzing project structure
- May require scanning imports/exports for dependency analysis
**Implementation Complexity:** High
### 3. Chunking Strategy
**Description:** Process the project in logical segments, generating intermediate results that are then combined to create the final output.
**Advantages:**
- Can handle projects of any size
- Focused context for each specific part
- May improve quality by focusing on specific areas deeply
**Disadvantages:**
- Complex orchestration of multiple AI calls
- Challenge in maintaining consistency across chunks
- May increase time and cost for processing
**Implementation Complexity:** High
### 4. Dynamic Context Trimming
**Description:** Automatically reduce context by removing non-essential code while preserving structure. Techniques include:
- Removing implementation details but keeping interfaces and type definitions
- Truncating large functions while keeping signatures
- Removing comments and whitespace (except JSDoc)
- Keeping only imports/exports for context files
**Advantages:**
- Preserves full project structure
- Flexible token usage based on importance
- Good balance between completeness and token efficiency
**Disadvantages:**
- Potential to remove important implementation details
- Risk of missing context needed for specific tasks
- Complex rules for what to trim vs keep
**Implementation Complexity:** Medium
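For intuition, here is the kind of before/after transformation this approach targets (a hand-written illustration, not output from the tool):
```typescript
// Hand-written illustration of dynamic context trimming: the signature and
// JSDoc survive, the implementation body is dropped to save tokens.
const beforeTrimming = `
export class ChangelogWriter {
  /** Prepends a new entry to changelog.md. */
  public async writeEntry(entry: string): Promise<void> {
    const existing = await readFile('changelog.md', 'utf8');
    await writeFile('changelog.md', entry + '\n\n' + existing);
  }
}`;

const afterTrimming = `
export class ChangelogWriter {
  /** Prepends a new entry to changelog.md. */
  public async writeEntry(entry: string): Promise<void>;
}`;
```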
### 5. Embeddings-Based Retrieval
**Description:** Create vector embeddings of project files and retrieve only the most relevant ones for a specific task using semantic similarity.
**Advantages:**
- Highly adaptive to different types of requests
- Leverages semantic understanding of content
- Can scale to extremely large projects
**Disadvantages:**
- Requires setting up and managing embeddings database
- Added complexity of running vector similarity searches
- Higher resource requirements for maintaining embeddings
**Implementation Complexity:** Very High
### 6. Task-Specific Contexts
**Description:** Create separate optimized contexts for different tasks (readme, commit messages, etc.) with distinct file selection and processing strategies.
**Advantages:**
- Highly optimized for each specific task
- Efficient token usage for each operation
- Improved quality through task-focused contexts
**Disadvantages:**
- Maintenance of multiple context building strategies
- More complex configuration
- Potential duplication in implementation
**Implementation Complexity:** Medium
### 7. Recursive Summarization
**Description:** Summarize larger files first, then include these summaries in the final context along with smaller files included in full.
**Advantages:**
- Can handle arbitrary project sizes
- Preserves essential information from all files
- Balanced approach to token usage
**Disadvantages:**
- Quality loss from summarization
- Increased processing time from multiple AI calls
- Complex orchestration logic
**Implementation Complexity:** High
## Implementation Strategy
We propose a phased implementation approach, starting with the most impactful and straightforward approaches, then building toward more complex solutions as needed:
### Phase 1: Foundation (1-2 weeks)
1. **Implement Dynamic Context Trimming**
- Create a `ContextProcessor` class that takes SmartFile objects and applies trimming rules
- Implement configurable trimming rules (remove implementations, keep signatures)
- Add a configuration option to control trimming aggressiveness
- Support preserving JSDoc comments while removing other comments
2. **Enhance Token Monitoring**
- Track token usage per file to identify problematic files
- Implement token budgeting to stay within limits
- Add detailed token reporting for optimization
### Phase 2: Smart Selection (2-3 weeks)
3. **Implement Task-Specific Contexts**
- Create specialized context builders for readme, commit messages, and descriptions
- Customize file selection rules for each task
- Add configuration options for task-specific settings
4. **Add Smart Content Selection**
- Implement heuristic rules for file importance
- Create configuration for inclusion/exclusion patterns
- Add ability to focus on specific directories or modules
### Phase 3: Advanced Techniques (3-4 weeks)
5. **Implement File Prioritization**
- Add git history analysis to identify frequently changed files
- Implement dependency analysis to identify central files
- Create a scoring system for file relevance
6. **Add Optional Recursive Summarization**
- Implement file summarization for large files
- Create a hybrid approach that mixes full files and summaries
- Add configuration to control summarization thresholds
### Phase 4: Research-Based Approaches (Future Consideration)
7. **Research and Evaluate Embeddings-Based Retrieval**
- Prototype embeddings creation for TypeScript files
- Evaluate performance and accuracy
- Implement if benefits justify the complexity
8. **Explore Chunking Strategies**
- Research effective chunking approaches for documentation
- Prototype and evaluate performance
- Implement if benefits justify the complexity
## Technical Design
### Core Components
1. **ContextBuilder** - Enhanced version of current ProjectContext
```typescript
interface IContextBuilder {
buildContext(): Promise<string>;
getTokenCount(): number;
setContextMode(mode: 'normal' | 'trimmed' | 'summarized'): void;
setTokenBudget(maxTokens: number): void;
setPrioritizationStrategy(strategy: IPrioritizationStrategy): void;
}
```
2. **FileProcessor** - Handles per-file processing and trimming
```typescript
interface IFileProcessor {
processFile(file: SmartFile): Promise<string>;
setProcessingMode(mode: 'full' | 'trim' | 'summarize'): void;
getTokenCount(): number;
}
```
3. **PrioritizationStrategy** - Ranks files by importance
```typescript
interface IPrioritizationStrategy {
rankFiles(files: SmartFile[], context: string): Promise<SmartFile[]>;
setImportanceMetrics(metrics: IImportanceMetrics): void;
}
```
4. **TaskContextFactory** - Creates optimized contexts for specific tasks
```typescript
interface ITaskContextFactory {
createContextForReadme(projectDir: string): Promise<string>;
createContextForCommit(projectDir: string, diff: string): Promise<string>;
createContextForDescription(projectDir: string): Promise<string>;
}
```
### Configuration Options
The system will support configuration via a new section in `npmextra.json`:
```json
{
"tsdoc": {
"context": {
"maxTokens": 190000,
"defaultMode": "dynamic",
"taskSpecificSettings": {
"readme": {
"mode": "full",
"includePaths": ["src/", "lib/"],
"excludePaths": ["test/", "examples/"]
},
"commit": {
"mode": "trimmed",
"focusOnChangedFiles": true
},
"description": {
"mode": "summarized",
"includePackageInfo": true
}
},
"trimming": {
"removeImplementations": true,
"preserveInterfaces": true,
"preserveTypeDefs": true,
"preserveJSDoc": true,
"maxFunctionLines": 5
}
}
}
}
```
## Cost-Benefit Analysis
### Cost Considerations
1. **Development costs**
- Initial implementation of foundational components (~30-40 hours)
- Testing and validation across different project sizes (~10-15 hours)
- Documentation and configuration examples (~5 hours)
2. **Operational costs**
- Potential increased processing time for context preparation
- Additional API calls for summarization or embeddings approaches
- Monitoring and maintenance of the system
### Benefits
1. **Scalability**
- Support for projects of any size, up to and beyond o4-mini's 200K token limit
- Future-proof design that can adapt to different models and token limits
2. **Quality improvements**
- More focused contexts lead to better AI outputs
- Task-specific optimization improves relevance
- Consistent performance regardless of project size
3. **User experience**
- Predictable behavior for all project sizes
- Transparent token usage reporting
- Configuration options for different usage patterns
## First Deliverable
For immediate improvements, we recommend implementing Dynamic Context Trimming and Task-Specific Contexts first, as these offer the best balance of impact and implementation complexity.
### Implementation Plan for Dynamic Context Trimming
1. Create a basic `ContextTrimmer` class that processes TypeScript files:
- Remove function bodies but keep signatures
- Preserve interface and type definitions
- Keep imports and exports
- Preserve JSDoc comments
2. Integrate with the existing ProjectContext class:
- Add a trimming mode option
- Apply trimming during the context building process
- Track and report token savings
3. Modify the CLI to support trimming options:
- Add a `--trim` flag to enable trimming
- Add a `--trim-level` option for controlling aggressiveness
- Show token usage with and without trimming
This approach could reduce token usage by 40-70% while preserving the essential structure of the codebase, making it suitable for large projects while maintaining high-quality AI outputs.
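As a minimal sketch of step 1 (hypothetical class and option names, covering only the comment and whitespace rules; collapsing function bodies while keeping signatures would additionally need a TypeScript parser such as ts-morph):
```typescript
export interface ITrimOptions {
  preserveJSDoc: boolean;      // keep /** ... */ blocks
  removeLineComments: boolean; // drop whole-line // comments
  collapseBlankLines: boolean; // squeeze runs of empty lines
}

export class ContextTrimmer {
  constructor(
    private options: ITrimOptions = {
      preserveJSDoc: true,
      removeLineComments: true,
      collapseBlankLines: true,
    }
  ) {}

  /** Applies the configured trimming rules to a single file's contents. */
  public trimFile(contents: string): string {
    let result = contents;
    if (this.options.removeLineComments) {
      // drop whole-line // comments, but keep /// directives untouched
      result = result
        .split('\n')
        .filter((line) => !/^\s*\/\/(?!\/)/.test(line))
        .join('\n');
    }
    if (!this.options.preserveJSDoc) {
      // remove /** ... */ blocks only when JSDoc preservation is disabled
      result = result.replace(/\/\*\*[\s\S]*?\*\//g, '');
    }
    if (this.options.collapseBlankLines) {
      result = result.replace(/\n{3,}/g, '\n\n');
    }
    return result;
  }

  /** Rough token estimate (about 4 characters per token) for budget reporting. */
  public estimateTokens(contents: string): number {
    return Math.ceil(contents.length / 4);
  }
}
```
The sketch is purely string-based and dependency-free; the signature-preserving trimming described above would hook into the same `trimFile` entry point.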

39 test/test.aidoc.nonci.ts Normal file

@@ -0,0 +1,39 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
let testQenv = new qenv.Qenv('./', '.nogit/');
import * as tsdocs from '../ts/index.js';
let aidocs: tsdocs.AiDoc;
tap.test('should create an AIdocs class', async () => {
aidocs = new tsdocs.AiDoc({
OPENAI_TOKEN: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
});
expect(aidocs).toBeInstanceOf(tsdocs.AiDoc);
});
tap.test('should start AIdocs', async () => {
await aidocs.start();
});
tap.skip.test('should start AIdocs', async () => {
await aidocs.buildReadme('./');
});
tap.skip.test('should start AIdocs', async () => {
await aidocs.buildDescription('./');
});
tap.test('should build commit object', async () => {
const commitObject = await aidocs.buildNextCommitObject('./');
console.log(commitObject);
expect(commitObject).not.toBeUndefined();
expect(commitObject).toHaveProperty('recommendedNextVersion');
expect(commitObject).toHaveProperty('recommendedNextVersionLevel');
expect(commitObject).toHaveProperty('recommendedNextVersionScope');
expect(commitObject).toHaveProperty('recommendedNextVersionMessage');
})
tap.start();


@@ -1,5 +1,5 @@
import { expect, tap } from '@pushrocks/tapbundle';
import * as tsdoc from '../ts/index';
import { expect, tap } from '@push.rocks/tapbundle';
import * as tsdoc from '../ts/index.js';
tap.test('first test', async () => {
console.log('test');

8 ts/00_commitinfo_data.ts Normal file

@@ -0,0 +1,8 @@
/**
* autocreated commitinfo by @push.rocks/commitinfo
*/
export const commitinfo = {
name: '@git.zone/tsdoc',
version: '1.5.2',
description: 'A comprehensive TypeScript documentation tool that leverages AI to generate and enhance project documentation, including dynamic README creation, API docs via TypeDoc, and smart commit message generation.'
}

146 ts/aidocs_classes/commit.ts Normal file

@@ -0,0 +1,146 @@
import * as plugins from '../plugins.js';
import { AiDoc } from '../classes.aidoc.js';
import { ProjectContext } from './projectcontext.js';
export interface INextCommitObject {
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
recommendedNextVersion: string; // the recommended next version of the project, x.x.x
changelog?: string; // the changelog for the next version
}
export class Commit {
private aiDocsRef: AiDoc;
private projectDir: string;
constructor(aiDocsRef: AiDoc, projectDirArg: string) {
this.aiDocsRef = aiDocsRef;
this.projectDir = projectDirArg;
}
public async buildNextCommitObject(): Promise<INextCommitObject> {
const smartgitInstance = new plugins.smartgit.Smartgit();
await smartgitInstance.init();
const gitRepo = await plugins.smartgit.GitRepo.fromOpeningRepoDir(
smartgitInstance,
this.projectDir
);
const diffStringArray = await gitRepo.getUncommittedDiff([
'pnpm-lock.yaml',
'package-lock.json',
]);
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
await taskContextFactory.initialize();
// Generate context specifically for commit task
const contextResult = await taskContextFactory.createContextForCommit(
diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'
);
// Get the optimized context string
let contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// Check for token overflow against model limits
const MODEL_TOKEN_LIMIT = 200000; // o4-mini
if (contextResult.tokenCount > MODEL_TOKEN_LIMIT * 0.9) {
console.log(`⚠️ Warning: Context size (${contextResult.tokenCount} tokens) is close to or exceeds model limit (${MODEL_TOKEN_LIMIT} tokens).`);
console.log(`The model may not be able to process all information effectively.`);
}
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create a commit message for a git commit.
The commit message should be based on the files in the project.
You should not include any licensing information.
You should not include any personal information.
Important: Answer only in valid JSON.
Your answer should be parseable with JSON.parse() without modifying anything.
Here is the structure of the JSON you should return:
interface {
recommendedNextVersionLevel: 'fix' | 'feat' | 'BREAKING CHANGE'; // the recommended next version level of the project
recommendedNextVersionScope: string; // the recommended scope name of the next version, like "core" or "cli", or specific class names.
recommendedNextVersionMessage: string; // the commit message. Don't put fix() feat() or BREAKING CHANGE in the message. Please just the message itself.
recommendedNextVersionDetails: string[]; // detailed bullet points for the changelog
recommendedNextVersion: string; // the recommended next version of the project, x.x.x
}
For the recommendedNextVersionDetails, please only add a detail entry to the array if it has obvious value to the reader.
You are being given the files of the project. You should use them to create the commit message.
Also you are given a diff.
Never mention CLAUDE code, or codex.
`,
messageHistory: [],
userMessage: contextString,
});
// console.log(result.message);
const resultObject: INextCommitObject = JSON.parse(
result.message.replace('```json', '').replace('```', '')
);
const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
let previousChangelog: plugins.smartfile.SmartFile;
if (await plugins.smartfile.fs.fileExists(previousChangelogPath)) {
previousChangelog = await plugins.smartfile.SmartFile.fromFilePath(previousChangelogPath);
}
if (!previousChangelog) {
// no changelog exists yet, so let's build one from the commit messages
const commitMessages = await gitRepo.getAllCommitMessages();
console.log(JSON.stringify(commitMessages, null, 2));
let result2 = await this.aiDocsRef.openaiInstance.chat({
messageHistory: [],
systemMessage: `
You are building a changelog.md file for the project.
Omit commits and versions that lack relevant changes, but make sure to mention them as a range with a summarizing message instead.
A changelog entry should look like this:
## yyyy-mm-dd - x.x.x - scope here
main description here
- detailed bullet points follow
You are given:
* the commit messages of the project
Only return the changelog file, so it can be written directly to changelog.md`,
userMessage: `
Here are the commit messages:
${JSON.stringify(commitMessages, null, 2)}
`,
});
previousChangelog = await plugins.smartfile.SmartFile.fromString(
previousChangelogPath,
result2.message.replaceAll('```markdown', '').replaceAll('```', ''),
'utf8'
);
}
let oldChangelog = previousChangelog.contents.toString().replace('# Changelog\n\n', '');
if (oldChangelog.startsWith('\n')) {
oldChangelog = oldChangelog.replace('\n', '');
}
let newDateString = new plugins.smarttime.ExtendedDate().exportToHyphedSortableDate();
let newChangelog = `# Changelog\n\n${`## ${newDateString} - {{nextVersion}} - {{nextVersionScope}}
{{nextVersionMessage}}
{{nextVersionDetails}}`}\n\n${oldChangelog}`;
resultObject.changelog = newChangelog;
return resultObject;
}
}
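A minimal usage sketch for the Commit class above, assuming an OPENAI_TOKEN is already configured and that the sketch sits next to commit.ts (the variable names are illustrative only):

import { AiDoc } from '../classes.aidoc.js';
import { Commit } from './commit.js';

// start AiDoc so that openaiInstance is ready before building the commit object
const aiDoc = new AiDoc();
await aiDoc.start();
const commit = new Commit(aiDoc, process.cwd());
const nextCommit = await commit.buildNextCommitObject();
console.log(nextCommit.recommendedNextVersion, nextCommit.recommendedNextVersionMessage);
await aiDoc.stop();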


@@ -0,0 +1,84 @@
import type { AiDoc } from '../classes.aidoc.js';
import * as plugins from '../plugins.js';
import { ProjectContext } from './projectcontext.js';
interface IDescriptionInterface {
description: string;
keywords: string[];
}
export class Description {
// INSTANCE
private aiDocsRef: AiDoc;
private projectDir: string;
constructor(aiDocsRef: AiDoc, projectDirArg: string) {
this.aiDocsRef = aiDocsRef;
this.projectDir = projectDirArg;
}
public async build() {
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
await taskContextFactory.initialize();
// Generate context specifically for description task
const contextResult = await taskContextFactory.createContextForDescription();
const contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create a json adhering the following interface:
{
description: string; // a sensible short, one sentence description of the project
keywords: string[]; // an array of tags that describe the project
}
The description should be based on what you understand from the project's files.
The keywords should be based on use cases you see from the files.
Don't be cheap about the way you think.
Important: Answer only in valid JSON.
Your answer should be parseable with JSON.parse() without modifying anything.
Don't wrap the JSON in \`\`\`json code fences!!!
`,
messageHistory: [],
userMessage: contextString,
});
console.log(result.message);
const resultObject: IDescriptionInterface = JSON.parse(
result.message.replace('```json', '').replace('```', ''),
);
// Create a standard ProjectContext instance for file operations
const projectContext = new ProjectContext(this.projectDir);
const files = await projectContext.gatherFiles();
const npmextraJson = files.smartfilesNpmextraJSON;
const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());
npmextraJsonContent.gitzone.module.description = resultObject.description;
npmextraJsonContent.gitzone.module.keywords = resultObject.keywords;
npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
await npmextraJson.write();
// do the same with packageJson
const packageJson = files.smartfilePackageJSON;
const packageJsonContent = JSON.parse(packageJson.contents.toString());
packageJsonContent.description = resultObject.description;
packageJsonContent.keywords = resultObject.keywords;
packageJson.contents = Buffer.from(JSON.stringify(packageJsonContent, null, 2));
await packageJson.write();
console.log(`\n======================\n`);
console.log(JSON.stringify(resultObject, null, 2));
console.log(`\n======================\n`);
return result.message;
}
}
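A comparable sketch for Description, which persists the generated description and keywords into npmextra.json and package.json as implemented above (again assuming a started AiDoc instance):

import { AiDoc } from '../classes.aidoc.js';
import { Description } from './description.js';

const aiDoc = new AiDoc();
await aiDoc.start();
// build() updates npmextra.json and package.json and returns the raw model answer
await new Description(aiDoc, process.cwd()).build();
await aiDoc.stop();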


@@ -0,0 +1,4 @@
export * from './commit.js';
export * from './description.js';
export * from './projectcontext.js';
export * from './readme.js';


@@ -0,0 +1,127 @@
import * as plugins from '../plugins.js';
export class ProjectContext {
public static async fromDir(dirArg: string) {}
// INSTANCE
public projectDir: string;
private tokenCount: number = 0;
private contextString: string = '';
constructor(projectDirArg: string) {
this.projectDir = projectDirArg;
}
public async gatherFiles() {
const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'package.json'),
this.projectDir,
);
const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'readme.md'),
this.projectDir,
);
const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'readme.hints.md'),
this.projectDir,
);
const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json'),
this.projectDir,
);
const smartfilesMod = await plugins.smartfile.fs.fileTreeToObject(
this.projectDir,
'ts*/**/*.ts',
);
const smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
this.projectDir,
'test/**/*.ts',
);
return {
smartfilePackageJSON,
smartfilesReadme,
smartfilesReadmeHints,
smartfilesNpmextraJSON,
smartfilesMod,
smartfilesTest,
};
}
public async convertFilesToContext(filesArg: plugins.smartfile.SmartFile[]) {
filesArg.map((fileArg) => {
// console.log(` -> ${fileArg.relative}`);
});
return filesArg
.map((smartfile) => {
return `
====== START OF FILE ${smartfile.relative} ======
${smartfile.contents.toString()}
====== END OF FILE ${smartfile.relative} ======
`;
})
.join('\n');
}
/**
* Calculate the token count for a string using the GPT tokenizer
* @param text The text to count tokens for
* @param model The model to use for token counting (default: gpt-3.5-turbo)
* @returns The number of tokens in the text
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
try {
// Use the gpt-tokenizer library to count tokens
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
console.error('Error counting tokens:', error);
// Provide a rough estimate (4 chars per token) if tokenization fails
return Math.ceil(text.length / 4);
}
}
private async buildContext(dirArg: string) {
const files = await this.gatherFiles();
let context = await this.convertFilesToContext([
files.smartfilePackageJSON,
files.smartfilesReadme,
files.smartfilesReadmeHints,
files.smartfilesNpmextraJSON,
...files.smartfilesMod,
...files.smartfilesTest,
]);
// Count tokens in the context
this.contextString = context;
this.tokenCount = this.countTokens(context);
// console.log(context);
return context;
}
/**
* Get the token count for the current context
* @returns The number of tokens in the context
*/
public getTokenCount(): number {
return this.tokenCount;
}
/**
* Get both the context string and its token count
* @returns An object containing the context string and token count
*/
public getContextWithTokenCount(): { context: string; tokenCount: number } {
return {
context: this.contextString,
tokenCount: this.tokenCount
};
}
public async update() {
const result = await this.buildContext(this.projectDir);
return result;
}
}
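A short sketch of ProjectContext on its own, useful for checking how large the raw, unoptimized context is (assuming the sketch sits next to projectcontext.ts):

import { ProjectContext } from './projectcontext.js';

const projectContext = new ProjectContext(process.cwd());
// update() gathers package.json, readme files, npmextra.json and the ts/test sources
await projectContext.update();
const { tokenCount } = projectContext.getContextWithTokenCount();
console.log(`raw context: ${tokenCount} tokens`);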

ts/aidocs_classes/readme.ts (new file, 152 lines)

@@ -0,0 +1,152 @@
import type { AiDoc } from '../classes.aidoc.js';
import * as plugins from '../plugins.js';
import * as paths from '../paths.js';
import { ProjectContext } from './projectcontext.js';
import { logger } from '../logging.js';
export class Readme {
// INSTANCE
private aiDocsRef: AiDoc;
private projectDir: string;
constructor(aiDocsRef: AiDoc, projectDirArg: string) {
this.aiDocsRef = aiDocsRef;
this.projectDir = projectDirArg;
}
public async build() {
let finalReadmeString = ``;
// Use the new TaskContextFactory for optimized context
const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
await taskContextFactory.initialize();
// Generate context specifically for readme task
const contextResult = await taskContextFactory.createContextForReadme();
const contextString = contextResult.context;
// Log token usage statistics
console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
// let's first check legal info before introducing any cost
const projectContext = new ProjectContext(this.projectDir);
const npmExtraJson = JSON.parse(
(await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
);
const legalInfo = npmExtraJson?.tsdoc?.legal;
if (!legalInfo) {
const error = new Error(`No legal information found in npmextra.json`);
console.log(error);
}
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.
The Readme should follow the following template:
# Project Name
[
The name is the module name of package.json
The description is in the description field of package.json
]
## Install
[
Write a short text on how to install the project
]
## Usage
[
Give code examples here.
Construct sensible scenarios for the user.
Make sure to show a complete set of features of the module.
Don't omit use cases.
It does not matter how much time you need.
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
Super important: Check for completeness.
Don't include any licensing information. This will be added in a later step.
Avoid "in conclusions".
Good to know:
* npmextra.json contains overall module information.
* readme.hints.md provides valuable hints about module ideas.
]
`,
messageHistory: [],
userMessage: contextString,
});
finalReadmeString += result.message + '\n' + legalInfo;
console.log(`\n======================\n`);
console.log(result.message);
console.log(`\n======================\n`);
const readme = (await projectContext.gatherFiles()).smartfilesReadme;
readme.contents = Buffer.from(finalReadmeString);
await readme.write();
// lets care about monorepo aspects
const tsPublishInstance = new plugins.tspublish.TsPublish();
const subModules = await tsPublishInstance.getModuleSubDirs(paths.cwd);
logger.log('info', `Found ${Object.keys(subModules).length} sub modules`);
for (const subModule of Object.keys(subModules)) {
logger.log('info', `Building readme for ${subModule}`);
const subModuleContextString = await projectContext.update();
let result = await this.aiDocsRef.openaiInstance.chat({
systemMessage: `
You create markdown readmes for npm projects. You only output the markdown readme.
IMPORTANT: YOU ARE NOW CREATING THE README FOR THE FOLLOWING SUB MODULE: ${subModule} !!!!!!!!!!!
The Sub Module will be published with the following data:
${JSON.stringify(plugins.smartfile.fs.toStringSync(plugins.path.join(paths.cwd, subModule, 'tspublish.json')), null, 2)}
The Readme should follow the following template:
# Project Name
[
The name is the module name of package.json
The description is in the description field of package.json
]
## Install
[
Write a short text on how to install the project
]
## Usage
[
Give code examples here.
Construct sensible scenarios for the user.
Make sure to show a complete set of features of the module.
Don't omit use cases.
It does not matter how much time you need.
ALWAYS USE ESM SYNTAX AND TYPESCRIPT.
DON'T CHICKEN OUT. Write at least 4000 words. More if necessary.
If there is already a readme, take the Usage section as base. Remove outdated content, and expand and improve upon the valid parts.
Super important: Check for completeness.
Don't include any licensing information. This will be added in a later step.
Avoid "in conclusions".
Good to know:
* npmextra.json contains overall module information.
* readme.hints.md provides valuable hints about module ideas.
* Your output lands directly in the readme.md file.
* Don't use \`\`\` at the beginning or the end. It'll cause problems. Only use it for codeblocks. You are directly writing markdown. No need to introduce it weirdly.
]
`,
messageHistory: [],
userMessage: subModuleContextString,
});
const subModuleReadmeString = result.message + '\n' + legalInfo;
await plugins.smartfile.memory.toFs(subModuleReadmeString, plugins.path.join(paths.cwd, subModule, 'readme.md'));
logger.log('success', `Built readme for ${subModule}`);
}
return result.message;
}
}
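The same pattern works as a sketch for Readme, which additionally iterates tspublish sub modules as shown above (same assumptions as the earlier sketches):

import { AiDoc } from '../classes.aidoc.js';
import { Readme } from './readme.js';

const aiDoc = new AiDoc();
await aiDoc.start();
// writes readme.md for the root project and one readme.md per detected sub module
await new Readme(aiDoc, process.cwd()).build();
await aiDoc.stop();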

ts/classes.aidoc.ts (new file, 134 lines)

@@ -0,0 +1,134 @@
import * as plugins from './plugins.js';
import * as aiDocsClasses from './aidocs_classes/index.js';
export class AiDoc {
private openaiToken: string;
public npmextraKV: plugins.npmextra.KeyValueStore;
public qenvInstance: plugins.qenv.Qenv;
public aidocInteract: plugins.smartinteract.SmartInteract;
public openaiInstance: plugins.smartai.OpenAiProvider;
argvArg: any;
constructor(argvArg?: any) {
this.argvArg = argvArg;
}
private printSanitizedToken() {
// Check whether the token is long enough to safely show its start and end
let printToken: string;
if (this.openaiToken.length > 6) {
// Extract the beginning and end parts of the token
const start = this.openaiToken.substring(0, 3);
const end = this.openaiToken.substring(this.openaiToken.length - 3);
printToken = `${start}...${end}`;
} else {
// If the token is not long enough, return it as is
printToken = this.openaiToken;
}
console.log(`OpenAI Token on record: ${printToken}`);
}
public async start() {
// lets care about prerequisites
this.aidocInteract = new plugins.smartinteract.SmartInteract();
this.qenvInstance = new plugins.qenv.Qenv();
if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
this.npmextraKV = new plugins.npmextra.KeyValueStore({
typeArg: 'userHomeDir',
identityArg: 'tsdoc',
mandatoryKeys: ['OPENAI_TOKEN'],
});
const missingKeys = await this.npmextraKV.getMissingMandatoryKeys();
if (missingKeys.length > 0) {
// lets try argv
if (this.argvArg?.OPENAI_TOKEN) {
this.openaiToken = this.argvArg.OPENAI_TOKEN;
} else {
// lets try smartinteract
// wait for a second until OpenAI fixes punycode problem...
await plugins.smartdelay.delayFor(1000);
const answerObject = await this.aidocInteract.askQuestion({
type: 'input',
message: `Please provide your OpenAI token. This will be persisted in your home directory.`,
name: 'OPENAI_TOKEN',
default: '',
});
this.openaiToken = answerObject.value;
}
this.printSanitizedToken();
await this.npmextraKV.writeKey('OPENAI_TOKEN', this.openaiToken);
}
}
if (!this.openaiToken) {
this.openaiToken = await this.npmextraKV.readKey('OPENAI_TOKEN');
}
// let's assume we have an OPENAI_TOKEN now
this.openaiInstance = new plugins.smartai.OpenAiProvider({
openaiToken: this.openaiToken,
});
await this.openaiInstance.start();
}
public async stop() {
await this.openaiInstance.stop();
}
public async buildReadme(projectDirArg: string) {
const readmeInstance = new aiDocsClasses.Readme(this, projectDirArg);
return await readmeInstance.build();
}
public async buildDescription(projectDirArg: string) {
const descriptionInstance = new aiDocsClasses.Description(this, projectDirArg);
return await descriptionInstance.build();
}
public async buildNextCommitObject(projectDirArg: string) {
const commitInstance = new aiDocsClasses.Commit(this, projectDirArg);
return await commitInstance.buildNextCommitObject();
}
public async getProjectContext(projectDirArg: string) {
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
return await projectContextInstance.gatherFiles();
}
/**
* Get the context with token count information
* @param projectDirArg The path to the project directory
* @returns An object containing the context string and its token count
*/
public async getProjectContextWithTokenCount(projectDirArg: string) {
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
await projectContextInstance.update();
return projectContextInstance.getContextWithTokenCount();
}
/**
* Get just the token count for a project's context
* @param projectDirArg The path to the project directory
* @returns The number of tokens in the project context
*/
public async getProjectContextTokenCount(projectDirArg: string) {
const projectContextInstance = new aiDocsClasses.ProjectContext(projectDirArg);
await projectContextInstance.update();
return projectContextInstance.getTokenCount();
}
/**
* Count tokens in a text string using GPT tokenizer
* @param text The text to count tokens for
* @param model The model to use for tokenization (default: gpt-3.5-turbo)
* @returns The number of tokens in the text
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
const projectContextInstance = new aiDocsClasses.ProjectContext('');
return projectContextInstance.countTokens(text, model);
}
}
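A hedged sketch of AiDoc as the single entry point, using only the methods defined above:

import { AiDoc } from './classes.aidoc.js';

const aiDoc = new AiDoc();
await aiDoc.start(); // resolves OPENAI_TOKEN from env, argv or the persisted key-value store
// inspect how large the project context is before spending tokens on chat calls
const tokenCount = await aiDoc.getProjectContextTokenCount(process.cwd());
console.log(`project context: ${tokenCount} tokens`);
await aiDoc.buildReadme(process.cwd());
await aiDoc.buildDescription(process.cwd());
await aiDoc.stop();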

ts/classes.typedoc.ts (new file, 58 lines)

@@ -0,0 +1,58 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
export class TypeDoc {
public smartshellInstance = new plugins.smartshell.Smartshell({
executor: 'bash',
pathDirectories: [paths.binDir],
});
// Static
public static async isTypeDocDir(dirPathArg: string): Promise<boolean> {
return true;
}
// Instance
public typedocDirectory: string;
constructor(dirPathArg) {
this.typedocDirectory = dirPathArg;
}
public async compile(options?: { publicSubdir?: string }) {
const data = {
compilerOptions: {
experimentalDecorators: true,
useDefineForClassFields: false,
target: 'ES2022',
module: 'NodeNext',
moduleResolution: 'NodeNext',
esModuleInterop: true,
verbatimModuleSyntax: true,
skipLibCheck: true,
},
include: [],
};
let startDirectory = '';
if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts'))) {
data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
startDirectory = 'ts';
}
if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts_web'))) {
data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
if (!startDirectory) {
startDirectory = 'ts_web';
}
}
await plugins.smartfile.memory.toFs(JSON.stringify(data), paths.tsconfigFile);
let targetDir = paths.publicDir;
if (options?.publicSubdir) {
targetDir = plugins.path.join(targetDir, options.publicSubdir);
}
await this.smartshellInstance.exec(
`typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
);
plugins.smartfile.fs.remove(paths.tsconfigFile);
}
}
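A minimal sketch for the TypeDoc wrapper above; publicSubdir is optional and is appended to paths.publicDir:

import { TypeDoc } from './classes.typedoc.js';

// compiles ./ts (and ./ts_web if present) into ./public/api via the typedoc CLI
const typeDocInstance = new TypeDoc(process.cwd());
await typeDocInstance.compile({ publicSubdir: 'api' });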

ts/cli.ts (new file, 177 lines)

@@ -0,0 +1,177 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { logger } from './logging.js';
import { TypeDoc } from './classes.typedoc.js';
import { AiDoc } from './classes.aidoc.js';
import * as context from './context/index.js';
export const run = async () => {
const tsdocCli = new plugins.smartcli.Smartcli();
tsdocCli.standardCommand().subscribe(async (argvArg) => {
logger.log('warn', `Auto detecting environment!`);
switch (true) {
case await TypeDoc.isTypeDocDir(paths.cwd):
logger.log('ok', `Detected TypeDoc compliant directory at ${paths.cwd}`);
tsdocCli.triggerCommand('typedoc', argvArg);
break;
default:
logger.log('error', `Cannot determine docs format at ${paths.cwd}`);
}
});
tsdocCli.addCommand('typedoc').subscribe(async (argvArg) => {
const typeDocInstance = new TypeDoc(paths.cwd);
await typeDocInstance.compile({
publicSubdir: argvArg.publicSubdir,
});
});
tsdocCli.addCommand('aidoc').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
// Get context token count if requested
if (argvArg.tokens || argvArg.showTokens) {
logger.log('info', `Calculating context token count...`);
const tokenCount = await aidocInstance.getProjectContextTokenCount(paths.cwd);
logger.log('ok', `Total context token count: ${tokenCount}`);
if (argvArg.tokensOnly) {
return; // Exit early if we only want token count
}
}
logger.log('info', `Generating new readme...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildReadme(paths.cwd);
logger.log('info', `Generating new keywords...`);
logger.log('info', `This may take some time...`);
await aidocInstance.buildDescription(paths.cwd);
});
tsdocCli.addCommand('tokens').subscribe(async (argvArg) => {
const aidocInstance = new AiDoc();
await aidocInstance.start();
logger.log('info', `Calculating context token count...`);
// Determine context mode based on args
let contextMode: context.ContextMode = 'full';
if (argvArg.trim || argvArg.trimmed) {
contextMode = 'trimmed';
} else if (argvArg.summarize || argvArg.summarized) {
contextMode = 'summarized';
}
// Get task type if specified
let taskType: context.TaskType | undefined = undefined;
if (argvArg.task) {
if (['readme', 'commit', 'description'].includes(argvArg.task)) {
taskType = argvArg.task as context.TaskType;
} else {
logger.log('warn', `Unknown task type: ${argvArg.task}. Using default context.`);
}
}
// Use enhanced context
const taskFactory = new context.TaskContextFactory(paths.cwd);
await taskFactory.initialize();
let contextResult: context.IContextResult;
if (argvArg.all) {
// Show stats for all task types
const stats = await taskFactory.getTokenStats();
logger.log('ok', 'Token statistics by task:');
for (const [task, data] of Object.entries(stats)) {
logger.log('info', `\n${task.toUpperCase()}:`);
logger.log('info', ` Tokens: ${data.tokenCount}`);
logger.log('info', ` Token savings: ${data.savings}`);
logger.log('info', ` Files: ${data.includedFiles} included, ${data.trimmedFiles} trimmed, ${data.excludedFiles} excluded`);
// Calculate percentage of model context
const o4MiniPercentage = (data.tokenCount / 200000 * 100).toFixed(2);
logger.log('info', ` Context usage: ${o4MiniPercentage}% of o4-mini (200K tokens)`);
}
return;
}
if (taskType) {
// Get context for specific task
contextResult = await taskFactory.createContextForTask(taskType);
} else {
// Get generic context with specified mode
const enhancedContext = new context.EnhancedContext(paths.cwd);
await enhancedContext.initialize();
enhancedContext.setContextMode(contextMode);
if (argvArg.maxTokens) {
enhancedContext.setTokenBudget(parseInt(argvArg.maxTokens, 10));
}
contextResult = await enhancedContext.buildContext();
}
// Display results
logger.log('ok', `Total context token count: ${contextResult.tokenCount}`);
logger.log('info', `Files included: ${contextResult.includedFiles.length}`);
logger.log('info', `Files trimmed: ${contextResult.trimmedFiles.length}`);
logger.log('info', `Files excluded: ${contextResult.excludedFiles.length}`);
logger.log('info', `Token savings: ${contextResult.tokenSavings}`);
if (argvArg.detailed) {
// Show more detailed info about the context and token usage
const o4MiniPercentage = (contextResult.tokenCount / 200000 * 100).toFixed(2);
logger.log('info', `Token usage: ${o4MiniPercentage}% of o4-mini 200K token context window`);
if (argvArg.model) {
// Show percentages for different models
if (argvArg.model === 'gpt4') {
const gpt4Percentage = (contextResult.tokenCount / 8192 * 100).toFixed(2);
logger.log('info', `Token usage (GPT-4): ${gpt4Percentage}% of 8192 token context window`);
} else if (argvArg.model === 'gpt35') {
const gpt35Percentage = (contextResult.tokenCount / 4096 * 100).toFixed(2);
logger.log('info', `Token usage (GPT-3.5): ${gpt35Percentage}% of 4096 token context window`);
}
}
// Estimate cost (approximate values)
const o4MiniInputCost = 0.00005; // per 1K tokens for o4-mini
const estimatedCost = (contextResult.tokenCount / 1000 * o4MiniInputCost).toFixed(6);
logger.log('info', `Estimated input cost: $${estimatedCost} (o4-mini)`);
if (argvArg.listFiles) {
// List files included in context
logger.log('info', '\nIncluded files:');
contextResult.includedFiles.forEach(file => {
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
});
logger.log('info', '\nTrimmed files:');
contextResult.trimmedFiles.forEach(file => {
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
});
if (contextResult.excludedFiles.length > 0) {
logger.log('info', '\nExcluded files:');
contextResult.excludedFiles.forEach(file => {
logger.log('info', ` ${file.relativePath} (${file.tokenCount} tokens)`);
});
}
}
}
});
tsdocCli.addCommand('test').subscribe((argvArg) => {
tsdocCli.triggerCommand('typedoc', argvArg);
process.on('exit', async () => {
await plugins.smartfile.fs.remove(paths.publicDir);
});
});
tsdocCli.startParse();
};


@@ -0,0 +1,209 @@
import * as plugins from '../plugins.js';
import type { IContextConfig, ITrimConfig, ITaskConfig, TaskType, ContextMode } from './types.js';
/**
* Manages configuration for context building
*/
export class ConfigManager {
private static instance: ConfigManager;
private config: IContextConfig;
private projectDir: string = '';
/**
* Get the singleton instance of ConfigManager
*/
public static getInstance(): ConfigManager {
if (!ConfigManager.instance) {
ConfigManager.instance = new ConfigManager();
}
return ConfigManager.instance;
}
/**
* Private constructor for singleton pattern
*/
private constructor() {
this.config = this.getDefaultConfig();
}
/**
* Initialize the config manager with a project directory
* @param projectDir The project directory
*/
public async initialize(projectDir: string): Promise<void> {
this.projectDir = projectDir;
await this.loadConfig();
}
/**
* Get the default configuration
*/
private getDefaultConfig(): IContextConfig {
return {
maxTokens: 190000, // Default for o4-mini with some buffer
defaultMode: 'trimmed',
taskSpecificSettings: {
readme: {
mode: 'trimmed',
includePaths: ['ts/', 'src/'],
excludePaths: ['test/', 'node_modules/']
},
commit: {
mode: 'trimmed',
focusOnChangedFiles: true
},
description: {
mode: 'trimmed',
includePackageInfo: true
}
},
trimming: {
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true
}
};
}
/**
* Load configuration from npmextra.json
*/
private async loadConfig(): Promise<void> {
try {
if (!this.projectDir) {
return;
}
// Read the npmextra.json file directly via smartfile (no KeyValueStore needed here)
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json')
);
const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());
// Check for tsdoc context configuration
if (npmextraContent?.tsdoc?.context) {
// Merge with default config
this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
}
} catch (error) {
console.error('Error loading context configuration:', error);
}
}
/**
* Merge configurations, with userConfig taking precedence
* @param defaultConfig The default configuration
* @param userConfig The user configuration
*/
private mergeConfigs(defaultConfig: IContextConfig, userConfig: Partial<IContextConfig>): IContextConfig {
const result: IContextConfig = { ...defaultConfig };
// Merge top-level properties
if (userConfig.maxTokens !== undefined) result.maxTokens = userConfig.maxTokens;
if (userConfig.defaultMode !== undefined) result.defaultMode = userConfig.defaultMode;
// Merge task-specific settings
if (userConfig.taskSpecificSettings) {
result.taskSpecificSettings = result.taskSpecificSettings || {};
// For each task type, merge settings
(['readme', 'commit', 'description'] as TaskType[]).forEach(taskType => {
if (userConfig.taskSpecificSettings?.[taskType]) {
result.taskSpecificSettings![taskType] = {
...result.taskSpecificSettings![taskType],
...userConfig.taskSpecificSettings[taskType]
};
}
});
}
// Merge trimming configuration
if (userConfig.trimming) {
result.trimming = {
...result.trimming,
...userConfig.trimming
};
}
return result;
}
/**
* Get the complete configuration
*/
public getConfig(): IContextConfig {
return this.config;
}
/**
* Get the trimming configuration
*/
public getTrimConfig(): ITrimConfig {
return this.config.trimming || {};
}
/**
* Get configuration for a specific task
* @param taskType The type of task
*/
public getTaskConfig(taskType: TaskType): ITaskConfig {
// Get task-specific config or empty object
const taskConfig = this.config.taskSpecificSettings?.[taskType] || {};
// If mode is not specified, use default mode
if (!taskConfig.mode) {
taskConfig.mode = this.config.defaultMode;
}
return taskConfig;
}
/**
* Get the maximum tokens allowed for context
*/
public getMaxTokens(): number {
return this.config.maxTokens || 190000;
}
/**
* Update the configuration
* @param config The new configuration
*/
public async updateConfig(config: Partial<IContextConfig>): Promise<void> {
// Merge with existing config
this.config = this.mergeConfigs(this.config, config);
try {
if (!this.projectDir) {
return;
}
// Read the existing npmextra.json file
const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
let npmextraContent = {};
if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
}
// Update the tsdoc context configuration
const typedContent = npmextraContent as any;
if (!typedContent.tsdoc) typedContent.tsdoc = {};
typedContent.tsdoc.context = this.config;
// Write back to npmextra.json
const updatedContent = JSON.stringify(npmextraContent, null, 2);
await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
} catch (error) {
console.error('Error updating context configuration:', error);
}
}
}
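A sketch of driving ConfigManager directly; the option names mirror IContextConfig from ts/context/types.ts further down, and updateConfig() persists them under tsdoc.context in npmextra.json:

import { ConfigManager } from './config-manager.js';

const configManager = ConfigManager.getInstance();
await configManager.initialize(process.cwd());
// tighten the budget and keep short implementations instead of stripping them entirely
await configManager.updateConfig({
  maxTokens: 150000,
  defaultMode: 'trimmed',
  trimming: { removeImplementations: false, maxFunctionLines: 10 },
});
console.log(configManager.getTaskConfig('readme'));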


@@ -0,0 +1,246 @@
import * as plugins from '../plugins.js';
import type { ITrimConfig, ContextMode } from './types.js';
/**
* Class responsible for trimming file contents to reduce token usage
* while preserving important information for context
*/
export class ContextTrimmer {
private config: ITrimConfig;
/**
* Create a new ContextTrimmer with the given configuration
* @param config The trimming configuration
*/
constructor(config?: ITrimConfig) {
this.config = {
removeImplementations: true,
preserveInterfaces: true,
preserveTypeDefs: true,
preserveJSDoc: true,
maxFunctionLines: 5,
removeComments: true,
removeBlankLines: true,
...config
};
}
/**
* Trim a file's contents based on the configuration
* @param filePath The path to the file
* @param content The file's contents
* @param mode The context mode to use
* @returns The trimmed file contents
*/
public trimFile(filePath: string, content: string, mode: ContextMode = 'trimmed'): string {
// If mode is 'full', return the original content
if (mode === 'full') {
return content;
}
// Process based on file type
if (filePath.endsWith('.ts') || filePath.endsWith('.tsx')) {
return this.trimTypeScriptFile(content);
} else if (filePath.endsWith('.md')) {
return this.trimMarkdownFile(content);
} else if (filePath.endsWith('.json')) {
return this.trimJsonFile(content);
}
// Default to returning the original content for unknown file types
return content;
}
/**
* Trim a TypeScript file to reduce token usage
* @param content The TypeScript file contents
* @returns The trimmed file contents
*/
private trimTypeScriptFile(content: string): string {
let result = content;
// Step 1: Preserve JSDoc comments if configured
const jsDocComments: string[] = [];
if (this.config.preserveJSDoc) {
const jsDocRegex = /\/\*\*[\s\S]*?\*\//g;
const matches = result.match(jsDocRegex) || [];
jsDocComments.push(...matches);
}
// Step 2: Remove comments if configured
if (this.config.removeComments) {
// Remove single-line comments
result = result.replace(/\/\/.*$/gm, '');
// Remove multi-line comments (except JSDoc if preserveJSDoc is true)
if (!this.config.preserveJSDoc) {
result = result.replace(/\/\*[\s\S]*?\*\//g, '');
} else {
// Only remove non-JSDoc comments
result = result.replace(/\/\*(?!\*)[\s\S]*?\*\//g, '');
}
}
// Step 3: Remove function implementations if configured
if (this.config.removeImplementations) {
// Match function and method bodies
result = result.replace(
/(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, funcType, body, end) => {
// Keep function signature and opening brace, replace body with comment
return `${start} /* implementation removed */ ${end}`;
}
);
// Match arrow function bodies
result = result.replace(
/(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
(match, start, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
// Match method declarations
result = result.replace(
/(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
(match, start, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
// Match class methods
result = result.replace(
/(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, modifier, body, end) => {
return `${start} /* implementation removed */ ${end}`;
}
);
} else if (this.config.maxFunctionLines && this.config.maxFunctionLines > 0) {
// If not removing implementations completely, limit the number of lines
// Match function and method bodies
result = result.replace(
/(\b(function|constructor|async function)\s+[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, funcType, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match arrow function bodies
result = result.replace(
/(\([^)]*\)\s*=>\s*{)([\s\S]*?)(})/g,
(match, start, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match method declarations
result = result.replace(
/(^\s*[\w$]*\s*\([^)]*\)\s*{)([\s\S]*?)(})/gm,
(match, start, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
// Match class methods
result = result.replace(
/(\b(public|private|protected|static|async)?\s+[\w$]+\s*\([^)]*\)\s*{)([\s\S]*?)(})/g,
(match, start, modifier, body, end) => {
return this.limitFunctionBody(start, body, end);
}
);
}
// Step 4: Remove blank lines if configured
if (this.config.removeBlankLines) {
result = result.replace(/^\s*[\r\n]/gm, '');
}
// Step 5: Restore preserved JSDoc comments
if (this.config.preserveJSDoc && jsDocComments.length > 0) {
// This is a placeholder; we already preserved JSDoc comments in the regex steps
}
return result;
}
/**
* Limit a function body to a maximum number of lines
* @param start The function signature and opening brace
* @param body The function body
* @param end The closing brace
* @returns The limited function body
*/
private limitFunctionBody(start: string, body: string, end: string): string {
const lines = body.split('\n');
if (lines.length > this.config.maxFunctionLines!) {
const limitedBody = lines.slice(0, this.config.maxFunctionLines!).join('\n');
return `${start}${limitedBody}\n // ... (${lines.length - this.config.maxFunctionLines!} lines trimmed)\n${end}`;
}
return `${start}${body}${end}`;
}
/**
* Trim a Markdown file to reduce token usage
* @param content The Markdown file contents
* @returns The trimmed file contents
*/
private trimMarkdownFile(content: string): string {
// For markdown files, we generally want to keep most content
// but we can remove lengthy code blocks if needed
return content;
}
/**
* Trim a JSON file to reduce token usage
* @param content The JSON file contents
* @returns The trimmed file contents
*/
private trimJsonFile(content: string): string {
try {
// Parse the JSON
const json = JSON.parse(content);
// For package.json, keep only essential information
if ('name' in json && 'version' in json && 'dependencies' in json) {
const essentialKeys = [
'name', 'version', 'description', 'author', 'license',
'main', 'types', 'exports', 'type'
];
const trimmedJson: any = {};
essentialKeys.forEach(key => {
if (key in json) {
trimmedJson[key] = json[key];
}
});
// Add dependency information without versions
if ('dependencies' in json) {
trimmedJson.dependencies = Object.keys(json.dependencies).reduce((acc, dep) => {
acc[dep] = '*'; // Replace version with wildcard
return acc;
}, {} as Record<string, string>);
}
// Return the trimmed JSON
return JSON.stringify(trimmedJson, null, 2);
}
// For other JSON files, leave as is
return content;
} catch (error) {
// If there's an error parsing the JSON, return the original content
return content;
}
}
/**
* Update the trimmer configuration
* @param config The new configuration to apply
*/
public updateConfig(config: ITrimConfig): void {
this.config = {
...this.config,
...config
};
}
}
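A small sketch of the trimmer in isolation; with removeImplementations enabled, the function body below is replaced by the placeholder comment generated above (the file path is only a label for file-type detection):

import { ContextTrimmer } from './context-trimmer.js';

const trimmer = new ContextTrimmer({ removeImplementations: true, preserveJSDoc: true });
const source = 'export function add(a: number, b: number) {\n  return a + b;\n}\n';
// prints the signature with "/* implementation removed */" in place of the body
console.log(trimmer.trimFile('ts/example.ts', source, 'trimmed'));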


@@ -0,0 +1,343 @@
import * as plugins from '../plugins.js';
import type { ContextMode, IContextResult, IFileInfo, TaskType } from './types.js';
import { ContextTrimmer } from './context-trimmer.js';
import { ConfigManager } from './config-manager.js';
/**
* Enhanced ProjectContext that supports context optimization strategies
*/
export class EnhancedContext {
private projectDir: string;
private trimmer: ContextTrimmer;
private configManager: ConfigManager;
private contextMode: ContextMode = 'trimmed';
private tokenBudget: number = 190000; // Default for o4-mini
private contextResult: IContextResult = {
context: '',
tokenCount: 0,
includedFiles: [],
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0
};
/**
* Create a new EnhancedContext
* @param projectDirArg The project directory
*/
constructor(projectDirArg: string) {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
this.trimmer = new ContextTrimmer(this.configManager.getTrimConfig());
}
/**
* Initialize the context builder
*/
public async initialize(): Promise<void> {
await this.configManager.initialize(this.projectDir);
this.tokenBudget = this.configManager.getMaxTokens();
this.trimmer.updateConfig(this.configManager.getTrimConfig());
}
/**
* Set the context mode
* @param mode The context mode to use
*/
public setContextMode(mode: ContextMode): void {
this.contextMode = mode;
}
/**
* Set the token budget
* @param maxTokens The maximum tokens to use
*/
public setTokenBudget(maxTokens: number): void {
this.tokenBudget = maxTokens;
}
/**
* Gather files from the project
* @param includePaths Optional paths to include
* @param excludePaths Optional paths to exclude
*/
public async gatherFiles(includePaths?: string[], excludePaths?: string[]): Promise<Record<string, plugins.smartfile.SmartFile | plugins.smartfile.SmartFile[]>> {
const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'package.json'),
this.projectDir,
);
const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'readme.md'),
this.projectDir,
);
const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'readme.hints.md'),
this.projectDir,
);
const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
plugins.path.join(this.projectDir, 'npmextra.json'),
this.projectDir,
);
// Use provided include paths or default to all TypeScript files
const includeGlobs = includePaths?.map(path => `${path}/**/*.ts`) || ['ts*/**/*.ts'];
// Get TypeScript files
const smartfilesModPromises = includeGlobs.map(glob =>
plugins.smartfile.fs.fileTreeToObject(this.projectDir, glob)
);
const smartfilesModArrays = await Promise.all(smartfilesModPromises);
// Flatten the arrays
const smartfilesMod: plugins.smartfile.SmartFile[] = [];
smartfilesModArrays.forEach(array => {
smartfilesMod.push(...array);
});
// Get test files if not excluded
let smartfilesTest: plugins.smartfile.SmartFile[] = [];
if (!excludePaths?.includes('test/')) {
smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
this.projectDir,
'test/**/*.ts',
);
}
return {
smartfilePackageJSON,
smartfilesReadme,
smartfilesReadmeHints,
smartfilesNpmextraJSON,
smartfilesMod,
smartfilesTest,
};
}
/**
* Convert files to context string
* @param files The files to convert
* @param mode The context mode to use
*/
public async convertFilesToContext(
files: plugins.smartfile.SmartFile[],
mode: ContextMode = this.contextMode
): Promise<string> {
// Reset context result
this.contextResult = {
context: '',
tokenCount: 0,
includedFiles: [],
trimmedFiles: [],
excludedFiles: [],
tokenSavings: 0
};
let totalTokenCount = 0;
let totalOriginalTokens = 0;
// Sort files by importance (for now just a simple alphabetical sort)
// Later this could be enhanced with more sophisticated prioritization
const sortedFiles = [...files].sort((a, b) => a.relative.localeCompare(b.relative));
const processedFiles: string[] = [];
for (const smartfile of sortedFiles) {
// Calculate original token count
const originalContent = smartfile.contents.toString();
const originalTokenCount = this.countTokens(originalContent);
totalOriginalTokens += originalTokenCount;
// Apply trimming based on mode
let processedContent = originalContent;
if (mode !== 'full') {
processedContent = this.trimmer.trimFile(
smartfile.relative,
originalContent,
mode
);
}
// Calculate new token count
const processedTokenCount = this.countTokens(processedContent);
// Check if we have budget for this file
if (totalTokenCount + processedTokenCount > this.tokenBudget) {
// We don't have budget for this file
this.contextResult.excludedFiles.push({
path: smartfile.path,
contents: originalContent,
relativePath: smartfile.relative,
tokenCount: originalTokenCount
});
continue;
}
// Format the file for context
const formattedContent = `
====== START OF FILE ${smartfile.relative} ======
${processedContent}
====== END OF FILE ${smartfile.relative} ======
`;
processedFiles.push(formattedContent);
totalTokenCount += processedTokenCount;
// Track file in appropriate list
const fileInfo: IFileInfo = {
path: smartfile.path,
contents: processedContent,
relativePath: smartfile.relative,
tokenCount: processedTokenCount
};
if (mode === 'full' || processedContent === originalContent) {
this.contextResult.includedFiles.push(fileInfo);
} else {
this.contextResult.trimmedFiles.push(fileInfo);
this.contextResult.tokenSavings += (originalTokenCount - processedTokenCount);
}
}
// Join all processed files
const context = processedFiles.join('\n');
// Update context result
this.contextResult.context = context;
this.contextResult.tokenCount = totalTokenCount;
return context;
}
/**
* Build context for the project
* @param taskType Optional task type for task-specific context
*/
public async buildContext(taskType?: TaskType): Promise<IContextResult> {
// Initialize if needed
if (this.tokenBudget === 0) {
await this.initialize();
}
// Get task-specific configuration if a task type is provided
if (taskType) {
const taskConfig = this.configManager.getTaskConfig(taskType);
if (taskConfig.mode) {
this.setContextMode(taskConfig.mode);
}
}
// Gather files
const taskConfig = taskType ? this.configManager.getTaskConfig(taskType) : undefined;
const files = await this.gatherFiles(
taskConfig?.includePaths,
taskConfig?.excludePaths
);
// Convert files to context
// Create an array of all files to process
const allFiles: plugins.smartfile.SmartFile[] = [];
// Add individual files
if (files.smartfilePackageJSON) allFiles.push(files.smartfilePackageJSON as plugins.smartfile.SmartFile);
if (files.smartfilesReadme) allFiles.push(files.smartfilesReadme as plugins.smartfile.SmartFile);
if (files.smartfilesReadmeHints) allFiles.push(files.smartfilesReadmeHints as plugins.smartfile.SmartFile);
if (files.smartfilesNpmextraJSON) allFiles.push(files.smartfilesNpmextraJSON as plugins.smartfile.SmartFile);
// Add arrays of files
if (files.smartfilesMod) {
if (Array.isArray(files.smartfilesMod)) {
allFiles.push(...files.smartfilesMod);
} else {
allFiles.push(files.smartfilesMod);
}
}
if (files.smartfilesTest) {
if (Array.isArray(files.smartfilesTest)) {
allFiles.push(...files.smartfilesTest);
} else {
allFiles.push(files.smartfilesTest);
}
}
const context = await this.convertFilesToContext(allFiles);
return this.contextResult;
}
/**
* Update the context with git diff information for commit tasks
* @param gitDiff The git diff to include
*/
public updateWithGitDiff(gitDiff: string): IContextResult {
// If we don't have a context yet, return empty result
if (!this.contextResult.context) {
return this.contextResult;
}
// Add git diff to context
const diffSection = `
====== GIT DIFF ======
${gitDiff}
====== END GIT DIFF ======
`;
const diffTokenCount = this.countTokens(diffSection);
// Update context and token count
this.contextResult.context += diffSection;
this.contextResult.tokenCount += diffTokenCount;
return this.contextResult;
}
/**
* Count tokens in a string
* @param text The text to count tokens for
* @param model The model to use for token counting
*/
public countTokens(text: string, model: string = 'gpt-3.5-turbo'): number {
try {
// Use the gpt-tokenizer library to count tokens
const tokens = plugins.gptTokenizer.encode(text);
return tokens.length;
} catch (error) {
console.error('Error counting tokens:', error);
// Provide a rough estimate if tokenization fails
return Math.ceil(text.length / 4);
}
}
/**
* Get the context result
*/
public getContextResult(): IContextResult {
return this.contextResult;
}
/**
* Get the token count for the current context
*/
public getTokenCount(): number {
return this.contextResult.tokenCount;
}
/**
* Get both the context string and its token count
*/
public getContextWithTokenCount(): { context: string; tokenCount: number } {
return {
context: this.contextResult.context,
tokenCount: this.contextResult.tokenCount
};
}
}
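A usage sketch for EnhancedContext, tying context mode, token budget and task type together (values below are illustrative):

import { EnhancedContext } from './enhanced-context.js';

const enhancedContext = new EnhancedContext(process.cwd());
await enhancedContext.initialize();
enhancedContext.setContextMode('trimmed');
enhancedContext.setTokenBudget(100000);
const contextResult = await enhancedContext.buildContext('readme');
console.log(contextResult.tokenCount, contextResult.trimmedFiles.length, contextResult.tokenSavings);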

ts/context/index.ts (new file, 32 lines)

@@ -0,0 +1,32 @@
import { EnhancedContext } from './enhanced-context.js';
import { TaskContextFactory } from './task-context-factory.js';
import { ConfigManager } from './config-manager.js';
import { ContextTrimmer } from './context-trimmer.js';
import type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType
} from './types.js';
export {
// Classes
EnhancedContext,
TaskContextFactory,
ConfigManager,
ContextTrimmer,
};
// Types
export type {
ContextMode,
IContextConfig,
IContextResult,
IFileInfo,
ITrimConfig,
ITaskConfig,
TaskType
};


@@ -0,0 +1,138 @@
import * as plugins from '../plugins.js';
import { EnhancedContext } from './enhanced-context.js';
import { ConfigManager } from './config-manager.js';
import type { IContextResult, TaskType } from './types.js';
/**
* Factory class for creating task-specific context
*/
export class TaskContextFactory {
private projectDir: string;
private configManager: ConfigManager;
/**
* Create a new TaskContextFactory
* @param projectDirArg The project directory
*/
constructor(projectDirArg: string) {
this.projectDir = projectDirArg;
this.configManager = ConfigManager.getInstance();
}
/**
* Initialize the factory
*/
public async initialize(): Promise<void> {
await this.configManager.initialize(this.projectDir);
}
/**
* Create context for README generation
*/
public async createContextForReadme(): Promise<IContextResult> {
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
// Get README-specific configuration
const taskConfig = this.configManager.getTaskConfig('readme');
if (taskConfig.mode) {
contextBuilder.setContextMode(taskConfig.mode);
}
// Build the context for README task
return await contextBuilder.buildContext('readme');
}
/**
* Create context for description generation
*/
public async createContextForDescription(): Promise<IContextResult> {
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
// Get description-specific configuration
const taskConfig = this.configManager.getTaskConfig('description');
if (taskConfig.mode) {
contextBuilder.setContextMode(taskConfig.mode);
}
// Build the context for description task
return await contextBuilder.buildContext('description');
}
/**
* Create context for commit message generation
* @param gitDiff Optional git diff to include
*/
public async createContextForCommit(gitDiff?: string): Promise<IContextResult> {
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
// Get commit-specific configuration
const taskConfig = this.configManager.getTaskConfig('commit');
if (taskConfig.mode) {
contextBuilder.setContextMode(taskConfig.mode);
}
// Build the context for commit task
const contextResult = await contextBuilder.buildContext('commit');
// If git diff is provided, add it to the context
if (gitDiff) {
contextBuilder.updateWithGitDiff(gitDiff);
}
return contextBuilder.getContextResult();
}
/**
* Create context for any task type
* @param taskType The task type to create context for
* @param additionalContent Optional additional content to include
*/
public async createContextForTask(
taskType: TaskType,
additionalContent?: string
): Promise<IContextResult> {
switch (taskType) {
case 'readme':
return this.createContextForReadme();
case 'description':
return this.createContextForDescription();
case 'commit':
return this.createContextForCommit(additionalContent);
default:
// Generic context for unknown task types
const contextBuilder = new EnhancedContext(this.projectDir);
await contextBuilder.initialize();
return await contextBuilder.buildContext();
}
}
/**
* Get token stats for all task types
*/
public async getTokenStats(): Promise<Record<TaskType, {
tokenCount: number;
savings: number;
includedFiles: number;
trimmedFiles: number;
excludedFiles: number;
}>> {
const taskTypes: TaskType[] = ['readme', 'description', 'commit'];
const stats: Record<TaskType, any> = {} as any;
for (const taskType of taskTypes) {
const result = await this.createContextForTask(taskType);
stats[taskType] = {
tokenCount: result.tokenCount,
savings: result.tokenSavings,
includedFiles: result.includedFiles.length,
trimmedFiles: result.trimmedFiles.length,
excludedFiles: result.excludedFiles.length
};
}
return stats;
}
}
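And a sketch for TaskContextFactory, which is what the commit, readme and description classes and the tokens CLI command above actually call (the diff string is a placeholder):

import { TaskContextFactory } from './task-context-factory.js';

const taskContextFactory = new TaskContextFactory(process.cwd());
await taskContextFactory.initialize();
const commitContext = await taskContextFactory.createContextForCommit('diff --git a/ts/cli.ts b/ts/cli.ts');
console.log(commitContext.tokenCount);
// per-task statistics, as printed by the `tsdoc tokens --all` path shown earlier
const stats = await taskContextFactory.getTokenStats();
console.log(stats.readme.tokenCount, stats.commit.savings);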

ts/context/types.ts (new file, 95 lines)

@@ -0,0 +1,95 @@
/**
* Context processing mode to control how context is built
*/
export type ContextMode = 'full' | 'trimmed' | 'summarized';
/**
* Configuration for context trimming
*/
export interface ITrimConfig {
/** Whether to remove function implementations */
removeImplementations?: boolean;
/** Whether to preserve interface definitions */
preserveInterfaces?: boolean;
/** Whether to preserve type definitions */
preserveTypeDefs?: boolean;
/** Whether to preserve JSDoc comments */
preserveJSDoc?: boolean;
/** Maximum lines to keep for function bodies (if not removing completely) */
maxFunctionLines?: number;
/** Whether to remove normal comments (non-JSDoc) */
removeComments?: boolean;
/** Whether to remove blank lines */
removeBlankLines?: boolean;
}
/**
* Task types that require different context optimization
*/
export type TaskType = 'readme' | 'commit' | 'description';
/**
* Configuration for different tasks
*/
export interface ITaskConfig {
/** The context mode to use for this task */
mode?: ContextMode;
/** File paths to include for this task */
includePaths?: string[];
/** File paths to exclude for this task */
excludePaths?: string[];
/** For commit tasks, whether to focus on changed files */
focusOnChangedFiles?: boolean;
/** For description tasks, whether to include package info */
includePackageInfo?: boolean;
}
/**
* Complete context configuration
*/
export interface IContextConfig {
/** Maximum tokens to use for context */
maxTokens?: number;
/** Default context mode */
defaultMode?: ContextMode;
/** Task-specific settings */
taskSpecificSettings?: {
[key in TaskType]?: ITaskConfig;
};
/** Trimming configuration */
trimming?: ITrimConfig;
}
/**
* Basic file information interface
*/
export interface IFileInfo {
/** The file path */
path: string;
/** The file contents */
contents: string;
/** The file's relative path from the project root */
relativePath: string;
/** The estimated token count of the file */
tokenCount?: number;
/** The file's importance score (higher is more important) */
importanceScore?: number;
}
/**
* Result of context building
*/
export interface IContextResult {
/** The generated context string */
context: string;
/** The total token count of the context */
tokenCount: number;
/** Files included in the context */
includedFiles: IFileInfo[];
/** Files that were trimmed */
trimmedFiles: IFileInfo[];
/** Files that were excluded */
excludedFiles: IFileInfo[];
/** Token savings from trimming */
tokenSavings: number;
}


@@ -1,6 +1,12 @@
- import * as early from '@pushrocks/early';
+ import * as early from '@push.rocks/early';
  early.start('tsdoc');
- import * as plugins from './tsdoc.plugins';
- import * as cli from './tsdoc.cli';
+ import * as plugins from './plugins.js';
+ import * as cli from './cli.js';
  early.stop();
- cli.run();
+ export const runCli = async () => {
+   await cli.run();
+ };
+ // exports
+ export * from './classes.aidoc.js';

ts/logging.ts (new file, 6 lines)

@@ -0,0 +1,6 @@
import { commitinfo } from './00_commitinfo_data.js';
import * as plugins from './plugins.js';
export const logger = plugins.smartlog.Smartlog.createForCommitinfo(commitinfo);
logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());

ts/paths.ts (new file, 16 lines)

@@ -0,0 +1,16 @@
import * as plugins from './plugins.js';
// dirs
export const packageDir = plugins.path.join(
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
'../',
);
export const cwd = process.cwd();
export const binDir = plugins.path.join(packageDir, './node_modules/.bin');
export const assetsDir = plugins.path.join(packageDir, './assets');
export const publicDir = plugins.path.join(cwd, './public');
export const tsDir = plugins.path.join(cwd, './ts');
// files
export const tsconfigFile = plugins.path.join(assetsDir, './tsconfig.json');
export const typedocOptionsFile = plugins.path.join(assetsDir, './typedoc.json');

ts/plugins.ts (new file, 46 lines)

@@ -0,0 +1,46 @@
// node native
import * as path from 'path';
export { path };
// pushrocks scope
import * as npmextra from '@push.rocks/npmextra';
import * as qenv from '@push.rocks/qenv';
import * as smartai from '@push.rocks/smartai';
import * as smartcli from '@push.rocks/smartcli';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import * as smartgit from '@push.rocks/smartgit';
import * as smartinteract from '@push.rocks/smartinteract';
import * as smartlog from '@push.rocks/smartlog';
import * as smartlogDestinationLocal from '@push.rocks/smartlog-destination-local';
import * as smartpath from '@push.rocks/smartpath';
import * as smartshell from '@push.rocks/smartshell';
import * as smarttime from '@push.rocks/smarttime';
export {
npmextra,
qenv,
smartai,
smartcli,
smartdelay,
smartfile,
smartgit,
smartinteract,
smartlog,
smartlogDestinationLocal,
smartpath,
smartshell,
smarttime,
};
// @git.zone scope
import * as tspublish from '@git.zone/tspublish';
export { tspublish };
// third party scope
import * as typedoc from 'typedoc';
import * as gptTokenizer from 'gpt-tokenizer';
export { typedoc, gptTokenizer };


@@ -1,67 +0,0 @@
import * as plugins from './tsdoc.plugins';
import * as paths from './tsdoc.paths';
export class MkDocs {
  public smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    pathDirectories: [paths.binDir]
  });

  public static async isMkDocsDir(dirPathArg: string): Promise<boolean> {
    const result = await plugins.smartfile.fs.fileExists(
      plugins.path.join(dirPathArg, 'mkdocs.yml')
    );
    return result;
  }

  public static async handleCommand(argvArg) {
    const mkdocsInstance = new MkDocs(paths.cwd);
    switch (true) {
      case argvArg.serve:
        await mkdocsInstance.serve();
        break;
      case argvArg.publish:
        await mkdocsInstance.publish();
        break;
      default:
        await mkdocsInstance.compile();
        break;
    }
  }

  // Instance
  public typedocDirectory: string;

  constructor(dirPathArg) {
    this.typedocDirectory = dirPathArg;
  }

  public async update() {
    await this.smartshellInstance.exec(
      `docker pull registry.gitlab.com/hosttoday/ht-docker-mkdocs`
    );
  }

  public async compile() {
    await this.update();
    await this.smartshellInstance.exec(`rm -rf public/`);
    await this.smartshellInstance.exec(
      `docker run --rm -p 8000:8000 -v ${
        paths.cwd
      }:/docs registry.gitlab.com/hosttoday/ht-docker-mkdocs build`
    );
  }

  public async serve() {
    await this.update();
    await this.smartshellInstance.exec(
      `docker run --rm -p 8000:8000 -v ${
        paths.cwd
      }:/docs registry.gitlab.com/hosttoday/ht-docker-mkdocs`
    );
  }

  public async publish() {
    await this.compile();
    await this.smartshellInstance.exec(`gitzone commit`);
  }
}

ts/tsdoc.classes.typedoc.ts

@@ -1,29 +0,0 @@
import * as plugins from './tsdoc.plugins';
import * as paths from './tsdoc.paths';
export class TypeDoc {
  public smartshellInstance = new plugins.smartshell.Smartshell({
    executor: 'bash',
    pathDirectories: [paths.binDir]
  });

  // Static
  public static async isTypeDocDir(dirPathArg: string): Promise<boolean> {
    const result = await plugins.smartfile.fs.fileExists(
      plugins.path.join(dirPathArg, 'mkdocs.yml')
    );
    return !result;
  }

  // Instance
  public typedocDirectory: string;

  constructor(dirPathArg) {
    this.typedocDirectory = dirPathArg;
  }

  public async compile() {
    await this.smartshellInstance.exec(
      `typedoc --tsconfig ${paths.tsconfigFile} --out public/ ts/`
    );
  }
}

ts/tsdoc.cli.ts

@@ -1,43 +0,0 @@
import * as plugins from './tsdoc.plugins';
import * as paths from './tsdoc.paths';
import { logger } from './tsdoc.logging';
import { TypeDoc } from './tsdoc.classes.typedoc';
import { MkDocs } from './tsdoc.classes.mkdocs';
export const run = async () => {
  const tsdocCli = new plugins.smartcli.Smartcli();

  tsdocCli.addCommand('typedoc').subscribe(async argvArg => {
    const typeDocInstance = new TypeDoc(paths.cwd);
    await typeDocInstance.compile();
  });

  tsdocCli.addCommand('mkdocs').subscribe(async argvArg => {
    await MkDocs.handleCommand(argvArg);
  });

  tsdocCli.standardTask().subscribe(async argvArg => {
    logger.log('warn', `Auto detecting environment!`);
    switch (true) {
      case await TypeDoc.isTypeDocDir(paths.cwd):
        logger.log('ok', `Detected TypeDoc compliant directory at ${paths.cwd}`);
        tsdocCli.trigger('typedoc');
        break;
      case await MkDocs.isMkDocsDir(paths.cwd):
        logger.log('ok', `Detected MkDocs compliant directory at ${paths.cwd}`);
        tsdocCli.trigger('mkdocs');
        break;
      default:
        logger.log('error', `Cannot determine docs format at ${paths.cwd}`);
    }
  });

  tsdocCli.addCommand('test').subscribe(argvArg => {
    tsdocCli.trigger('typedoc');
    process.on('exit', async () => {
      await plugins.smartfile.fs.remove(paths.publicDir);
    });
  });

  tsdocCli.startParse();
};

ts/tsdoc.logging.ts

@@ -1,15 +0,0 @@
import * as plugins from './tsdoc.plugins';
export const logger = new plugins.smartlog.Smartlog({
  logContext: {
    company: 'Some Company',
    companyunit: 'Some CompanyUnit',
    containerName: 'Some Containername',
    environment: 'local',
    runtime: 'node',
    zone: 'gitzone'
  },
  minimumLogLevel: 'silly'
});
logger.addLogDestination(new plugins.smartlogDestinationLocal.DestinationLocal());

ts/tsdoc.paths.ts

@@ -1,11 +0,0 @@
import * as plugins from './tsdoc.plugins';
// dirs
export const packageDir = plugins.path.join(__dirname, '../');
export const cwd = process.cwd();
export const binDir = plugins.path.join(packageDir, './node_modules/.bin');
export const assetsDir = plugins.path.join(packageDir, './assets');
export const publicDir = plugins.path.join(packageDir, './public');
// files
export const tsconfigFile = plugins.path.join(assetsDir, './tsconfig.json');

ts/tsdoc.plugins.ts

@@ -1,18 +0,0 @@
// node native
import * as path from 'path';
export { path };
// pushrocks scope
import * as smartcli from '@pushrocks/smartcli';
import * as smartfile from '@pushrocks/smartfile';
import * as smartlog from '@pushrocks/smartlog';
import * as smartlogDestinationLocal from '@pushrocks/smartlog-destination-local';
import * as smartshell from '@pushrocks/smartshell';
export { smartcli, smartfile, smartlog, smartlogDestinationLocal, smartshell };
// third party scope
import * as typedoc from 'typedoc';
export { typedoc };

tsconfig.json Normal file

@@ -0,0 +1,14 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
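With `"verbatimModuleSyntax": true`, imports are emitted exactly as written, so type-only imports must be marked explicitly to be erased at compile time. A small illustrative example (the interface and module names are hypothetical):

```typescript
// Illustrative effect of "verbatimModuleSyntax": true.
import type { ITypedocOptions } from './some.interfaces.js'; // type-only: erased from the emitted JS (hypothetical module)
import { logger } from './logging.js'; // value import: kept verbatim in the emitted JS
```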

tslint.json

@@ -1,17 +0,0 @@
{
  "extends": ["tslint:latest", "tslint-config-prettier"],
  "rules": {
    "semicolon": [true, "always"],
    "no-console": false,
    "ordered-imports": false,
    "object-literal-sort-keys": false,
    "member-ordering": {
      "options": {
        "order": [
          "static-method"
        ]
      }
    }
  },
  "defaultSeverity": "warning"
}