fix(npmextra): update to new format

package.json (21)

@@ -19,29 +19,30 @@
     "buildDocs": "tsdoc"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.7.1",
+    "@git.zone/tsbuild": "^3.1.2",
-    "@git.zone/tsrun": "^1.6.2",
+    "@git.zone/tsrun": "^2.0.1",
-    "@git.zone/tstest": "^2.7.0",
+    "@git.zone/tstest": "^3.1.3",
-    "@types/node": "^22.15.17"
+    "@types/node": "^25.0.1"
   },
   "dependencies": {
     "@git.zone/tspublish": "^1.10.3",
-    "@push.rocks/early": "^4.0.3",
+    "@push.rocks/early": "^4.0.4",
     "@push.rocks/npmextra": "^5.3.3",
     "@push.rocks/qenv": "^6.1.3",
     "@push.rocks/smartai": "^0.8.0",
     "@push.rocks/smartcli": "^4.0.19",
     "@push.rocks/smartdelay": "^3.0.5",
-    "@push.rocks/smartfile": "^11.2.7",
+    "@push.rocks/smartfile": "^13.1.0",
+    "@push.rocks/smartfs": "^1.2.0",
     "@push.rocks/smartgit": "^3.3.1",
-    "@push.rocks/smartinteract": "^2.0.15",
+    "@push.rocks/smartinteract": "^2.0.16",
     "@push.rocks/smartlog": "^3.1.10",
     "@push.rocks/smartlog-destination-local": "^9.0.2",
     "@push.rocks/smartpath": "^6.0.0",
     "@push.rocks/smartshell": "^3.3.0",
-    "@push.rocks/smarttime": "^4.0.6",
+    "@push.rocks/smarttime": "^4.1.1",
-    "gpt-tokenizer": "^3.2.0",
+    "gpt-tokenizer": "^3.4.0",
-    "typedoc": "^0.28.14",
+    "typedoc": "^0.28.15",
     "typescript": "^5.9.3"
   },
   "files": [
pnpm-lock.yaml (generated, 3332)
Diff not shown: file too large.
@@ -1,5 +0,0 @@
-onlyBuiltDependencies:
-  - esbuild
-  - mongodb-memory-server
-  - puppeteer
-  - sharp
readme.md (265)
@@ -1,15 +1,18 @@
 # @git.zone/tsdoc 🚀
-**AI-Powered Documentation for TypeScript Projects**

-> Stop writing documentation. Let AI understand your code and do it for you.
+AI-Powered Documentation for TypeScript Projects

+## Issue Reporting and Security

+For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.

 ## What is tsdoc?

-`@git.zone/tsdoc` is a next-generation documentation tool that combines traditional TypeDoc generation with cutting-edge AI to create comprehensive, intelligent documentation for your TypeScript projects. It reads your code, understands it, and writes documentation that actually makes sense.
+`@git.zone/tsdoc` is a next-generation documentation CLI tool that combines traditional TypeDoc generation with cutting-edge AI to create comprehensive, intelligent documentation for your TypeScript projects. It reads your code, understands it, and writes documentation that actually makes sense.

 ### ✨ Key Features

-- **🤖 AI-Enhanced Documentation** - Leverages GPT-5 and other models to generate contextual READMEs
+- **🤖 AI-Enhanced Documentation** - Leverages AI to generate contextual READMEs
 - **🧠 Smart Context Building** - Intelligent file prioritization with dependency analysis and caching
 - **📚 TypeDoc Integration** - Classic API documentation generation when you need it
 - **💬 Smart Commit Messages** - AI analyzes your changes and suggests meaningful commit messages
@@ -22,9 +25,6 @@

 ```bash
 # Global installation (recommended)
-npm install -g @git.zone/tsdoc

-# Or with pnpm
 pnpm add -g @git.zone/tsdoc

 # Or use with npx
@@ -66,7 +66,6 @@ tsdoc commit
 | `tsdoc typedoc` | Generate TypeDoc documentation |
 | `tsdoc commit` | Generate smart commit message |
 | `tsdoc tokens` | Analyze token usage for AI context |
-| `tsdoc context` | Display context information |

 ### Token Analysis

@@ -79,91 +78,25 @@ tsdoc tokens
 # Show detailed stats for all task types
 tsdoc tokens --all

-# Test with trimmed context
+# Show detailed breakdown with file listing
-tsdoc tokens --trim
+tsdoc tokens --detailed --listFiles
 ```

-## Programmatic Usage
+### Command Options

-### Generate Documentation Programmatically
+#### tsdoc aidoc
+- `--tokens` / `--showTokens` - Show token count before generating
+- `--tokensOnly` - Only show token count, don't generate

-```typescript
+#### tsdoc typedoc
-import { AiDoc } from '@git.zone/tsdoc';
+- `--publicSubdir <dir>` - Output subdirectory within public folder

-const generateDocs = async () => {
+#### tsdoc tokens
-const aiDoc = new AiDoc({ OPENAI_TOKEN: 'your-token' });
+- `--task <type>` - Specify task type: `readme`, `commit`, or `description`
-await aiDoc.start();
+- `--all` - Show stats for all task types
+- `--detailed` - Show detailed token usage and costs
-// Generate README
+- `--listFiles` - List all files included in context
-await aiDoc.buildReadme('./');
+- `--model <name>` - Show usage for specific model (`gpt4`, `gpt35`)

-// Update package.json description
-await aiDoc.buildDescription('./');

-// Get smart commit message
-const commit = await aiDoc.buildNextCommitObject('./');
-console.log(commit.recommendedNextVersionMessage);

-// Don't forget to stop when done
-await aiDoc.stop();
-};
-```

-### TypeDoc Generation

-```typescript
-import { TypeDoc } from '@git.zone/tsdoc';

-const typeDoc = new TypeDoc(process.cwd());
-await typeDoc.compile({ publicSubdir: 'docs' });
-```

-### Smart Context Management

-Control how tsdoc processes your codebase with the new intelligent context system:

-```typescript
-import { EnhancedContext, ContextAnalyzer, LazyFileLoader, ContextCache } from '@git.zone/tsdoc';

-const context = new EnhancedContext('./');
-await context.initialize();

-// Set token budget
-context.setTokenBudget(100000);

-// Choose context mode
-context.setContextMode('trimmed'); // 'full' | 'trimmed' | 'summarized'

-// Build optimized context with smart prioritization
-const result = await context.buildContext('readme');
-console.log(`Tokens used: ${result.tokenCount}`);
-console.log(`Files included: ${result.includedFiles.length}`);
-console.log(`Token savings: ${result.tokenSavings}`);
-```

-### Advanced: Using Individual Context Components

-```typescript
-import { LazyFileLoader, ContextAnalyzer, ContextCache } from '@git.zone/tsdoc';

-// Lazy file loading - scan metadata without loading contents
-const loader = new LazyFileLoader('./');
-const metadata = await loader.scanFiles(['ts/**/*.ts']);
-console.log(`Found ${metadata.length} files`);

-// Analyze and prioritize files
-const analyzer = new ContextAnalyzer('./');
-const analysis = await analyzer.analyze(metadata, 'readme');

-// Files are sorted by importance with dependency analysis
-for (const file of analysis.files) {
-console.log(`${file.path}: score ${file.importanceScore.toFixed(2)}, tier ${file.tier}`);
-}

-// Context caching for performance
-const cache = new ContextCache('./', { enabled: true, ttl: 3600 });
-await cache.init();
-```

 ## Configuration

@@ -171,7 +104,8 @@ Configure tsdoc via `npmextra.json`:

 ```json
 {
-  "tsdoc": {
+  "@git.zone/tsdoc": {
+    "legal": "## License and Legal Information\n\n...",
     "context": {
       "maxTokens": 190000,
       "defaultMode": "trimmed",
@@ -181,7 +115,6 @@ Configure tsdoc via `npmextra.json`:
         "maxSize": 100
       },
       "analyzer": {
-        "enabled": true,
         "useAIRefinement": false
       },
       "prioritization": {
@@ -234,11 +167,6 @@ Configure tsdoc via `npmextra.json`:
 - **maxSize** - Maximum cache size in MB (default: 100)
 - **directory** - Cache directory path (default: .nogit/context-cache)

-#### Analyzer Configuration
-- **enabled** - Enable smart file analysis (default: true)
-- **useAIRefinement** - Use AI for additional context refinement (default: false)
-- **aiModel** - Model for AI refinement (default: 'haiku')

 ## How It Works

 ### 🚀 Smart Context Building Pipeline
@@ -270,21 +198,14 @@ The smart context system delivers significant improvements:
 | **Relevance** | Alphabetical sorting | Smart scoring | 🎯 90%+ relevant |
 | **Cache Hits** | None | 70-80% | 🚀 Major speedup |

-### Traditional Context Optimization

-For projects where the analyzer is disabled, tsdoc still employs:

-- **Intelligent Trimming** - Removes implementation details while preserving signatures
-- **JSDoc Preservation** - Keeps documentation comments
-- **Interface Prioritization** - Type definitions always included
-- **Token Budgeting** - Ensures optimal use of AI context windows

 ## Environment Variables

 | Variable | Description |
 |----------|-------------|
 | `OPENAI_TOKEN` | Your OpenAI API key for AI features (required) |

+The token can also be provided interactively on first run - it will be persisted in `~/.npmextra/kv/@git.zone/tsdoc.json`.

 ## Use Cases

 ### 🚀 Continuous Integration
@@ -336,103 +257,6 @@ tsdoc commit > .git/COMMIT_EDITMSG
 }
 ```

-## Advanced Features

-### Multi-Module Projects

-tsdoc automatically detects and documents multi-module projects:

-```typescript
-const aiDoc = new AiDoc();
-await aiDoc.start();

-// Process main project
-await aiDoc.buildReadme('./');

-// Process submodules
-for (const module of ['packages/core', 'packages/cli']) {
-await aiDoc.buildReadme(module);
-}

-await aiDoc.stop();
-```

-### Custom Context Building

-Fine-tune what gets sent to AI with task-specific contexts:

-```typescript
-import { TaskContextFactory } from '@git.zone/tsdoc';

-const factory = new TaskContextFactory('./');
-await factory.initialize();

-// Get optimized context for specific tasks
-const readmeContext = await factory.createContextForReadme();
-const commitContext = await factory.createContextForCommit();
-const descContext = await factory.createContextForDescription();
-```

-### Dependency Graph Analysis

-Understand your codebase structure:

-```typescript
-import { ContextAnalyzer } from '@git.zone/tsdoc';

-const analyzer = new ContextAnalyzer('./');
-const analysis = await analyzer.analyze(metadata, 'readme');

-// Explore dependency graph
-for (const [path, deps] of analysis.dependencyGraph) {
-console.log(`${path}:`);
-console.log(` Imports: ${deps.imports.length}`);
-console.log(` Imported by: ${deps.importedBy.length}`);
-console.log(` Centrality: ${deps.centrality.toFixed(3)}`);
-}
-```

-## Performance & Optimization

-### ⚡ Performance Features

-- **Lazy Loading** - Files scanned for metadata before content loading
-- **Parallel Processing** - Multiple files loaded simultaneously
-- **Smart Caching** - Results cached with mtime-based invalidation
-- **Incremental Updates** - Only reprocess changed files
-- **Streaming** - Minimal memory footprint

-### 💰 Cost Optimization

-The smart context system significantly reduces AI API costs:

-```typescript
-// Check token usage before and after optimization
-import { EnhancedContext } from '@git.zone/tsdoc';

-const context = new EnhancedContext('./');
-await context.initialize();

-// Build with analyzer enabled
-const result = await context.buildContext('readme');
-console.log(`Tokens: ${result.tokenCount}`);
-console.log(`Savings: ${result.tokenSavings} (${(result.tokenSavings/result.tokenCount*100).toFixed(1)}%)`);
-```

-### 📊 Token Analysis

-Monitor and optimize your token usage:

-```bash
-# Analyze current token usage
-tsdoc tokens

-# Compare modes
-tsdoc tokens --mode full # No optimization
-tsdoc tokens --mode trimmed # Standard optimization
-tsdoc tokens --analyze # With smart prioritization
-```

 ## Requirements

 - **Node.js** >= 18.0.0
@@ -446,21 +270,15 @@ tsdoc tokens --analyze # With smart prioritization
 If you hit token limits, try:

 ```bash
-# Enable smart analyzer (default)
-tsdoc aidoc

-# Use aggressive trimming
-tsdoc aidoc --trim

 # Check token usage details
-tsdoc tokens --all --analyze
+tsdoc tokens --all --detailed
 ```

-Or configure stricter limits:
+Or configure stricter limits in `npmextra.json`:

 ```json
 {
-  "tsdoc": {
+  "@git.zone/tsdoc": {
     "context": {
       "maxTokens": 100000,
       "tiers": {
@@ -484,19 +302,16 @@ tsdoc aidoc

 ### Slow Performance

-Enable caching and adjust settings:
+Enable caching and adjust settings in `npmextra.json`:

 ```json
 {
-  "tsdoc": {
+  "@git.zone/tsdoc": {
     "context": {
       "cache": {
         "enabled": true,
         "ttl": 7200,
         "maxSize": 200
-      },
-      "analyzer": {
-        "enabled": true
       }
     }
   }
@@ -525,9 +340,6 @@ Regenerate documentation with every change. Smart dependency analysis ensures no
 ### 🎨 Beautiful Output
 Clean, professional documentation every time. AI understands your code's purpose and explains it clearly.

-### 🛠️ Developer-Friendly
-Built by developers, for developers. Sensible defaults, powerful configuration, and extensive programmatic API.

 ### 💰 Cost-Effective
 Smart context optimization reduces AI API costs by 40-60% without sacrificing quality.

@@ -545,6 +357,7 @@ Smart context optimization reduces AI API costs by 40-60% without sacrificing qu
 │ ├── ContextCache # Performance caching
 │ ├── ContextAnalyzer # Intelligent file analysis
 │ ├── ContextTrimmer # Adaptive code trimming
+│ ├── DiffProcessor # Git diff optimization
 │ ├── ConfigManager # Configuration management
 │ └── TaskContextFactory # Task-specific contexts
 └── CLI # Command-line interface
@@ -567,30 +380,28 @@ ContextTrimmer (tier-based)
 ↓
 Token Budget (enforcement)
 ↓
-AI Model (GPT-5)
+AI Model
 ↓
 Generated Documentation
 ```

-## Contributing

-We appreciate your interest! However, we are not accepting external contributions at this time. If you find bugs or have feature requests, please open an issue.

 ## License and Legal Information

-This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
+This repository contains open-source code licensed under the MIT License. A copy of the license can be found in the [LICENSE](./LICENSE) file.

 **Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

 ### Trademarks

-This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
+This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH or third parties, and are not included within the scope of the MIT license granted herein.

+Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines or the guidelines of the respective third-party owners, and any usage must be approved in writing. Third-party trademarks used herein are the property of their respective owners and used only in a descriptive manner, e.g. for an implementation of an API or similar.

 ### Company Information

 Task Venture Capital GmbH
-Registered at District court Bremen HRB 35230 HB, Germany
+Registered at District Court Bremen HRB 35230 HB, Germany

-For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
+For any legal inquiries or further information, please contact us via email at hello@task.vc.

 By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
@@ -175,8 +175,8 @@ Never mention CLAUDE code, or codex.

 const previousChangelogPath = plugins.path.join(this.projectDir, 'changelog.md');
 let previousChangelog: plugins.smartfile.SmartFile;
-if (await plugins.smartfile.fs.fileExists(previousChangelogPath)) {
+if (await plugins.fsInstance.file(previousChangelogPath).exists()) {
-previousChangelog = await plugins.smartfile.SmartFile.fromFilePath(previousChangelogPath);
+previousChangelog = await plugins.smartfileFactory.fromFilePath(previousChangelogPath);
 }

 if (!previousChangelog) {
@@ -207,7 +207,7 @@ ${JSON.stringify(commitMessages, null, 2)}
 `,
 });

-previousChangelog = await plugins.smartfile.SmartFile.fromString(
+previousChangelog = plugins.smartfileFactory.fromString(
 previousChangelogPath,
 result2.message.replaceAll('```markdown', '').replaceAll('```', ''),
 'utf8'
@@ -65,8 +65,8 @@ Don't wrap the JSON in three ticks json!!!
 const npmextraJson = files.smartfilesNpmextraJSON;
 const npmextraJsonContent = JSON.parse(npmextraJson.contents.toString());

-npmextraJsonContent.gitzone.module.description = resultObject.description;
+npmextraJsonContent['@git.zone/cli'].module.description = resultObject.description;
-npmextraJsonContent.gitzone.module.keywords = resultObject.keywords;
+npmextraJsonContent['@git.zone/cli'].module.keywords = resultObject.keywords;

 npmextraJson.contents = Buffer.from(JSON.stringify(npmextraJsonContent, null, 2));
 await npmextraJson.write();
@@ -13,31 +13,29 @@ export class ProjectContext {
 }

 public async gatherFiles() {
-const smartfilePackageJSON = await plugins.smartfile.SmartFile.fromFilePath(
+const smartfilePackageJSON = await plugins.smartfileFactory.fromFilePath(
 plugins.path.join(this.projectDir, 'package.json'),
 this.projectDir,
 );
-const smartfilesReadme = await plugins.smartfile.SmartFile.fromFilePath(
+const smartfilesReadme = await plugins.smartfileFactory.fromFilePath(
 plugins.path.join(this.projectDir, 'readme.md'),
 this.projectDir,
 );

-const smartfilesReadmeHints = await plugins.smartfile.SmartFile.fromFilePath(
+const smartfilesReadmeHints = await plugins.smartfileFactory.fromFilePath(
 plugins.path.join(this.projectDir, 'readme.hints.md'),
 this.projectDir,
 );
-const smartfilesNpmextraJSON = await plugins.smartfile.SmartFile.fromFilePath(
+const smartfilesNpmextraJSON = await plugins.smartfileFactory.fromFilePath(
 plugins.path.join(this.projectDir, 'npmextra.json'),
 this.projectDir,
 );
-const smartfilesMod = await plugins.smartfile.fs.fileTreeToObject(
+const smartfilesMod = await plugins.smartfileFactory.virtualDirectoryFromPath(
 this.projectDir,
-'ts*/**/*.ts',
+).then(vd => vd.filter(f => f.relative.startsWith('ts') && f.relative.endsWith('.ts')).listFiles());
-);
+const smartfilesTest = await plugins.smartfileFactory.virtualDirectoryFromPath(
-const smartfilesTest = await plugins.smartfile.fs.fileTreeToObject(
 this.projectDir,
-'test/**/*.ts',
+).then(vd => vd.filter(f => f.relative.startsWith('test/') && f.relative.endsWith('.ts')).listFiles());
-);
 return {
 smartfilePackageJSON,
 smartfilesReadme,
@@ -36,7 +36,7 @@ export class Readme {
 const npmExtraJson = JSON.parse(
 (await projectContext.gatherFiles()).smartfilesNpmextraJSON.contents.toString()
 );
-const legalInfo = npmExtraJson?.tsdoc?.legal;
+const legalInfo = npmExtraJson?.['@git.zone/tsdoc']?.legal;
 if (!legalInfo) {
 const error = new Error(`No legal information found in npmextra.json`);
 console.log(error);

@@ -105,7 +105,7 @@ The Readme should follow the following template:

 IMPORTANT: YOU ARE NOW CREATING THE README FOR THE FOLLOWING SUB MODULE: ${subModule} !!!!!!!!!!!
 The Sub Module will be published with the following data:
-${JSON.stringify(plugins.smartfile.fs.toStringSync(plugins.path.join(paths.cwd, subModule, 'tspublish.json')), null, 2)}
+${JSON.stringify(await plugins.fsInstance.file(plugins.path.join(paths.cwd, subModule, 'tspublish.json')).encoding('utf8').read(), null, 2)}

 The Readme should follow the following template:

@@ -147,7 +147,7 @@ The Readme should follow the following template:
 });

 const subModuleReadmeString = result.message + '\n' + legalInfo;
-await plugins.smartfile.memory.toFs(subModuleReadmeString, plugins.path.join(paths.cwd, subModule, 'readme.md'));
+await plugins.fsInstance.file(plugins.path.join(paths.cwd, subModule, 'readme.md')).encoding('utf8').write(subModuleReadmeString);
 logger.log('success', `Built readme for ${subModule}`);
 }
 return result.message;
@@ -36,9 +36,25 @@ export class AiDoc {
 this.aidocInteract = new plugins.smartinteract.SmartInteract();
 this.qenvInstance = new plugins.qenv.Qenv();
 if (!(await this.qenvInstance.getEnvVarOnDemand('OPENAI_TOKEN'))) {
+// Migrate old KV store path to new path if needed
+const homeDir = plugins.smartpath.get.home();
+const oldKvPath = plugins.path.join(homeDir, '.npmextra/kv/tsdoc.json');
+const newKvDir = plugins.path.join(homeDir, '.npmextra/kv/@git.zone');
+const newKvPath = plugins.path.join(newKvDir, 'tsdoc.json');
+if (
+await plugins.fsInstance.file(oldKvPath).exists() &&
+!(await plugins.fsInstance.file(newKvPath).exists())
+) {
+console.log('Migrating tsdoc KeyValueStore to @git.zone/tsdoc...');
+await plugins.fsInstance.directory(newKvDir).recursive().create();
+await plugins.fsInstance.file(oldKvPath).copy(newKvPath);
+await plugins.fsInstance.file(oldKvPath).delete();
+console.log('Migration complete: tsdoc.json -> @git.zone/tsdoc.json');
+}

 this.npmextraKV = new plugins.npmextra.KeyValueStore({
 typeArg: 'userHomeDir',
-identityArg: 'tsdoc',
+identityArg: '@git.zone/tsdoc',
 mandatoryKeys: ['OPENAI_TOKEN'],
 });

@@ -33,19 +33,19 @@ export class TypeDoc {
 include: [],
 };
 let startDirectory = '';
-if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts'))) {
+if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts')).exists()) {
 data.include.push(plugins.path.join(paths.cwd, './ts/**/*'));
 startDirectory = 'ts';
 }

-if (plugins.smartfile.fs.isDirectory(plugins.path.join(paths.cwd, './ts_web'))) {
+if (await plugins.fsInstance.directory(plugins.path.join(paths.cwd, './ts_web')).exists()) {
 data.include.push(plugins.path.join(paths.cwd, './ts_web/**/*'));
 if (!startDirectory) {
 startDirectory = 'ts_web';
 }
 }

-await plugins.smartfile.memory.toFs(JSON.stringify(data), paths.tsconfigFile);
+await plugins.fsInstance.file(paths.tsconfigFile).encoding('utf8').write(JSON.stringify(data));
 let targetDir = paths.publicDir;
 if (options?.publicSubdir) {
 targetDir = plugins.path.join(targetDir, options.publicSubdir);

@@ -53,6 +53,6 @@ export class TypeDoc {
 await this.smartshellInstance.exec(
 `typedoc --tsconfig ${paths.tsconfigFile} --out ${targetDir} ${startDirectory}/index.ts`,
 );
-plugins.smartfile.fs.remove(paths.tsconfigFile);
+await plugins.fsInstance.file(paths.tsconfigFile).delete();
 }
 }
@@ -152,7 +152,7 @@ export const run = async () => {
 tsdocCli.addCommand('test').subscribe((argvArg) => {
 tsdocCli.triggerCommand('typedoc', argvArg);
 process.on('exit', async () => {
-await plugins.smartfile.fs.remove(paths.publicDir);
+await plugins.fsInstance.directory(paths.publicDir).recursive().delete();
 });
 });

@@ -122,7 +122,7 @@ export class ConfigManager {
 const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');

 // Check if file exists
-const fileExists = await plugins.smartfile.fs.fileExists(npmextraJsonPath);
+const fileExists = await plugins.fsInstance.file(npmextraJsonPath).exists();
 if (!fileExists) {
 return;
 }

@@ -138,13 +138,13 @@ export class ConfigManager {
 }

 // Read the npmextra.json file
-const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
+const npmextraJsonFile = await plugins.smartfileFactory.fromFilePath(npmextraJsonPath);
 const npmextraContent = JSON.parse(npmextraJsonFile.contents.toString());

 // Check for tsdoc context configuration
-if (npmextraContent?.tsdoc?.context) {
+if (npmextraContent?.['@git.zone/tsdoc']?.context) {
 // Merge with default config
-this.config = this.mergeConfigs(this.config, npmextraContent.tsdoc.context);
+this.config = this.mergeConfigs(this.config, npmextraContent['@git.zone/tsdoc'].context);
 }

 // Cache the config

@@ -292,8 +292,8 @@ export class ConfigManager {
 const npmextraJsonPath = plugins.path.join(this.projectDir, 'npmextra.json');
 let npmextraContent = {};

-if (await plugins.smartfile.fs.fileExists(npmextraJsonPath)) {
+if (await plugins.fsInstance.file(npmextraJsonPath).exists()) {
-const npmextraJsonFile = await plugins.smartfile.SmartFile.fromFilePath(npmextraJsonPath);
+const npmextraJsonFile = await plugins.smartfileFactory.fromFilePath(npmextraJsonPath);
 npmextraContent = JSON.parse(npmextraJsonFile.contents.toString()) || {};
 }

@@ -304,7 +304,7 @@ export class ConfigManager {

 // Write back to npmextra.json
 const updatedContent = JSON.stringify(npmextraContent, null, 2);
-await plugins.smartfile.memory.toFs(updatedContent, npmextraJsonPath);
+await plugins.fsInstance.file(npmextraJsonPath).encoding('utf8').write(updatedContent);
 } catch (error) {
 console.error('Error updating context configuration:', error);
 }
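
For orientation, a minimal sketch of reading the renamed configuration scope handled by the ConfigManager changes above (the `./plugins.js` import path and the standalone layout are assumptions for illustration; the calls mirror the ones introduced in this commit):

```typescript
import * as plugins from './plugins.js'; // assumed path to the shared plugins barrel

// Read npmextra.json via the shared SmartFs instance and pick up the
// "@git.zone/tsdoc" scope that replaces the old top-level "tsdoc" key.
const npmextraJsonPath = plugins.path.join(process.cwd(), 'npmextra.json');
if (await plugins.fsInstance.file(npmextraJsonPath).exists()) {
  const raw = await plugins.fsInstance.file(npmextraJsonPath).encoding('utf8').read() as string;
  const npmextraContent = JSON.parse(raw);
  const contextConfig = npmextraContent?.['@git.zone/tsdoc']?.context ?? {};
  console.log('maxTokens:', contextConfig.maxTokens);
}
```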
@@ -116,7 +116,7 @@ export class ContextAnalyzer {
 // Parse imports from each file
 for (const meta of metadata) {
 try {
-const contents = await plugins.smartfile.fs.toStringSync(meta.path);
+const contents = await plugins.fsInstance.file(meta.path).encoding('utf8').read() as string;
 const imports = this.extractImports(contents, meta.path);

 const deps = graph.get(meta.path)!;
@@ -39,13 +39,13 @@ export class ContextCache {
 }

 // Ensure cache directory exists
-await plugins.smartfile.fs.ensureDir(this.cacheDir);
+await plugins.fsInstance.directory(this.cacheDir).recursive().create();

 // Load cache index if it exists
 try {
-const indexExists = await plugins.smartfile.fs.fileExists(this.cacheIndexPath);
+const indexExists = await plugins.fsInstance.file(this.cacheIndexPath).exists();
 if (indexExists) {
-const indexContent = await plugins.smartfile.fs.toStringSync(this.cacheIndexPath);
+const indexContent = await plugins.fsInstance.file(this.cacheIndexPath).encoding('utf8').read() as string;
 const indexData = JSON.parse(indexContent) as ICacheEntry[];
 if (Array.isArray(indexData)) {
 for (const entry of indexData) {

@@ -278,7 +278,7 @@ export class ContextCache {
 try {
 const entries = Array.from(this.cache.values());
 const content = JSON.stringify(entries, null, 2);
-await plugins.smartfile.memory.toFs(content, this.cacheIndexPath);
+await plugins.fsInstance.file(this.cacheIndexPath).encoding('utf8').write(content);
 } catch (error) {
 console.warn('Failed to persist cache index:', error.message);
 }
@@ -120,7 +120,7 @@ export class EnhancedContext {
 originalTokenCount = cached.tokenCount;
 } else {
 // Load file
-const fileData = await plugins.smartfile.fs.toStringSync(fileAnalysis.path);
+const fileData = await plugins.fsInstance.file(fileAnalysis.path).encoding('utf8').read() as string;
 contents = fileData;
 originalTokenCount = this.countTokens(contents);

@@ -463,7 +463,7 @@ Do not wrap the JSON in markdown code blocks or add any other text.`,
 }

 // Load from disk
-const contents = await plugins.smartfile.fs.toStringSync(filePath);
+const contents = await plugins.fsInstance.file(filePath).encoding('utf8').read() as string;
 const tokenCount = this.countTokens(contents);
 const relativePath = plugins.path.relative(this.projectRoot, filePath);

@@ -31,16 +31,32 @@ export class LazyFileLoader {

 for (const globPattern of globs) {
 try {
-const smartFiles = await plugins.smartfile.fs.fileTreeToObject(this.projectRoot, globPattern);
+const virtualDir = await plugins.smartfileFactory.virtualDirectoryFromPath(this.projectRoot);
-const fileArray = Array.isArray(smartFiles) ? smartFiles : [smartFiles];
+// Filter files based on glob pattern using simple pattern matching
+const smartFiles = virtualDir.filter(file => {
+// Simple glob matching
+const relativePath = file.relative;
+if (globPattern.includes('**')) {
+// Handle ** patterns - match any path
+const pattern = globPattern.replace(/\*\*/g, '.*').replace(/\*/g, '[^/]*');
+return new RegExp(`^${pattern}$`).test(relativePath);
+} else if (globPattern.includes('*')) {
+// Handle single * patterns
+const pattern = globPattern.replace(/\*/g, '[^/]*');
+return new RegExp(`^${pattern}$`).test(relativePath);
+} else {
+// Exact match
+return relativePath === globPattern;
+}
+}).listFiles();

-for (const smartFile of fileArray) {
+for (const smartFile of smartFiles) {
 try {
-const meta = await this.getMetadata(smartFile.path);
+const meta = await this.getMetadata(smartFile.absolutePath);
 metadata.push(meta);
 } catch (error) {
 // Skip files that can't be read
-console.warn(`Failed to get metadata for ${smartFile.path}:`, error.message);
+console.warn(`Failed to get metadata for ${smartFile.absolutePath}:`, error.message);
 }
 }
 } catch (error) {

@@ -104,7 +120,7 @@ export class LazyFileLoader {
 // Load files in parallel
 const loadPromises = metadata.map(async (meta) => {
 try {
-const contents = await plugins.smartfile.fs.toStringSync(meta.path);
+const contents = await plugins.fsInstance.file(meta.path).encoding('utf8').read() as string;
 const tokenCount = tokenizer(contents);

 const fileInfo: IFileInfo = {

@@ -138,7 +154,7 @@ export class LazyFileLoader {
 tokenizer: (content: string) => number
 ): Promise<IFileInfo> {
 const meta = await this.getMetadata(filePath);
-const contents = await plugins.smartfile.fs.toStringSync(filePath);
+const contents = await plugins.fsInstance.file(filePath).encoding('utf8').read() as string;
 const tokenCount = tokenizer(contents);
 const relativePath = plugins.path.relative(this.projectRoot, filePath);

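
As a side note, a small self-contained sketch of the wildcard handling introduced in the LazyFileLoader filter above (extracted into a helper purely for illustration; the commit keeps this logic inline):

```typescript
// Same replace chain as the inline filter: '**' is rewritten first, then the remaining '*'.
const globToRegExp = (globPattern: string): RegExp => {
  const pattern = globPattern.includes('**')
    ? globPattern.replace(/\*\*/g, '.*').replace(/\*/g, '[^/]*')
    : globPattern.replace(/\*/g, '[^/]*');
  return new RegExp(`^${pattern}$`);
};

console.log(globToRegExp('ts/**/*.ts').test('ts/context/context-cache.ts')); // true
console.log(globToRegExp('test/**/*.ts').test('readme.md')); // false
```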
@@ -10,6 +10,7 @@ import * as smartai from '@push.rocks/smartai';
 import * as smartcli from '@push.rocks/smartcli';
 import * as smartdelay from '@push.rocks/smartdelay';
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartfs from '@push.rocks/smartfs';
 import * as smartgit from '@push.rocks/smartgit';
 import * as smartinteract from '@push.rocks/smartinteract';
 import * as smartlog from '@push.rocks/smartlog';

@@ -25,6 +26,7 @@ export {
 smartcli,
 smartdelay,
 smartfile,
+smartfs,
 smartgit,
 smartinteract,
 smartlog,

@@ -34,6 +36,13 @@ export {
 smarttime,
 };

+// Create a shared SmartFs instance for filesystem operations
+const smartFsNodeProvider = new smartfs.SmartFsProviderNode();
+export const fsInstance = new smartfs.SmartFs(smartFsNodeProvider);

+// Create a shared SmartFileFactory for in-memory file operations
+export const smartfileFactory = smartfile.SmartFileFactory.nodeFs();

 // @git.zone scope
 import * as tspublish from '@git.zone/tspublish';

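
To make the migration pattern concrete, a minimal consumer sketch (the `./plugins.js` import path is an assumption; the calls mirror the ones used throughout this commit):

```typescript
import * as plugins from './plugins.js'; // assumed relative path to this plugins barrel

// SmartFs replaces the old plugins.smartfile.fs.* helpers for disk access.
const pkgPath = plugins.path.join(process.cwd(), 'package.json');
const pkgRaw = await plugins.fsInstance.file(pkgPath).encoding('utf8').read() as string;
console.log(JSON.parse(pkgRaw).name);

// SmartFileFactory replaces plugins.smartfile.SmartFile.fromFilePath(...).
const pkgFile = await plugins.smartfileFactory.fromFilePath(pkgPath, process.cwd());
console.log(pkgFile.contents.toString().length);
```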