feat(docs): Update project metadata and documentation to reflect comprehensive AI-enhanced features and improved installation and usage instructions

2025-05-14 11:27:38 +00:00
parent 620737566f
commit ab273ea75c
21 changed files with 2305 additions and 258 deletions


@@ -31,15 +31,27 @@ export class Commit {
 'pnpm-lock.yaml',
 'package-lock.json',
 ]);
-const projectContext = new ProjectContext(this.projectDir);
-let contextString = await projectContext.update();
-contextString = `
-${contextString}
-Below is the diff of the uncommitted changes. If nothing is changed, there are no changes:
-${diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'}
-`;
+// Use the new TaskContextFactory for optimized context
+const taskContextFactory = new (await import('../context/index.js')).TaskContextFactory(this.projectDir);
+await taskContextFactory.initialize();
+// Generate context specifically for commit task
+const contextResult = await taskContextFactory.createContextForCommit(
+  diffStringArray[0] ? diffStringArray.join('\n\n') : 'No changes.'
+);
+// Get the optimized context string
+let contextString = contextResult.context;
+// Log token usage statistics
+console.log(`Token usage - Context: ${contextResult.tokenCount}, Files: ${contextResult.includedFiles.length + contextResult.trimmedFiles.length}, Savings: ${contextResult.tokenSavings}`);
+// Check for token overflow against model limits
+const MODEL_TOKEN_LIMIT = 200000; // o4-mini
+if (contextResult.tokenCount > MODEL_TOKEN_LIMIT * 0.9) {
+  console.log(`⚠️ Warning: Context size (${contextResult.tokenCount} tokens) is close to or exceeds model limit (${MODEL_TOKEN_LIMIT} tokens).`);
+  console.log(`The model may not be able to process all information effectively.`);
+}
 let result = await this.aiDocsRef.openaiInstance.chat({
 systemMessage: `
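
For orientation, the sketch below shows the contract that TaskContextFactory appears to expose, inferred only from the call sites in this diff (a constructor taking the project directory, initialize(), and createContextForCommit() returning the context plus token statistics). The real declarations live in '../context/index.js' and may differ; names introduced here are hypothetical.

// Hypothetical TypeScript shape, reconstructed from this diff's usage only.
export interface CommitContextResult {
  context: string;          // assembled context string handed to the model
  tokenCount: number;       // estimated token count of `context`
  includedFiles: string[];  // files whose contents were included
  trimmedFiles: string[];   // files that were trimmed to save tokens
  tokenSavings: number;     // tokens saved versus an untrimmed context
}

export declare class TaskContextFactory {
  constructor(projectDir: string);
  initialize(): Promise<void>;
  createContextForCommit(diff: string): Promise<CommitContextResult>;
}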