feat(OllamaProvider): add model options, streaming support, and thinking tokens

- Add IOllamaModelOptions interface for runtime options (num_ctx, temperature, etc.)
- Extend IOllamaProviderOptions with defaultOptions and defaultTimeout
- Add IOllamaChatOptions for per-request overrides
- Add IOllamaStreamChunk and IOllamaChatResponse interfaces
- Add chatStreamResponse() for async iteration with options
- Add collectStreamResponse() for streaming with progress callback
- Add chatWithOptions() for non-streaming with full options
- Update chat() to use defaultOptions and defaultTimeout
This commit is contained in:
2026-01-20 00:02:45 +00:00
parent a556053510
commit 126e9b239b
12 changed files with 320 additions and 74 deletions

View File

@@ -18,22 +18,22 @@
     "@git.zone/tsbuild": "^4.1.2",
     "@git.zone/tsbundle": "^2.8.1",
     "@git.zone/tsrun": "^2.0.1",
-    "@git.zone/tstest": "^3.1.4",
+    "@git.zone/tstest": "^3.1.6",
     "@push.rocks/qenv": "^6.1.3",
-    "@types/node": "^22.15.17",
+    "@types/node": "^25.0.9",
     "typescript": "^5.9.3"
   },
   "dependencies": {
     "@anthropic-ai/sdk": "^0.71.2",
-    "@mistralai/mistralai": "^1.11.0",
+    "@mistralai/mistralai": "^1.12.0",
     "@push.rocks/smartarray": "^1.1.0",
     "@push.rocks/smartfile": "^11.2.7",
     "@push.rocks/smartfs": "^1.3.1",
     "@push.rocks/smartpath": "^6.0.0",
     "@push.rocks/smartpdf": "^4.1.1",
     "@push.rocks/smartpromise": "^4.2.3",
     "@push.rocks/smartrequest": "^5.0.1",
     "@push.rocks/webstream": "^1.0.10",
-    "openai": "^5.12.2"
+    "openai": "^6.16.0"
   },
   "repository": {
     "type": "git",