feat(OllamaProvider): add model options, streaming support, and thinking tokens
- Add IOllamaModelOptions interface for runtime options (num_ctx, temperature, etc.)
- Extend IOllamaProviderOptions with defaultOptions and defaultTimeout
- Add IOllamaChatOptions for per-request overrides
- Add IOllamaStreamChunk and IOllamaChatResponse interfaces
- Add chatStreamResponse() for async iteration with options
- Add collectStreamResponse() for streaming with progress callback
- Add chatWithOptions() for non-streaming with full options
- Update chat() to use defaultOptions and defaultTimeout
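A minimal usage sketch of the surface described above (a sketch only: the OllamaProvider export path, constructor fields other than defaultOptions/defaultTimeout, the chat argument shape, and the chunk/response field names are assumptions, not confirmed API):

import * as smartai from '../ts/index.js';

// Provider-level defaults: IOllamaModelOptions via defaultOptions, plus defaultTimeout.
const ollama = new smartai.OllamaProvider({
  defaultOptions: { num_ctx: 8192, temperature: 0.2 },
  defaultTimeout: 120_000, // milliseconds (assumed unit)
});

// Streaming via async iteration with chatStreamResponse(); the chunk shape
// (IOllamaStreamChunk) and its `message` field are assumptions.
for await (const chunk of ollama.chatStreamResponse({
  systemMessage: 'You are a helpful assistant.',
  userMessage: 'Explain what num_ctx controls.',
  messageHistory: [],
  options: { temperature: 0.7 }, // per-request IOllamaChatOptions override (assumed field name)
})) {
  process.stdout.write(chunk.message ?? '');
}

// Non-streaming with full options via chatWithOptions(); the response shape
// (IOllamaChatResponse) is assumed.
const response = await ollama.chatWithOptions({
  systemMessage: 'You are a helpful assistant.',
  userMessage: 'Give a one-line summary of streaming vs. non-streaming chat.',
  messageHistory: [],
  options: { num_ctx: 4096 },
});
console.log(response.message);

Per the commit message, collectStreamResponse() would take the same arguments plus a progress callback that receives incremental chunks.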
@@ -1,8 +1,9 @@
 import { expect, tap } from '@git.zone/tstest/tapbundle';
 import * as qenv from '@push.rocks/qenv';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
 
 const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
 
 import * as smartai from '../ts/index.js';
 
@@ -21,7 +22,7 @@ tap.test('Anthropic Vision: should analyze coffee image with latte art', async (
   const imagePath = './test/testimages/coffee-dani/coffee.jpg';
   console.log(`Loading coffee image from: ${imagePath}`);
 
-  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
+  const imageBuffer = await smartfs.file(imagePath).read();
   console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
 
   const result = await anthropicProvider.vision({
@@ -45,7 +46,7 @@ tap.test('Anthropic Vision: should analyze laptop/workspace image', async () =>
   const imagePath = './test/testimages/laptop-nicolas/laptop.jpg';
   console.log(`Loading laptop image from: ${imagePath}`);
 
-  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
+  const imageBuffer = await smartfs.file(imagePath).read();
   console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
 
   const result = await anthropicProvider.vision({
@@ -69,7 +70,7 @@ tap.test('Anthropic Vision: should analyze receipt/document image', async () =>
   const imagePath = './test/testimages/receipt-annie/receipt.jpg';
   console.log(`Loading receipt image from: ${imagePath}`);
 
-  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
+  const imageBuffer = await smartfs.file(imagePath).read();
   console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
 
   const result = await anthropicProvider.vision({