feat(provider.ollama): add chain-of-thought reasoning support to chat messages and Ollama provider

This commit is contained in:
2026-01-20 02:03:20 +00:00
parent 6c6652d75d
commit 9ad039f77b
4 changed files with 28 additions and 6 deletions

View File

@@ -1,5 +1,12 @@
# Changelog
+## 2026-01-20 - 0.13.0 - feat(provider.ollama)
+add chain-of-thought reasoning support to chat messages and Ollama provider
+- Added an optional reasoning?: string field to the chat message and chat response interfaces to surface chain-of-thought data.
+- Propagated reasoning from message history into the formatted requests sent to Ollama.
+- Mapped the Ollama response fields (thinking or reasoning) into ChatResponse.reasoning so downstream code can access the model's reasoning output.
## 2026-01-20 - 0.12.1 - fix(docs)
update documentation: clarify provider capabilities, add provider capabilities summary, polish examples and formatting, and remove Serena project config
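For orientation, here is a minimal usage sketch of how the new field surfaces to callers. It assumes an already-configured OllamaProvider instance and that ChatOptions exposes systemMessage and userMessage alongside messageHistory; only messageHistory appears in this diff, so treat the other two field names as assumptions.

```typescript
// Sketch, not part of this commit: reading chain-of-thought output after a chat call.
const response = await ollamaProvider.chat({
  systemMessage: 'You are a helpful assistant.', // field name assumed, not shown in this diff
  userMessage: 'Why is the sky blue?',           // field name assumed, not shown in this diff
  messageHistory: [],
});

console.log(response.message);
if (response.reasoning) {
  // Only present when the model returned thinking/reasoning output.
  console.log('Model reasoning:', response.reasoning);
}
```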

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartai',
-version: '0.12.1',
+version: '0.13.0',
description: 'SmartAi is a versatile TypeScript library designed to facilitate integration and interaction with various AI models, offering functionalities for chat, audio generation, document processing, and vision tasks.'
}

View File

@@ -8,6 +8,8 @@ export interface ChatMessage {
content: string;
/** Base64-encoded images for vision-capable models */
images?: string[];
+/** Chain-of-thought reasoning for GPT-OSS models (e.g., served via Ollama) */
+reasoning?: string;
}
/**
@@ -35,6 +37,8 @@ export interface StreamingChatOptions extends ChatOptions {
export interface ChatResponse {
role: 'assistant';
message: string;
+/** Chain-of-thought reasoning from reasoning models */
+reasoning?: string;
}
/**
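Because the new property is optional, existing message objects continue to satisfy these interfaces. A small sketch of both shapes (assuming ChatMessage and ChatResponse are exported from the package root, and that ChatMessage has a role field outside this hunk):

```typescript
import type { ChatMessage, ChatResponse } from '@push.rocks/smartai'; // export path assumed

// Assistant turn that carries its chain-of-thought alongside the visible answer.
const assistantTurn: ChatMessage = {
  role: 'assistant',
  content: 'The sky looks blue because of Rayleigh scattering.',
  reasoning: 'Shorter wavelengths scatter more strongly, so blue light dominates the scattered light.',
};

// A message without the field still type-checks, since reasoning is optional.
const userTurn: ChatMessage = {
  role: 'user',
  content: 'Why is the sky blue?',
};

const reply: ChatResponse = {
  role: 'assistant',
  message: 'Mostly Rayleigh scattering of sunlight by air molecules.',
  reasoning: 'Recalled that scattering intensity scales with the inverse fourth power of wavelength.',
};
```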

View File

@@ -205,13 +205,16 @@ export class OllamaProvider extends MultiModalModel {
public async chat(optionsArg: ChatOptions): Promise<ChatResponse> {
// Format messages for Ollama
const historyMessages = optionsArg.messageHistory.map((msg) => {
-const formatted: { role: string; content: string; images?: string[] } = {
+const formatted: { role: string; content: string; images?: string[]; reasoning?: string } = {
role: msg.role,
content: msg.content,
};
if (msg.images && msg.images.length > 0) {
formatted.images = msg.images;
}
+if (msg.reasoning) {
+  formatted.reasoning = msg.reasoning;
+}
return formatted;
});
@@ -254,6 +257,7 @@ export class OllamaProvider extends MultiModalModel {
return {
role: 'assistant' as const,
message: result.message.content,
+reasoning: result.message.thinking || result.message.reasoning,
};
}
@@ -283,6 +287,7 @@ export class OllamaProvider extends MultiModalModel {
return {
role: 'assistant' as const,
message: response.message,
+reasoning: response.thinking,
};
}
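The first return above reads thinking with a reasoning fallback, while the second reads thinking only. A sketch of the two payload shapes the || fallback covers (which field a given Ollama version or model actually emits is not asserted here):

```typescript
// Hypothetical parsed payloads, shaped to match the property reads above.
const resultWithThinking = {
  message: { role: 'assistant', content: 'Answer text', thinking: 'Step-by-step reasoning...' },
};
const resultWithReasoning = {
  message: { role: 'assistant', content: 'Answer text', reasoning: 'Step-by-step reasoning...' },
};

// thinking || reasoning picks whichever is set; if neither is present,
// ChatResponse.reasoning ends up undefined, which the optional property allows.
```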
@@ -296,15 +301,18 @@ export class OllamaProvider extends MultiModalModel {
const timeout = optionsArg.timeout || this.defaultTimeout;
const modelOptions = { ...this.defaultOptions, ...optionsArg.options };
-// Format history messages with optional images
+// Format history messages with optional images and reasoning
const historyMessages = optionsArg.messageHistory.map((msg) => {
-const formatted: { role: string; content: string; images?: string[] } = {
+const formatted: { role: string; content: string; images?: string[]; reasoning?: string } = {
role: msg.role,
content: msg.content,
};
if (msg.images && msg.images.length > 0) {
formatted.images = msg.images;
}
+if (msg.reasoning) {
+  formatted.reasoning = msg.reasoning;
+}
return formatted;
});
@@ -410,15 +418,18 @@ export class OllamaProvider extends MultiModalModel {
const timeout = optionsArg.timeout || this.defaultTimeout;
const modelOptions = { ...this.defaultOptions, ...optionsArg.options };
-// Format history messages with optional images
+// Format history messages with optional images and reasoning
const historyMessages = optionsArg.messageHistory.map((msg) => {
-const formatted: { role: string; content: string; images?: string[] } = {
+const formatted: { role: string; content: string; images?: string[]; reasoning?: string } = {
role: msg.role,
content: msg.content,
};
if (msg.images && msg.images.length > 0) {
formatted.images = msg.images;
}
+if (msg.reasoning) {
+  formatted.reasoning = msg.reasoning;
+}
return formatted;
});
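The per-message formatting block now appears identically in chat() and the two other methods touched in this file. Purely as a reference sketch (no such helper exists in the codebase), the repeated logic is equivalent to:

```typescript
import type { ChatMessage } from '@push.rocks/smartai'; // export path assumed

interface FormattedOllamaMessage {
  role: string;
  content: string;
  images?: string[];
  reasoning?: string;
}

// Restates the mapping used in each method above: copy role and content,
// and attach images/reasoning only when the source message carries them.
function formatHistoryMessage(msg: ChatMessage): FormattedOllamaMessage {
  const formatted: FormattedOllamaMessage = {
    role: msg.role,
    content: msg.content,
  };
  if (msg.images && msg.images.length > 0) {
    formatted.images = msg.images;
  }
  if (msg.reasoning) {
    formatted.reasoning = msg.reasoning;
  }
  return formatted;
}
```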