diff --git a/ts/smartagent.classes.driveragent.ts b/ts/smartagent.classes.driveragent.ts
index d418c64..eb554a9 100644
--- a/ts/smartagent.classes.driveragent.ts
+++ b/ts/smartagent.classes.driveragent.ts
@@ -478,18 +478,25 @@ Your complete output here

     // Check if provider supports native tool calling (Ollama)
     const provider = this.provider as any;
-    if (typeof provider.chatWithOptions !== 'function') {
+    if (typeof provider.collectStreamResponse !== 'function') {
       throw new Error('Provider does not support native tool calling. Use startTask() instead.');
     }

-    // Call with tools
-    const response = await provider.chatWithOptions({
-      systemMessage: fullSystemMessage,
-      userMessage: userMessage,
-      messageHistory: [],
-      images: images,
-      tools: tools.length > 0 ? tools : undefined,
-    });
+    // Use collectStreamResponse for streaming support with tools
+    const response = await provider.collectStreamResponse(
+      {
+        systemMessage: fullSystemMessage,
+        userMessage: userMessage,
+        messageHistory: [],
+        images: images,
+        tools: tools.length > 0 ? tools : undefined,
+      },
+      // Pass onToken callback through onChunk for streaming
+      this.onToken ? (chunk: any) => {
+        if (chunk.thinking && this.onToken) this.onToken(chunk.thinking);
+        if (chunk.content && this.onToken) this.onToken(chunk.content);
+      } : undefined
+    );

     // Add assistant response to history
     const historyMessage: plugins.smartai.ChatMessage = {
@@ -555,17 +562,24 @@ Your complete output here

     // Check if provider supports native tool calling
     const provider = this.provider as any;
-    if (typeof provider.chatWithOptions !== 'function') {
+    if (typeof provider.collectStreamResponse !== 'function') {
       throw new Error('Provider does not support native tool calling. Use continueWithMessage() instead.');
     }

-    // Call with tools
-    const response = await provider.chatWithOptions({
-      systemMessage: fullSystemMessage,
-      userMessage: message,
-      messageHistory: historyForChat,
-      tools: tools.length > 0 ? tools : undefined,
-    });
+    // Use collectStreamResponse for streaming support with tools
+    const response = await provider.collectStreamResponse(
+      {
+        systemMessage: fullSystemMessage,
+        userMessage: message,
+        messageHistory: historyForChat,
+        tools: tools.length > 0 ? tools : undefined,
+      },
+      // Pass onToken callback through onChunk for streaming
+      this.onToken ? (chunk: any) => {
+        if (chunk.thinking && this.onToken) this.onToken(chunk.thinking);
+        if (chunk.content && this.onToken) this.onToken(chunk.content);
+      } : undefined
+    );

     // Add assistant response to history
     this.messageHistory.push({
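
For context, a minimal self-contained sketch of the chunk-forwarding pattern the new call relies on, assuming collectStreamResponse accepts an optional per-chunk callback and that each streamed chunk may carry optional thinking and/or content string fields (as the callback in the diff expects). The ChatStreamChunk and makeOnChunk names below are illustrative only, not part of the smartai API.

// Sketch only: `ChatStreamChunk` and `makeOnChunk` are hypothetical names,
// not part of the smartai API. Assumes each streamed chunk may carry optional
// `thinking` and/or `content` string fields.
interface ChatStreamChunk {
  thinking?: string; // incremental reasoning tokens, if the model emits them
  content?: string;  // incremental answer tokens
}

type TokenHandler = (token: string) => void;

// Builds the per-chunk callback from an optional onToken handler. Returning
// undefined when no handler is set mirrors the ternary in the diff, so the
// provider can skip per-chunk work entirely when streaming is not wanted.
function makeOnChunk(
  onToken?: TokenHandler,
): ((chunk: ChatStreamChunk) => void) | undefined {
  if (!onToken) return undefined;
  return (chunk) => {
    if (chunk.thinking) onToken(chunk.thinking);
    if (chunk.content) onToken(chunk.content);
  };
}

// Hypothetical usage, matching the call shape introduced in the diff:
//   const response = await provider.collectStreamResponse(options, makeOnChunk(this.onToken));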