feat(native-tools): add native tool calling support for Ollama models

- Add INativeToolCall interface for native tool call format
- Add useNativeToolCalling option to IDualAgentOptions
- Add getToolsAsJsonSchema() to convert tools to Ollama JSON Schema format
- Add parseNativeToolCalls() to convert native tool calls to proposals
- Add startTaskWithNativeTools() and continueWithNativeTools() to DriverAgent
- Update DualAgentOrchestrator to support both XML parsing and native tool calling modes

Native tool calling is more efficient for models like GPT-OSS that use Harmony format,
as it activates Ollama's built-in tool parser instead of requiring XML generation.
This commit is contained in:
2026-01-20 02:44:54 +00:00
parent 472a8ed7f8
commit 4310c8086b
5 changed files with 341 additions and 23 deletions

View File

@@ -442,4 +442,238 @@ Your complete output here
/**
 * Reset the driver's conversation state.
 * Clears the accumulated message history so the next task starts from a
 * clean slate; no other state is touched here.
 */
public reset(): void {
this.messageHistory = [];
}
// ================================
// Native Tool Calling Support
// ================================
/**
 * Start a task with native tool calling support.
 * Uses the provider's native tool calling API (Ollama `chatWithOptions`)
 * instead of the XML-parsing protocol used by startTask().
 * @param task The task description
 * @param images Optional base64-encoded images for vision tasks
 * @returns Response with content, reasoning, and any tool calls
 * @throws Error when the configured provider does not implement chatWithOptions
 */
public async startTaskWithNativeTools(
  task: string,
  images?: string[]
): Promise<{ message: interfaces.IAgentMessage; toolCalls?: interfaces.INativeToolCall[] }> {
  // A native-tool task always begins with an empty conversation.
  this.messageHistory = [];

  // Plain task prompt — native tool calling needs no XML protocol instructions.
  const userMessage = `TASK: ${task}\n\nComplete this task using the available tools. When done, provide your final output.`;
  this.messageHistory.push({
    role: 'user',
    content: userMessage,
  });

  const fullSystemMessage = this.getNativeToolsSystemMessage();
  const tools = this.getToolsAsJsonSchema();

  // chatWithOptions is an Ollama-specific extension; fail fast when the
  // configured provider does not implement it.
  const provider = this.provider as any;
  if (typeof provider.chatWithOptions !== 'function') {
    throw new Error('Provider does not support native tool calling. Use startTask() instead.');
  }

  const response = await provider.chatWithOptions({
    systemMessage: fullSystemMessage,
    userMessage: userMessage,
    // History was just reset; the task itself travels in userMessage.
    messageHistory: [],
    images: images,
    tools: tools.length > 0 ? tools : undefined,
  });

  // Record the assistant turn. Use ?? (not ||) so an empty-but-present
  // reasoning/content string is preserved instead of being clobbered.
  const historyMessage: plugins.smartai.ChatMessage = {
    role: 'assistant',
    content: response.message ?? '',
    reasoning: response.thinking ?? response.reasoning,
  };
  this.messageHistory.push(historyMessage);

  // Normalize provider tool calls into our INativeToolCall shape.
  let toolCalls: interfaces.INativeToolCall[] | undefined;
  if (response.toolCalls && response.toolCalls.length > 0) {
    toolCalls = response.toolCalls.map((tc: any) => ({
      function: {
        name: tc.function.name,
        arguments: tc.function.arguments,
        index: tc.function.index,
      },
    }));
  }

  return {
    message: {
      role: 'assistant',
      content: response.message ?? '',
    },
    toolCalls,
  };
}
/**
 * Continue the conversation in native tool calling mode, typically to feed
 * a tool result back to the model.
 * @param message The message to continue with (e.g., a serialized tool result)
 * @returns Response with content, reasoning, and any tool calls
 * @throws Error when the configured provider does not implement chatWithOptions
 */
public async continueWithNativeTools(
  message: string
): Promise<{ message: interfaces.IAgentMessage; toolCalls?: interfaces.INativeToolCall[] }> {
  this.messageHistory.push({
    role: 'user',
    content: message,
  });

  const fullSystemMessage = this.getNativeToolsSystemMessage();
  const tools = this.getToolsAsJsonSchema();

  // Window the history passed to the provider. The newest message is sent
  // separately as userMessage, so exclude it from the history slice.
  let historyForChat: plugins.smartai.ChatMessage[];
  const fullHistory = this.messageHistory.slice(0, -1);
  if (this.maxHistoryMessages > 0 && fullHistory.length > this.maxHistoryMessages) {
    // Keep the original task message plus the most recent turns.
    // BUGFIX: guard the tail size — slice(-0) returns the WHOLE array, so
    // maxHistoryMessages === 1 must not compute slice(-(1 - 1)); that used
    // to send the full history (with the first message duplicated).
    const tailCount = this.maxHistoryMessages - 1;
    historyForChat = [
      fullHistory[0],
      ...(tailCount > 0 ? fullHistory.slice(-tailCount) : []),
    ];
  } else {
    historyForChat = fullHistory;
  }

  // chatWithOptions is an Ollama-specific extension; fail fast when the
  // configured provider does not implement it.
  const provider = this.provider as any;
  if (typeof provider.chatWithOptions !== 'function') {
    throw new Error('Provider does not support native tool calling. Use continueWithMessage() instead.');
  }

  const response = await provider.chatWithOptions({
    systemMessage: fullSystemMessage,
    userMessage: message,
    messageHistory: historyForChat,
    tools: tools.length > 0 ? tools : undefined,
  });

  // Record the assistant turn. Use ?? (not ||) so an empty-but-present
  // reasoning/content string is preserved instead of being clobbered.
  this.messageHistory.push({
    role: 'assistant',
    content: response.message ?? '',
    reasoning: response.thinking ?? response.reasoning,
  });

  // Normalize provider tool calls into our INativeToolCall shape.
  let toolCalls: interfaces.INativeToolCall[] | undefined;
  if (response.toolCalls && response.toolCalls.length > 0) {
    toolCalls = response.toolCalls.map((tc: any) => ({
      function: {
        name: tc.function.name,
        arguments: tc.function.arguments,
        index: tc.function.index,
      },
    }));
  }

  return {
    message: {
      role: 'assistant',
      content: response.message ?? '',
    },
    toolCalls,
  };
}
/**
 * Build the system message used in native tool calling mode.
 * A deliberately lean prompt: the tool schemas are supplied separately via
 * the provider's tools parameter, so no XML protocol instructions are needed.
 */
private getNativeToolsSystemMessage(): string {
  const promptLines = [
    'You are an AI assistant that executes tasks by using available tools.',
    '## Your Role',
    'You analyze tasks, break them down into steps, and use tools to accomplish goals.',
    '## Guidelines',
    '1. Think step by step about what needs to be done',
    '2. Use the available tools to complete the task',
    '3. Process tool results and continue until the task is complete',
    '4. When the task is complete, provide a final summary',
    '## Important',
    '- Use tools when needed to gather information or perform actions',
    '- If you need clarification, ask the user',
    '- Always verify your work before marking the task complete',
  ];
  return promptLines.join('\n');
}
/**
 * Convert the registered tools into Ollama's JSON Schema tool format for
 * native tool calling. Every (tool, action) pair becomes one function whose
 * name is `${tool.name}_${action.name}` (e.g. "json_validate").
 * @returns Array of IOllamaTool-compatible tool definitions
 */
public getToolsAsJsonSchema(): plugins.smartai.IOllamaTool[] {
  // Flatten each tool's actions into individual function definitions.
  return [...this.tools.values()].flatMap((tool) =>
    tool.actions.map((action): plugins.smartai.IOllamaTool => ({
      type: 'function',
      function: {
        name: `${tool.name}_${action.name}`,
        description: `[${tool.name}] ${action.description}`,
        parameters: action.parameters as plugins.smartai.IOllamaTool['function']['parameters'],
      },
    }))
  );
}
/**
 * Parse native tool calls from the provider response into IToolCallProposal
 * format, ready for execution.
 *
 * Function names are built as `${tool.name}_${action.name}` (see
 * getToolsAsJsonSchema), so a naive split on the FIRST underscore mis-parses
 * any tool whose own name contains an underscore (e.g. "unit_test_run").
 * We therefore resolve the prefix against the registered tools first and only
 * fall back to the first-underscore split for unknown names (backward
 * compatible with the previous behavior).
 * @param toolCalls Array of native tool calls from the provider
 * @returns Array of IToolCallProposal ready for execution
 */
public parseNativeToolCalls(
  toolCalls: interfaces.INativeToolCall[]
): interfaces.IToolCallProposal[] {
  return toolCalls.map((tc) => {
    const fullName = tc.function.name;

    // Prefer the longest registered tool name that prefixes the call.
    let toolName: string | undefined;
    let action = '';
    for (const tool of this.tools.values()) {
      const isPrefix =
        fullName === tool.name || fullName.startsWith(`${tool.name}_`);
      if (isPrefix && (toolName === undefined || tool.name.length > toolName.length)) {
        toolName = tool.name;
        action = fullName === tool.name ? '' : fullName.substring(tool.name.length + 1);
      }
    }

    if (toolName === undefined) {
      // Fallback: previous behavior — split on the first underscore,
      // or treat the entire name as the tool name with an empty action.
      const underscoreIndex = fullName.indexOf('_');
      if (underscoreIndex > 0) {
        toolName = fullName.substring(0, underscoreIndex);
        action = fullName.substring(underscoreIndex + 1);
      } else {
        toolName = fullName;
        action = '';
      }
    }

    return {
      proposalId: this.generateProposalId(),
      toolName,
      action,
      params: tc.function.arguments,
    };
  });
}
}