import type { ToolSet, ModelMessage, LanguageModelV3 } from './plugins.js';

/**
 * Options controlling a single agent run (model, prompt, tools, limits,
 * streaming callbacks, and overflow/cancellation handling).
 */
export interface IAgentRunOptions {
  /** The LanguageModelV3 to use — from smartai.getModel() */
  model: LanguageModelV3;
  /** Initial user message or task description */
  prompt: string;
  /** System prompt override */
  system?: string;
  /** Tools available to the agent */
  tools?: ToolSet;
  /**
   * Maximum number of LLM↔tool round trips.
   * Each step may execute multiple tools in parallel.
   * Default: 20
   */
  maxSteps?: number;
  /** Prior conversation messages to include */
  messages?: ModelMessage[];
  /** Called for each streamed text delta */
  onToken?: (delta: string) => void;
  /** Called when a tool call starts */
  onToolCall?: (toolName: string, input: unknown) => void;
  /** Called when a tool call completes */
  onToolResult?: (toolName: string, result: unknown) => void;
  /**
   * Called when total token usage approaches the model's context limit.
   * Receives the full message history and must return a compacted replacement.
   * If not provided, runAgent throws a ContextOverflowError instead.
   */
  // Fix: bare `Promise` (no type argument) is a compile error; per the contract
  // above, the handler resolves with the compacted message history.
  onContextOverflow?: (messages: ModelMessage[]) => Promise<ModelMessage[]>;
  /** AbortSignal to cancel the run mid-flight */
  abort?: AbortSignal;
}

/**
 * Result of a completed agent run: final output, full transcript,
 * step count, finish reason, and aggregate token usage.
 */
export interface IAgentRunResult {
  /** Final text output from the model */
  text: string;
  /** All messages in the completed conversation */
  messages: ModelMessage[];
  /** Total steps taken */
  steps: number;
  /** Finish reason from the final step */
  finishReason: string;
  /** Accumulated token usage across all steps */
  usage: { inputTokens: number; outputTokens: number; totalTokens: number };
}

/**
 * Thrown by runAgent when the context limit is reached and no
 * {@link IAgentRunOptions.onContextOverflow} handler was supplied.
 */
export class ContextOverflowError extends Error {
  constructor(message = 'Agent context limit reached and no onContextOverflow handler provided') {
    super(message);
    this.name = 'ContextOverflowError';
  }
}