feat(openai): add getModelSetup() and typed provider options for OpenAI reasoning settings

This commit is contained in:
2026-05-06 19:09:58 +00:00
parent 8ad0b90f95
commit 5c871242b0
10 changed files with 952 additions and 1178 deletions
+47 -1
View File
@@ -1,4 +1,4 @@
import type { LanguageModelV3 } from '@ai-sdk/provider';
import type { JSONObject, JSONValue, LanguageModelV3 } from '@ai-sdk/provider';
export type TProvider =
| 'anthropic'
@@ -10,10 +10,56 @@ export type TProvider =
| 'perplexity'
| 'ollama';
/**
 * Reasoning-effort setting forwarded to OpenAI reasoning models.
 * NOTE(review): 'none', 'minimal' and 'xhigh' extend the classically
 * documented low/medium/high values — confirm against the current
 * OpenAI API reference before relying on them.
 */
export type TOpenAiReasoningEffort = 'none' | 'minimal' | 'low' | 'medium' | 'high' | 'xhigh';
/**
 * Output-verbosity setting for OpenAI text generation
 * (presumably maps to the API's `text.verbosity` parameter — verify).
 */
export type TOpenAiTextVerbosity = 'low' | 'medium' | 'high';
/**
 * Typed provider options for the OpenAI provider, passed through as the
 * `openai` entry of {@link TSmartAiProviderOptions}.
 *
 * Extends `JSONObject` so the whole object is JSON-serializable, which is
 * why the index signature below admits arbitrary extra keys.
 *
 * NOTE(review): field names appear to mirror OpenAI request parameters
 * (camelCased) — confirm each against the AI SDK OpenAI provider docs,
 * since nothing in this file validates them.
 */
export interface IOpenAiProviderOptions extends JSONObject {
// Conversation / continuation state (Responses API style — verify).
conversation?: string | null;
// Extra response fields to include in the API result.
include?: string[] | null;
// System/developer instructions override.
instructions?: string | null;
// Per-token bias map; keys are presumably token IDs as strings.
logitBias?: Record<string, number>;
// boolean toggles logprobs; number presumably sets top_logprobs count — confirm.
logprobs?: boolean | number | null;
maxCompletionTokens?: number;
maxToolCalls?: number | null;
// Free-form request metadata stored with the response.
metadata?: JSONObject | null;
parallelToolCalls?: boolean | null;
// Chain onto a prior response (Responses API) instead of sending full history.
previousResponseId?: string | null;
// Predicted-output content for latency optimization — confirm expected shape.
prediction?: JSONObject;
promptCacheKey?: string | null;
// How long cached prompt prefixes are retained server-side.
promptCacheRetention?: 'in_memory' | '24h' | null;
// See TOpenAiReasoningEffort for the accepted values.
reasoningEffort?: TOpenAiReasoningEffort | null;
// Level of reasoning summary to emit (e.g. 'auto'/'detailed' — confirm values).
reasoningSummary?: string | null;
// Stable end-user identifier for abuse monitoring.
safetyIdentifier?: string | null;
serviceTier?: 'auto' | 'flex' | 'priority' | 'default' | null;
// Whether OpenAI stores the response server-side.
store?: boolean | null;
strictJsonSchema?: boolean | null;
// How system messages are translated for models without a system role.
systemMessageMode?: 'remove' | 'system' | 'developer';
textVerbosity?: TOpenAiTextVerbosity | null;
// Context-window truncation strategy.
truncation?: 'auto' | 'disabled' | null;
user?: string | null;
// Library-local flag, not an OpenAI API field — presumably forces the
// reasoning code path regardless of model name; TODO confirm in getModelSetup().
forceReasoning?: boolean;
// Required so this interface stays assignable to JSONObject; also admits
// forward-compatible keys not modeled above.
[key: string]: JSONValue | undefined;
}
/**
 * Provider-keyed options map in the shape the AI SDK expects for
 * `providerOptions`: arbitrary provider keys carry untyped JSONObjects,
 * while the 'openai' key is narrowed to {@link IOpenAiProviderOptions}.
 */
export type TSmartAiProviderOptions = Record<string, JSONObject> & {
openai?: IOpenAiProviderOptions;
};
/**
 * Bundle of a ready-to-use language model plus the provider options to pass
 * alongside it to generateText()/streamText().
 * NOTE(review): presumably the return type of getModelSetup() added in this
 * commit — confirm at the call site.
 */
export interface ISmartAiModelSetup {
model: LanguageModelV3;
providerOptions?: TSmartAiProviderOptions;
}
export interface ISmartAiOptions {
provider: TProvider;
model: string;
apiKey?: string;
/**
* Provider-specific AI SDK generation options.
* Pass this to generateText()/streamText() alongside the model.
*/
providerOptions?: TSmartAiProviderOptions;
/** For Ollama: base URL of the local server. Default: http://localhost:11434 */
baseUrl?: string;
/**