BREAKING CHANGE(vercel-ai-sdk): migrate to Vercel AI SDK v6 and introduce provider registry (getModel) returning LanguageModelV3

This commit is contained in:
2026-03-05 19:37:29 +00:00
parent 27cef60900
commit c24010c9bc
61 changed files with 4789 additions and 9083 deletions

View File

@@ -0,0 +1,38 @@
import type { LanguageModelV3Middleware, LanguageModelV3Prompt } from '@ai-sdk/provider';
/**
 * Builds a LanguageModelV3 middleware that injects Anthropic prompt-caching
 * directives into the outgoing prompt.
 *
 * The final system message and the final user message each receive
 * `providerOptions.anthropic.cacheControl = { type: 'ephemeral' }`, which lets
 * Anthropic reuse the cached prefix across calls — lowering input-token cost
 * and latency for repeated requests. All other messages, and the caller's
 * original `params`, are left untouched (new objects are created for the
 * modified entries).
 *
 * @returns A middleware whose `transformParams` hook annotates the prompt.
 */
export function createAnthropicCachingMiddleware(): LanguageModelV3Middleware {
  return {
    specificationVersion: 'v3',
    transformParams: async ({ params }) => {
      // Work on a shallow copy so the caller's prompt array is never mutated.
      const prompt = [...params.prompt] as Array<Record<string, unknown>>;

      // Track the index of the final system message and final user message.
      let systemIdx = -1;
      let userIdx = -1;
      prompt.forEach((message, i) => {
        if (message.role === 'system') systemIdx = i;
        if (message.role === 'user') userIdx = i;
      });

      for (const idx of [systemIdx, userIdx]) {
        if (idx < 0) continue; // no message of this role present

        // Merge rather than overwrite: preserve any providerOptions (and any
        // existing anthropic settings) already attached to the message.
        const existing = (prompt[idx].providerOptions ?? {}) as Record<string, unknown>;
        const anthropic = (existing.anthropic ?? {}) as Record<string, unknown>;
        prompt[idx] = {
          ...prompt[idx],
          providerOptions: {
            ...existing,
            anthropic: {
              ...anthropic,
              cacheControl: { type: 'ephemeral' },
            },
          },
        };
      }

      return { ...params, prompt: prompt as unknown as LanguageModelV3Prompt };
    },
  };
}