BREAKING CHANGE(vercel-ai-sdk): migrate to Vercel AI SDK v6 and introduce provider registry (getModel) returning LanguageModelV3

This commit is contained in:
2026-03-05 19:37:29 +00:00
parent 27cef60900
commit c24010c9bc
61 changed files with 4789 additions and 9083 deletions

161
test/test.smartai.ts Normal file
View File

@@ -0,0 +1,161 @@
// Test harness and assertion helpers from the tstest tap bundle.
import { tap, expect } from '@git.zone/tstest/tapbundle';
// qenv resolves env vars either from the environment or from ./.nogit/ files.
import * as qenv from '@push.rocks/qenv';
// Module under test: the smartai public API (getModel / generateText / streamText).
import * as smartai from '../ts/index.js';
// Look up vars in the process env first, then fall back to ./.nogit/ secrets.
const testQenv = new qenv.Qenv('./', './.nogit/');
tap.test('getModel should return a LanguageModelV3 for anthropic', async () => {
  // Skip gracefully when no Anthropic credentials are configured.
  const anthropicKey = await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN');
  if (!anthropicKey) {
    console.log('ANTHROPIC_TOKEN not set, skipping test');
    return;
  }
  const languageModel = smartai.getModel({
    provider: 'anthropic',
    model: 'claude-sonnet-4-5-20250929',
    apiKey: anthropicKey,
  });
  // A LanguageModelV3 exposes each of these members; check them in turn.
  const expectedMembers = [
    'specificationVersion',
    'provider',
    'modelId',
    'doGenerate',
    'doStream',
  ];
  for (const member of expectedMembers) {
    expect(languageModel).toHaveProperty(member);
  }
});
tap.test('getModel with anthropic prompt caching returns wrapped model', async () => {
  // Skip gracefully when no Anthropic credentials are configured.
  const anthropicKey = await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN');
  if (!anthropicKey) {
    console.log('ANTHROPIC_TOKEN not set, skipping test');
    return;
  }
  const baseOptions = {
    provider: 'anthropic',
    model: 'claude-sonnet-4-5-20250929',
    apiKey: anthropicKey,
  } as const;
  // Prompt caching defaults to enabled when the option is omitted.
  const cachedModel = smartai.getModel({ ...baseOptions });
  // Explicitly opting out must still yield a usable model.
  const uncachedModel = smartai.getModel({ ...baseOptions, promptCaching: false });
  // Either way the result must look like a language model.
  expect(cachedModel).toHaveProperty('doGenerate');
  expect(uncachedModel).toHaveProperty('doGenerate');
});
tap.test('generateText with anthropic model', async () => {
  // Skip gracefully when no Anthropic credentials are configured.
  const anthropicKey = await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN');
  if (!anthropicKey) {
    console.log('ANTHROPIC_TOKEN not set, skipping test');
    return;
  }
  const languageModel = smartai.getModel({
    provider: 'anthropic',
    model: 'claude-sonnet-4-5-20250929',
    apiKey: anthropicKey,
  });
  // Exercise the one-shot text generation path end to end.
  const generation = await smartai.generateText({
    model: languageModel,
    prompt: 'Say hello in exactly 3 words.',
  });
  console.log('Anthropic response:', generation.text);
  expect(generation.text).toBeTruthy();
  expect(generation.text.length).toBeGreaterThan(0);
});
tap.test('getModel should return a LanguageModelV3 for openai', async () => {
  // Skip gracefully when no OpenAI credentials are configured.
  const openaiKey = await testQenv.getEnvVarOnDemand('OPENAI_TOKEN');
  if (!openaiKey) {
    console.log('OPENAI_TOKEN not set, skipping test');
    return;
  }
  const languageModel = smartai.getModel({
    provider: 'openai',
    model: 'gpt-4o-mini',
    apiKey: openaiKey,
  });
  // The registry must hand back something with the model call surface.
  for (const member of ['doGenerate', 'doStream']) {
    expect(languageModel).toHaveProperty(member);
  }
});
tap.test('streamText with anthropic model', async () => {
  // Skip gracefully when no Anthropic credentials are configured.
  const anthropicKey = await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN');
  if (!anthropicKey) {
    console.log('ANTHROPIC_TOKEN not set, skipping test');
    return;
  }
  const languageModel = smartai.getModel({
    provider: 'anthropic',
    model: 'claude-sonnet-4-5-20250929',
    apiKey: anthropicKey,
  });
  const streamResult = await smartai.streamText({
    model: languageModel,
    prompt: 'Count from 1 to 5.',
  });
  // Drain the stream, collecting every emitted chunk.
  const chunks: string[] = [];
  for await (const piece of streamResult.textStream) {
    chunks.push(piece);
  }
  const assembled = chunks.join('');
  console.log('Streamed text:', assembled);
  expect(assembled).toBeTruthy();
  expect(assembled.length).toBeGreaterThan(0);
  // A real stream should arrive in more than one chunk.
  expect(chunks.length).toBeGreaterThan(1);
});
tap.test('generateText with openai model', async () => {
  // Skip gracefully when no OpenAI credentials are configured.
  const openaiKey = await testQenv.getEnvVarOnDemand('OPENAI_TOKEN');
  if (!openaiKey) {
    console.log('OPENAI_TOKEN not set, skipping test');
    return;
  }
  const languageModel = smartai.getModel({
    provider: 'openai',
    model: 'gpt-4o-mini',
    apiKey: openaiKey,
  });
  // A trivially verifiable prompt lets us assert on the content itself.
  const generation = await smartai.generateText({
    model: languageModel,
    prompt: 'What is 2+2? Reply with just the number.',
  });
  console.log('OpenAI response:', generation.text);
  expect(generation.text).toBeTruthy();
  expect(generation.text).toInclude('4');
});
tap.test('getModel should throw for unknown provider', async () => {
  // Verify the registry rejects providers it does not know about.
  let threw = false;
  try {
    smartai.getModel({
      provider: 'nonexistent' as any, // deliberately invalid to hit the error path
      model: 'test',
    });
  } catch (e: unknown) {
    threw = true;
    // Under strict mode (`useUnknownInCatchVariables`) `e` is `unknown`,
    // so narrow to Error before reading `.message`. This also turns a
    // non-Error throwable into a clean assertion failure rather than a
    // TypeError inside the test.
    expect(e instanceof Error).toBeTrue();
    expect((e as Error).message).toInclude('Unknown provider');
  }
  expect(threw).toBeTrue();
});
export default tap.start();