// test.vision.anthropic.ts — Anthropic provider vision (image analysis) tests
import { expect, tap } from '@push.rocks/tapbundle';
|
||
|
import * as qenv from '@push.rocks/qenv';
|
||
|
import * as smartfile from '@push.rocks/smartfile';
|
||
|
|
||
|
const testQenv = new qenv.Qenv('./', './.nogit/');
|
||
|
|
||
|
import * as smartai from '../ts/index.js';
|
||
|
|
||
|
let anthropicProvider: smartai.AnthropicProvider;
|
||
|
|
||
|
tap.test('Anthropic Vision: should create and start Anthropic provider', async () => {
|
||
|
anthropicProvider = new smartai.AnthropicProvider({
|
||
|
anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
|
||
|
});
|
||
|
await anthropicProvider.start();
|
||
|
expect(anthropicProvider).toBeInstanceOf(smartai.AnthropicProvider);
|
||
|
});
|
||
|
|
||
|
tap.test('Anthropic Vision: should analyze coffee image with latte art', async () => {
|
||
|
// Test 1: Coffee image from Unsplash by Dani
|
||
|
const imagePath = './test/testimages/coffee-dani/coffee.jpg';
|
||
|
console.log(`Loading coffee image from: ${imagePath}`);
|
||
|
|
||
|
const imageBuffer = await smartfile.fs.toBuffer(imagePath);
|
||
|
console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
|
||
|
|
||
|
const result = await anthropicProvider.vision({
|
||
|
image: imageBuffer,
|
||
|
prompt: 'Describe this coffee image. What do you see in terms of the cup, foam pattern, and overall composition?'
|
||
|
});
|
||
|
|
||
|
console.log(`Anthropic Vision (Coffee) - Result: ${result}`);
|
||
|
expect(result).toBeTruthy();
|
||
|
expect(typeof result).toEqual('string');
|
||
|
expect(result.toLowerCase()).toInclude('coffee');
|
||
|
// The image has a heart pattern in the latte art
|
||
|
const mentionsLatte = result.toLowerCase().includes('heart') ||
|
||
|
result.toLowerCase().includes('latte') ||
|
||
|
result.toLowerCase().includes('foam');
|
||
|
expect(mentionsLatte).toBeTrue();
|
||
|
});
|
||
|
|
||
|
tap.test('Anthropic Vision: should analyze laptop/workspace image', async () => {
|
||
|
// Test 2: Laptop image from Unsplash by Nicolas Bichon
|
||
|
const imagePath = './test/testimages/laptop-nicolas/laptop.jpg';
|
||
|
console.log(`Loading laptop image from: ${imagePath}`);
|
||
|
|
||
|
const imageBuffer = await smartfile.fs.toBuffer(imagePath);
|
||
|
console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
|
||
|
|
||
|
const result = await anthropicProvider.vision({
|
||
|
image: imageBuffer,
|
||
|
prompt: 'Describe the technology and workspace setup in this image. What devices and equipment can you see?'
|
||
|
});
|
||
|
|
||
|
console.log(`Anthropic Vision (Laptop) - Result: ${result}`);
|
||
|
expect(result).toBeTruthy();
|
||
|
expect(typeof result).toEqual('string');
|
||
|
// Should mention laptop, computer, keyboard, or desk
|
||
|
const mentionsTech = result.toLowerCase().includes('laptop') ||
|
||
|
result.toLowerCase().includes('computer') ||
|
||
|
result.toLowerCase().includes('keyboard') ||
|
||
|
result.toLowerCase().includes('desk');
|
||
|
expect(mentionsTech).toBeTrue();
|
||
|
});
|
||
|
|
||
|
tap.test('Anthropic Vision: should analyze receipt/document image', async () => {
|
||
|
// Test 3: Receipt image from Unsplash by Annie Spratt
|
||
|
const imagePath = './test/testimages/receipt-annie/receipt.jpg';
|
||
|
console.log(`Loading receipt image from: ${imagePath}`);
|
||
|
|
||
|
const imageBuffer = await smartfile.fs.toBuffer(imagePath);
|
||
|
console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
|
||
|
|
||
|
const result = await anthropicProvider.vision({
|
||
|
image: imageBuffer,
|
||
|
prompt: 'What type of document is this? Can you identify any text or numbers visible in the image?'
|
||
|
});
|
||
|
|
||
|
console.log(`Anthropic Vision (Receipt) - Result: ${result}`);
|
||
|
expect(result).toBeTruthy();
|
||
|
expect(typeof result).toEqual('string');
|
||
|
// Should mention receipt, document, text, or paper
|
||
|
const mentionsDocument = result.toLowerCase().includes('receipt') ||
|
||
|
result.toLowerCase().includes('document') ||
|
||
|
result.toLowerCase().includes('text') ||
|
||
|
result.toLowerCase().includes('paper');
|
||
|
expect(mentionsDocument).toBeTrue();
|
||
|
});
|
||
|
|
||
|
tap.test('Anthropic Vision: should stop the provider', async () => {
|
||
|
await anthropicProvider.stop();
|
||
|
});
|
||
|
|
||
|
export default tap.start();
|