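// Integration tests for the Anthropic provider in @push.rocks/smartai.
// They need a real ANTHROPIC_TOKEN (resolved via qenv from ./ or ./.nogit/)
// and network access, and exercise the chat, vision, and document workflows.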
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartfile from '@push.rocks/smartfile';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let anthropicProvider: smartai.AnthropicProvider;
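
// Provider lifecycle: the token is resolved on demand via qenv and the provider
// is started here before any of the chat/vision/document tests below run.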
tap.test('Anthropic: should create and start Anthropic provider', async () => {
  anthropicProvider = new smartai.AnthropicProvider({
    anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  });
  await anthropicProvider.start();
  expect(anthropicProvider).toBeInstanceOf(smartai.AnthropicProvider);
});
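
// Basic chat round-trip: one user message with an empty history, asserting the
// assistant role and that the answer mentions Paris.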
tap.test('Anthropic: should create chat response', async () => {
  const userMessage = 'What is the capital of France? Answer in one word.';
  const response = await anthropicProvider.chat({
    systemMessage: 'You are a helpful assistant. Be concise.',
    userMessage: userMessage,
    messageHistory: [],
  });
  console.log(`Anthropic Chat - User: ${userMessage}`);
  console.log(`Anthropic Chat - Response: ${response.message}`);

  expect(response.role).toEqual('assistant');
  expect(response.message).toBeTruthy();
  expect(response.message.toLowerCase()).toInclude('paris');
});
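
// Multi-turn memory: earlier user/assistant turns are supplied via messageHistory,
// and the follow-up question should be answered from that context.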
tap.test('Anthropic: should handle message history', async () => {
  const messageHistory: smartai.ChatMessage[] = [
    { role: 'user', content: 'My name is Claude Test' },
    { role: 'assistant', content: 'Nice to meet you, Claude Test!' }
  ];

  const response = await anthropicProvider.chat({
    systemMessage: 'You are a helpful assistant with good memory.',
    userMessage: 'What is my name?',
    messageHistory: messageHistory,
  });

  console.log(`Anthropic Memory Test - Response: ${response.message}`);
  expect(response.message.toLowerCase()).toInclude('claude test');
});
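
// Vision: the image is passed as a Buffer. The prompt asks for a one-word color,
// but the assertions only check that a non-empty string comes back.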
tap.test('Anthropic: should process vision tasks', async () => {
  // A minimal, valid 1x1 red-pixel JPEG, embedded as base64
  const redPixelBase64 = '/9j/4AAQSkZJRgABAQEAYABgAAD/2wBDAAgGBgcGBQgHBwcJCQgKDBQNDAsLDBkSEw8UHRofHh0aHBwgJC4nICIsIxwcKDcpLDAxNDQ0Hyc5PTgyPC4zNDL/2wBDAQkJCQwLDBgNDRgyIRwhMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjIyMjL/wAARCAABAAEDASIAAhEBAxEB/8QAFQABAQAAAAAAAAAAAAAAAAAAAAv/xAAUEAEAAAAAAAAAAAAAAAAAAAAA/8QAFQEBAQAAAAAAAAAAAAAAAAAAAAX/xAAUEQEAAAAAAAAAAAAAAAAAAAAA/9oADAMBAAIRAxEAPwCwAA8A/9k=';
  const imageBuffer = Buffer.from(redPixelBase64, 'base64');

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'What color is this image? Answer with just the color name.'
  });

  console.log(`Anthropic Vision - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
});
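
// Document classification: a public dummy PDF is downloaded and handed to the
// provider as a Buffer; the system message constrains the answer to a fixed keyword set.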
tap.test('Anthropic: should document a PDF', async () => {
  const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
  const pdfResponse = await smartrequest.SmartRequest.create()
    .url(pdfUrl)
    .get();

  const result = await anthropicProvider.document({
    systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "bank account statement", "contract", "test document", "other". The answer should only contain the keyword for machine use.',
    userMessage: 'Classify this document.',
    messageHistory: [],
    pdfDocuments: [Buffer.from(await pdfResponse.arrayBuffer())],
  });

  console.log(`Anthropic Document - Result:`, result);
  expect(result).toBeTruthy();
  expect(result.message).toBeTruthy();
});
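
// Document analysis with a local fixture: falls back to the public dummy PDF
// when ./.nogit/demo_without_textlayer.pdf is not present.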
tap.test('Anthropic: should handle complex document analysis', async () => {
  // Test with the demo PDF if it exists
  const pdfPath = './.nogit/demo_without_textlayer.pdf';
  let pdfBuffer: Uint8Array;

  try {
    pdfBuffer = await smartfile.fs.toBuffer(pdfPath);
  } catch (error) {
    // If the file doesn't exist, use the dummy PDF
    console.log('Demo PDF not found, using dummy PDF instead');
    const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
    const pdfResponse = await smartrequest.SmartRequest.create()
      .url(pdfUrl)
      .get();
    pdfBuffer = Buffer.from(await pdfResponse.arrayBuffer());
  }

  const result = await anthropicProvider.document({
    systemMessage: `
      Analyze this document and provide a JSON response with the following structure:
      {
        "documentType": "string",
        "hasText": boolean,
        "summary": "string"
      }
    `,
    userMessage: 'Analyze this document.',
    messageHistory: [],
    pdfDocuments: [pdfBuffer],
  });

  console.log(`Anthropic Complex Document Analysis:`, result);
  expect(result).toBeTruthy();
  expect(result.message).toBeTruthy();
});
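
// Error handling: empty system and user messages may or may not be rejected by
// the provider, so the test only logs whether an error was thrown.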
tap.test('Anthropic: should handle errors gracefully', async () => {
  // Test with invalid message (empty)
  let errorCaught = false;

  try {
    await anthropicProvider.chat({
      systemMessage: '',
      userMessage: '',
      messageHistory: [],
    });
  } catch (error) {
    errorCaught = true;
    console.log('Expected error caught:', error.message);
  }

  // Anthropic might handle empty messages, so we don't assert error
  console.log(`Error handling test - Error caught: ${errorCaught}`);
});
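
// Audio is not implemented for the Anthropic provider; the call is expected to
// throw an error whose message includes 'not yet supported'.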
tap.test('Anthropic: audio should throw not supported error', async () => {
  let errorCaught = false;

  try {
    await anthropicProvider.audio({
      message: 'This should fail'
    });
  } catch (error) {
    errorCaught = true;
    expect(error.message).toInclude('not yet supported');
  }

  expect(errorCaught).toBeTrue();
});
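
// Teardown: stop the provider once all tests have run.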
tap.test('Anthropic: should stop the provider', async () => {
  await anthropicProvider.stop();
  console.log('Anthropic provider stopped successfully');
});

export default tap.start();