Compare commits

2 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 2791d738d6 | |
| | 3fbd054985 | |
@@ -1,5 +1,13 @@
 # Changelog
 
+## 2025-10-03 - 0.7.3 - fix(tests)
+Add extensive provider/feature tests and local Claude CI permissions
+
+- Add many focused test files covering providers and features: OpenAI, Anthropic, Perplexity, Groq, Ollama, Exo, XAI (chat, audio, vision, document, research, image generation, stubs, interfaces, basic)
+- Introduce .claude/settings.local.json to declare allowed permissions for local Claude/CI actions
+- Replace older aggregated test files with modular per-feature tests (removed legacy combined tests and split into smaller suites)
+- No changes to library runtime code — this change adds tests and CI/local agent configuration only
+
 ## 2025-10-03 - 0.7.2 - fix(anthropic)
 Update Anthropic provider branding to Claude Sonnet 4.5 and add local Claude permissions
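The `.claude/settings.local.json` file referenced above is not included in this compare view. As a rough sketch only, assuming Claude Code's standard `permissions.allow`/`permissions.deny` schema, such a file might look like the following; the rule strings are hypothetical placeholders, not the repository's actual entries:

```json
{
  "permissions": {
    "allow": [
      "Bash(pnpm test:*)",
      "Read(./test/**)"
    ],
    "deny": []
  }
}
```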
@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartai",
-  "version": "0.7.2",
+  "version": "0.7.3",
   "private": false,
   "description": "SmartAi is a versatile TypeScript library designed to facilitate integration and interaction with various AI models, offering functionalities for chat, audio generation, document processing, and vision tasks.",
   "main": "dist_ts/index.js",
@@ -1,216 +0,0 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartfile from '@push.rocks/smartfile';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let anthropicProvider: smartai.AnthropicProvider;

tap.test('Anthropic: should create and start Anthropic provider', async () => {
  anthropicProvider = new smartai.AnthropicProvider({
    anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  });
  await anthropicProvider.start();
  expect(anthropicProvider).toBeInstanceOf(smartai.AnthropicProvider);
});

tap.test('Anthropic: should create chat response', async () => {
  const userMessage = 'What is the capital of France? Answer in one word.';
  const response = await anthropicProvider.chat({
    systemMessage: 'You are a helpful assistant. Be concise.',
    userMessage: userMessage,
    messageHistory: [],
  });
  console.log(`Anthropic Chat - User: ${userMessage}`);
  console.log(`Anthropic Chat - Response: ${response.message}`);

  expect(response.role).toEqual('assistant');
  expect(response.message).toBeTruthy();
  expect(response.message.toLowerCase()).toInclude('paris');
});

tap.test('Anthropic: should handle message history', async () => {
  const messageHistory: smartai.ChatMessage[] = [
    { role: 'user', content: 'My name is Claude Test' },
    { role: 'assistant', content: 'Nice to meet you, Claude Test!' }
  ];

  const response = await anthropicProvider.chat({
    systemMessage: 'You are a helpful assistant with good memory.',
    userMessage: 'What is my name?',
    messageHistory: messageHistory,
  });

  console.log(`Anthropic Memory Test - Response: ${response.message}`);
  expect(response.message.toLowerCase()).toInclude('claude test');
});

tap.test('Anthropic: should analyze coffee image with latte art', async () => {
  // Test 1: Coffee image from Unsplash by Dani
  const imagePath = './test/testimages/coffee-dani/coffee.jpg';
  console.log(`Loading coffee image from: ${imagePath}`);

  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
  console.log(`Image loaded, size: ${imageBuffer.length} bytes`);

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'Describe this coffee image. What do you see in terms of the cup, foam pattern, and overall composition?'
  });

  console.log(`Anthropic Vision (Coffee) - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
  expect(result.toLowerCase()).toInclude('coffee');
  // The image has a heart pattern in the latte art
  const mentionsLatte = result.toLowerCase().includes('heart') ||
    result.toLowerCase().includes('latte') ||
    result.toLowerCase().includes('foam');
  expect(mentionsLatte).toBeTrue();
});

tap.test('Anthropic: should analyze laptop/workspace image', async () => {
  // Test 2: Laptop image from Unsplash by Nicolas Bichon
  const imagePath = './test/testimages/laptop-nicolas/laptop.jpg';
  console.log(`Loading laptop image from: ${imagePath}`);

  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
  console.log(`Image loaded, size: ${imageBuffer.length} bytes`);

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'Describe the technology and workspace setup in this image. What devices and equipment can you see?'
  });

  console.log(`Anthropic Vision (Laptop) - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
  // Should mention laptop, computer, keyboard, or desk
  const mentionsTech = result.toLowerCase().includes('laptop') ||
    result.toLowerCase().includes('computer') ||
    result.toLowerCase().includes('keyboard') ||
    result.toLowerCase().includes('desk');
  expect(mentionsTech).toBeTrue();
});

tap.test('Anthropic: should analyze receipt/document image', async () => {
  // Test 3: Receipt image from Unsplash by Annie Spratt
  const imagePath = './test/testimages/receipt-annie/receipt.jpg';
  console.log(`Loading receipt image from: ${imagePath}`);

  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
  console.log(`Image loaded, size: ${imageBuffer.length} bytes`);

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'What type of document is this? Can you identify any text or numbers visible in the image?'
  });

  console.log(`Anthropic Vision (Receipt) - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
  // Should mention receipt, document, text, or paper
  const mentionsDocument = result.toLowerCase().includes('receipt') ||
    result.toLowerCase().includes('document') ||
    result.toLowerCase().includes('text') ||
    result.toLowerCase().includes('paper');
  expect(mentionsDocument).toBeTrue();
});

tap.test('Anthropic: should document a PDF', async () => {
  const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
  const pdfResponse = await smartrequest.SmartRequest.create()
    .url(pdfUrl)
    .get();

  const result = await anthropicProvider.document({
    systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "bank account statement", "contract", "test document", "other". The answer should only contain the keyword for machine use.',
    userMessage: 'Classify this document.',
    messageHistory: [],
    pdfDocuments: [Buffer.from(await pdfResponse.arrayBuffer())],
  });

  console.log(`Anthropic Document - Result:`, result);
  expect(result).toBeTruthy();
  expect(result.message).toBeTruthy();
});

tap.test('Anthropic: should handle complex document analysis', async () => {
  // Test with the demo PDF if it exists
  const pdfPath = './.nogit/demo_without_textlayer.pdf';
  let pdfBuffer: Uint8Array;

  try {
    pdfBuffer = await smartfile.fs.toBuffer(pdfPath);
  } catch (error) {
    // If the file doesn't exist, use the dummy PDF
    console.log('Demo PDF not found, using dummy PDF instead');
    const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
    const pdfResponse = await smartrequest.SmartRequest.create()
      .url(pdfUrl)
      .get();
    pdfBuffer = Buffer.from(await pdfResponse.arrayBuffer());
  }

  const result = await anthropicProvider.document({
    systemMessage: `
      Analyze this document and provide a JSON response with the following structure:
      {
        "documentType": "string",
        "hasText": boolean,
        "summary": "string"
      }
    `,
    userMessage: 'Analyze this document.',
    messageHistory: [],
    pdfDocuments: [pdfBuffer],
  });

  console.log(`Anthropic Complex Document Analysis:`, result);
  expect(result).toBeTruthy();
  expect(result.message).toBeTruthy();
});

tap.test('Anthropic: should handle errors gracefully', async () => {
  // Test with invalid message (empty)
  let errorCaught = false;

  try {
    await anthropicProvider.chat({
      systemMessage: '',
      userMessage: '',
      messageHistory: [],
    });
  } catch (error) {
    errorCaught = true;
    console.log('Expected error caught:', error.message);
  }

  // Anthropic might handle empty messages, so we don't assert error
  console.log(`Error handling test - Error caught: ${errorCaught}`);
});

tap.test('Anthropic: audio should throw not supported error', async () => {
  let errorCaught = false;

  try {
    await anthropicProvider.audio({
      message: 'This should fail'
    });
  } catch (error) {
    errorCaught = true;
    expect(error.message).toInclude('not yet supported');
  }

  expect(errorCaught).toBeTrue();
});

tap.test('Anthropic: should stop the provider', async () => {
  await anthropicProvider.stop();
  console.log('Anthropic provider stopped successfully');
});

export default tap.start();
test/test.audio.openai.ts (new file, 39 lines)

@@ -0,0 +1,39 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartfile from '@push.rocks/smartfile';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let testSmartai: smartai.SmartAi;

tap.test('OpenAI Audio: should create a smartai instance with OpenAI provider', async () => {
  testSmartai = new smartai.SmartAi({
    openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
  });
  await testSmartai.start();
});

tap.test('OpenAI Audio: should create audio response', async () => {
  // Call the audio method with a sample message.
  const audioStream = await testSmartai.openaiProvider.audio({
    message: 'This is a test of audio generation.',
  });
  // Read all chunks from the stream.
  const chunks: Uint8Array[] = [];
  for await (const chunk of audioStream) {
    chunks.push(chunk as Uint8Array);
  }
  const audioBuffer = Buffer.concat(chunks);
  await smartfile.fs.toFs(audioBuffer, './.nogit/testoutput.mp3');
  console.log(`Audio Buffer length: ${audioBuffer.length}`);
  // Assert that the resulting buffer is not empty.
  expect(audioBuffer.length).toBeGreaterThan(0);
});

tap.test('OpenAI Audio: should stop the smartai instance', async () => {
  await testSmartai.stop();
});

export default tap.start();
test/test.audio.stubs.ts (new file, 36 lines)

@@ -0,0 +1,36 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let anthropicProvider: smartai.AnthropicProvider;

tap.test('Audio Stubs: should create Anthropic provider', async () => {
  anthropicProvider = new smartai.AnthropicProvider({
    anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  });
  await anthropicProvider.start();
});

tap.test('Audio Stubs: Anthropic audio should throw not supported error', async () => {
  let errorCaught = false;

  try {
    await anthropicProvider.audio({
      message: 'This should fail'
    });
  } catch (error) {
    errorCaught = true;
    expect(error.message).toInclude('not yet supported');
  }

  expect(errorCaught).toBeTrue();
});

tap.test('Audio Stubs: should stop Anthropic provider', async () => {
  await anthropicProvider.stop();
});

export default tap.start();
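For context, the 'not yet supported' assertion above relies on the provider rejecting audio calls. A minimal sketch of what such a provider-side stub could look like (an assumption for illustration, not the library's actual source):

```typescript
// Hypothetical stub: a provider without audio support can reject with a
// message containing 'not yet supported' so the stub test above passes.
class AudioUnsupportedProvider {
  public async audio(optionsArg: { message: string }): Promise<never> {
    // The thrown message must include 'not yet supported'.
    throw new Error('Audio generation is not yet supported by this provider.');
  }
}
```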
test/test.chat.anthropic.ts (new file, 72 lines)

@@ -0,0 +1,72 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let anthropicProvider: smartai.AnthropicProvider;

tap.test('Anthropic Chat: should create and start Anthropic provider', async () => {
  anthropicProvider = new smartai.AnthropicProvider({
    anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  });
  await anthropicProvider.start();
  expect(anthropicProvider).toBeInstanceOf(smartai.AnthropicProvider);
});

tap.test('Anthropic Chat: should create chat response', async () => {
  const userMessage = 'What is the capital of France? Answer in one word.';
  const response = await anthropicProvider.chat({
    systemMessage: 'You are a helpful assistant. Be concise.',
    userMessage: userMessage,
    messageHistory: [],
  });
  console.log(`Anthropic Chat - User: ${userMessage}`);
  console.log(`Anthropic Chat - Response: ${response.message}`);

  expect(response.role).toEqual('assistant');
  expect(response.message).toBeTruthy();
  expect(response.message.toLowerCase()).toInclude('paris');
});

tap.test('Anthropic Chat: should handle message history', async () => {
  const messageHistory: smartai.ChatMessage[] = [
    { role: 'user', content: 'My name is Claude Test' },
    { role: 'assistant', content: 'Nice to meet you, Claude Test!' }
  ];

  const response = await anthropicProvider.chat({
    systemMessage: 'You are a helpful assistant with good memory.',
    userMessage: 'What is my name?',
    messageHistory: messageHistory,
  });

  console.log(`Anthropic Memory Test - Response: ${response.message}`);
  expect(response.message.toLowerCase()).toInclude('claude test');
});

tap.test('Anthropic Chat: should handle errors gracefully', async () => {
  // Test with invalid message (empty)
  let errorCaught = false;

  try {
    await anthropicProvider.chat({
      systemMessage: '',
      userMessage: '',
      messageHistory: [],
    });
  } catch (error) {
    errorCaught = true;
    console.log('Expected error caught:', error.message);
  }

  // Anthropic might handle empty messages, so we don't assert error
  console.log(`Error handling test - Error caught: ${errorCaught}`);
});

tap.test('Anthropic Chat: should stop the provider', async () => {
  await anthropicProvider.stop();
});

export default tap.start();
test/test.chat.openai.ts (new file, 34 lines)

@@ -0,0 +1,34 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let testSmartai: smartai.SmartAi;

tap.test('OpenAI Chat: should create a smartai instance with OpenAI provider', async () => {
  testSmartai = new smartai.SmartAi({
    openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
  });
  await testSmartai.start();
});

tap.test('OpenAI Chat: should create chat response', async () => {
  const userMessage = 'How are you?';
  const response = await testSmartai.openaiProvider.chat({
    systemMessage: 'Hello',
    userMessage: userMessage,
    messageHistory: [],
  });
  console.log(`userMessage: ${userMessage}`);
  console.log(response.message);
  expect(response.role).toEqual('assistant');
  expect(response.message).toBeTruthy();
});

tap.test('OpenAI Chat: should stop the smartai instance', async () => {
  await testSmartai.stop();
});

export default tap.start();
test/test.document.anthropic.ts (new file, 78 lines)

@@ -0,0 +1,78 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartfile from '@push.rocks/smartfile';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let anthropicProvider: smartai.AnthropicProvider;

tap.test('Anthropic Document: should create and start Anthropic provider', async () => {
  anthropicProvider = new smartai.AnthropicProvider({
    anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  });
  await anthropicProvider.start();
  expect(anthropicProvider).toBeInstanceOf(smartai.AnthropicProvider);
});

tap.test('Anthropic Document: should document a PDF', async () => {
  const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
  const pdfResponse = await smartrequest.SmartRequest.create()
    .url(pdfUrl)
    .get();

  const result = await anthropicProvider.document({
    systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "bank account statement", "contract", "test document", "other". The answer should only contain the keyword for machine use.',
    userMessage: 'Classify this document.',
    messageHistory: [],
    pdfDocuments: [Buffer.from(await pdfResponse.arrayBuffer())],
  });

  console.log(`Anthropic Document - Result:`, result);
  expect(result).toBeTruthy();
  expect(result.message).toBeTruthy();
});

tap.test('Anthropic Document: should handle complex document analysis', async () => {
  // Test with the demo PDF if it exists
  const pdfPath = './.nogit/demo_without_textlayer.pdf';
  let pdfBuffer: Uint8Array;

  try {
    pdfBuffer = await smartfile.fs.toBuffer(pdfPath);
  } catch (error) {
    // If the file doesn't exist, use the dummy PDF
    console.log('Demo PDF not found, using dummy PDF instead');
    const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
    const pdfResponse = await smartrequest.SmartRequest.create()
      .url(pdfUrl)
      .get();
    pdfBuffer = Buffer.from(await pdfResponse.arrayBuffer());
  }

  const result = await anthropicProvider.document({
    systemMessage: `
      Analyze this document and provide a JSON response with the following structure:
      {
        "documentType": "string",
        "hasText": boolean,
        "summary": "string"
      }
    `,
    userMessage: 'Analyze this document.',
    messageHistory: [],
    pdfDocuments: [pdfBuffer],
  });

  console.log(`Anthropic Complex Document Analysis:`, result);
  expect(result).toBeTruthy();
  expect(result.message).toBeTruthy();
});

tap.test('Anthropic Document: should stop the provider', async () => {
  await anthropicProvider.stop();
});

export default tap.start();
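The complex-analysis test above asserts only that `result.message` is truthy and never parses the JSON the system prompt requests. A hedged sketch of how a caller might consume it, assuming `result.message` carries the raw model text (`DocumentAnalysis` and `parseAnalysis` are hypothetical, not part of the library):

```typescript
interface DocumentAnalysis {
  documentType: string;
  hasText: boolean;
  summary: string;
}

// Models sometimes wrap JSON in a markdown code fence, so drop fence lines
// before parsing and fail soft on malformed output.
function parseAnalysis(message: string): DocumentAnalysis | null {
  const fence = '`'.repeat(3);
  const cleaned = message
    .split('\n')
    .filter((line) => !line.trim().startsWith(fence))
    .join('\n')
    .trim();
  try {
    return JSON.parse(cleaned) as DocumentAnalysis;
  } catch {
    return null;
  }
}
```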
@@ -9,25 +9,14 @@ import * as smartai from '../ts/index.js';
 
 let testSmartai: smartai.SmartAi;
 
-tap.test('OpenAI: should create a smartai instance with OpenAI provider', async () => {
+tap.test('OpenAI Document: should create a smartai instance with OpenAI provider', async () => {
   testSmartai = new smartai.SmartAi({
     openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
   });
   await testSmartai.start();
 });
 
-tap.test('OpenAI: should create chat response', async () => {
-  const userMessage = 'How are you?';
-  const response = await testSmartai.openaiProvider.chat({
-    systemMessage: 'Hello',
-    userMessage: userMessage,
-    messageHistory: [],
-  });
-  console.log(`userMessage: ${userMessage}`);
-  console.log(response.message);
-});
-
-tap.test('OpenAI: should document a pdf', async () => {
+tap.test('OpenAI Document: should document a pdf', async () => {
   const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
   const pdfResponse = await smartrequest.SmartRequest.create()
     .url(pdfUrl)
@@ -39,9 +28,10 @@ tap.test('OpenAI: should document a pdf', async () => {
     pdfDocuments: [Buffer.from(await pdfResponse.arrayBuffer())],
   });
   console.log(result);
+  expect(result.message).toBeTruthy();
 });
 
-tap.test('OpenAI: should recognize companies in a pdf', async () => {
+tap.test('OpenAI Document: should recognize companies in a pdf', async () => {
   const pdfBuffer = await smartfile.fs.toBuffer('./.nogit/demo_without_textlayer.pdf');
   const result = await testSmartai.openaiProvider.document({
     systemMessage: `
@@ -76,27 +66,11 @@ tap.test('OpenAI: should recognize companies in a pdf', async () => {
     pdfDocuments: [pdfBuffer],
   });
   console.log(result);
+  expect(result.message).toBeTruthy();
 });
 
-tap.test('OpenAI: should create audio response', async () => {
-  // Call the audio method with a sample message.
-  const audioStream = await testSmartai.openaiProvider.audio({
-    message: 'This is a test of audio generation.',
-  });
-  // Read all chunks from the stream.
-  const chunks: Uint8Array[] = [];
-  for await (const chunk of audioStream) {
-    chunks.push(chunk as Uint8Array);
-  }
-  const audioBuffer = Buffer.concat(chunks);
-  await smartfile.fs.toFs(audioBuffer, './.nogit/testoutput.mp3');
-  console.log(`Audio Buffer length: ${audioBuffer.length}`);
-  // Assert that the resulting buffer is not empty.
-  expect(audioBuffer.length).toBeGreaterThan(0);
-});
-
-tap.test('OpenAI: should stop the smartai instance', async () => {
+tap.test('OpenAI Document: should stop the smartai instance', async () => {
   await testSmartai.stop();
 });
 
-export default tap.start();
+export default tap.start();
test/test.vision.anthropic.ts (new file, 95 lines)

@@ -0,0 +1,95 @@
import { expect, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartfile from '@push.rocks/smartfile';

const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let anthropicProvider: smartai.AnthropicProvider;

tap.test('Anthropic Vision: should create and start Anthropic provider', async () => {
  anthropicProvider = new smartai.AnthropicProvider({
    anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  });
  await anthropicProvider.start();
  expect(anthropicProvider).toBeInstanceOf(smartai.AnthropicProvider);
});

tap.test('Anthropic Vision: should analyze coffee image with latte art', async () => {
  // Test 1: Coffee image from Unsplash by Dani
  const imagePath = './test/testimages/coffee-dani/coffee.jpg';
  console.log(`Loading coffee image from: ${imagePath}`);

  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
  console.log(`Image loaded, size: ${imageBuffer.length} bytes`);

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'Describe this coffee image. What do you see in terms of the cup, foam pattern, and overall composition?'
  });

  console.log(`Anthropic Vision (Coffee) - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
  expect(result.toLowerCase()).toInclude('coffee');
  // The image has a heart pattern in the latte art
  const mentionsLatte = result.toLowerCase().includes('heart') ||
    result.toLowerCase().includes('latte') ||
    result.toLowerCase().includes('foam');
  expect(mentionsLatte).toBeTrue();
});

tap.test('Anthropic Vision: should analyze laptop/workspace image', async () => {
  // Test 2: Laptop image from Unsplash by Nicolas Bichon
  const imagePath = './test/testimages/laptop-nicolas/laptop.jpg';
  console.log(`Loading laptop image from: ${imagePath}`);

  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
  console.log(`Image loaded, size: ${imageBuffer.length} bytes`);

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'Describe the technology and workspace setup in this image. What devices and equipment can you see?'
  });

  console.log(`Anthropic Vision (Laptop) - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
  // Should mention laptop, computer, keyboard, or desk
  const mentionsTech = result.toLowerCase().includes('laptop') ||
    result.toLowerCase().includes('computer') ||
    result.toLowerCase().includes('keyboard') ||
    result.toLowerCase().includes('desk');
  expect(mentionsTech).toBeTrue();
});

tap.test('Anthropic Vision: should analyze receipt/document image', async () => {
  // Test 3: Receipt image from Unsplash by Annie Spratt
  const imagePath = './test/testimages/receipt-annie/receipt.jpg';
  console.log(`Loading receipt image from: ${imagePath}`);

  const imageBuffer = await smartfile.fs.toBuffer(imagePath);
  console.log(`Image loaded, size: ${imageBuffer.length} bytes`);

  const result = await anthropicProvider.vision({
    image: imageBuffer,
    prompt: 'What type of document is this? Can you identify any text or numbers visible in the image?'
  });

  console.log(`Anthropic Vision (Receipt) - Result: ${result}`);
  expect(result).toBeTruthy();
  expect(typeof result).toEqual('string');
  // Should mention receipt, document, text, or paper
  const mentionsDocument = result.toLowerCase().includes('receipt') ||
    result.toLowerCase().includes('document') ||
    result.toLowerCase().includes('text') ||
    result.toLowerCase().includes('paper');
  expect(mentionsDocument).toBeTrue();
});

tap.test('Anthropic Vision: should stop the provider', async () => {
  await anthropicProvider.stop();
});

export default tap.start();
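A side note on the repeated keyword assertions in these vision tests: the `mentionsLatte`/`mentionsTech`/`mentionsDocument` chains could be collapsed into one small utility. A possible refactor sketch (`includesAny` is hypothetical, not in the diff):

```typescript
// Returns true when the text contains at least one of the keywords,
// case-insensitively.
const includesAny = (text: string, keywords: string[]): boolean =>
  keywords.some((keyword) => text.toLowerCase().includes(keyword));

// Usage in place of the chained includes() checks:
// expect(includesAny(result, ['receipt', 'document', 'text', 'paper'])).toBeTrue();
```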
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.7.2',
+  version: '0.7.3',
   description: 'SmartAi is a versatile TypeScript library designed to facilitate integration and interaction with various AI models, offering functionalities for chat, audio generation, document processing, and vision tasks.'
 }