feat(provider.anthropic): Add support for vision and document processing in Anthropic provider
This commit is contained in:
@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@push.rocks/smartai',
|
||||
version: '0.1.0',
|
||||
version: '0.2.0',
|
||||
description: 'A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.'
|
||||
}
|
||||
|
@ -2,6 +2,9 @@ import * as plugins from './plugins.js';
|
||||
import * as paths from './paths.js';
|
||||
import { MultiModalModel } from './abstract.classes.multimodal.js';
|
||||
import type { ChatOptions, ChatResponse, ChatMessage } from './abstract.classes.multimodal.js';
|
||||
import type { ImageBlockParam, TextBlockParam } from '@anthropic-ai/sdk/resources/messages';
|
||||
|
||||
// Union of the Anthropic message content block shapes this provider emits
// (text prompts and base64-encoded images).
type ContentBlock = ImageBlockParam | TextBlockParam;
|
||||
|
||||
export interface IAnthropicProviderOptions {
|
||||
anthropicToken: string;
|
||||
@ -132,7 +135,40 @@ export class AnthropicProvider extends MultiModalModel {
|
||||
}
|
||||
|
||||
public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
|
||||
throw new Error('Vision tasks are not yet supported by Anthropic.');
|
||||
const base64Image = optionsArg.image.toString('base64');
|
||||
|
||||
const content: ContentBlock[] = [
|
||||
{
|
||||
type: 'text',
|
||||
text: optionsArg.prompt
|
||||
},
|
||||
{
|
||||
type: 'image',
|
||||
source: {
|
||||
type: 'base64',
|
||||
media_type: 'image/jpeg',
|
||||
data: base64Image
|
||||
}
|
||||
}
|
||||
];
|
||||
|
||||
const result = await this.anthropicApiClient.messages.create({
|
||||
model: 'claude-3-opus-20240229',
|
||||
messages: [{
|
||||
role: 'user',
|
||||
content
|
||||
}],
|
||||
max_tokens: 1024
|
||||
});
|
||||
|
||||
// Extract text content from the response
|
||||
let message = '';
|
||||
for (const block of result.content) {
|
||||
if ('text' in block) {
|
||||
message += block.text;
|
||||
}
|
||||
}
|
||||
return message;
|
||||
}
|
||||
|
||||
public async document(optionsArg: {
|
||||
@ -141,6 +177,64 @@ export class AnthropicProvider extends MultiModalModel {
|
||||
pdfDocuments: Uint8Array[];
|
||||
messageHistory: ChatMessage[];
|
||||
}): Promise<{ message: any }> {
|
||||
throw new Error('Document processing is not yet supported by Anthropic.');
|
||||
// Convert PDF documents to images using SmartPDF
|
||||
const smartpdfInstance = new plugins.smartpdf.SmartPdf();
|
||||
let documentImageBytesArray: Uint8Array[] = [];
|
||||
|
||||
for (const pdfDocument of optionsArg.pdfDocuments) {
|
||||
const documentImageArray = await smartpdfInstance.convertPDFToPngBytes(pdfDocument);
|
||||
documentImageBytesArray = documentImageBytesArray.concat(documentImageArray);
|
||||
}
|
||||
|
||||
// Convert message history to Anthropic format
|
||||
const messages = optionsArg.messageHistory.map(msg => ({
|
||||
role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
|
||||
content: msg.content
|
||||
}));
|
||||
|
||||
// Create content array with text and images
|
||||
const content: ContentBlock[] = [
|
||||
{
|
||||
type: 'text',
|
||||
text: optionsArg.userMessage
|
||||
}
|
||||
];
|
||||
|
||||
// Add each document page as an image
|
||||
for (const imageBytes of documentImageBytesArray) {
|
||||
content.push({
|
||||
type: 'image',
|
||||
source: {
|
||||
type: 'base64',
|
||||
media_type: 'image/jpeg',
|
||||
data: Buffer.from(imageBytes).toString('base64')
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const result = await this.anthropicApiClient.messages.create({
|
||||
model: 'claude-3-opus-20240229',
|
||||
system: optionsArg.systemMessage,
|
||||
messages: [
|
||||
...messages,
|
||||
{ role: 'user', content }
|
||||
],
|
||||
max_tokens: 4096
|
||||
});
|
||||
|
||||
// Extract text content from the response
|
||||
let message = '';
|
||||
for (const block of result.content) {
|
||||
if ('text' in block) {
|
||||
message += block.text;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
message: {
|
||||
role: 'assistant',
|
||||
content: message
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
Reference in New Issue
Block a user