import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
import type {
  ChatOptions,
  ChatResponse,
  ChatMessage,
  ResearchOptions,
  ResearchResponse,
  ImageGenerateOptions,
  ImageEditOptions,
  ImageResponse
} from './abstract.classes.multimodal.js';
import type { ImageBlockParam, TextBlockParam } from '@anthropic-ai/sdk/resources/messages';

type ContentBlock = ImageBlockParam | TextBlockParam;

export interface IAnthropicProviderOptions {
  anthropicToken: string;
  enableWebSearch?: boolean;
  searchDomainAllowList?: string[];
  searchDomainBlockList?: string[];
}
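
// Minimal usage sketch (illustrative; assumes a valid Anthropic API token in the environment):
//   const provider = new AnthropicProvider({ anthropicToken: process.env.ANTHROPIC_TOKEN! });
//   await provider.start();
//   const reply = await provider.chat({
//     systemMessage: 'You are a helpful assistant.',
//     userMessage: 'Hello!',
//     messageHistory: [],
//   });
//   console.log(reply.message);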

export class AnthropicProvider extends MultiModalModel {
  private options: IAnthropicProviderOptions;
  public anthropicApiClient: plugins.anthropic.default;

  constructor(optionsArg: IAnthropicProviderOptions) {
    super();
    this.options = optionsArg; // Ensure the token is stored
  }

  async start() {
    await super.start();
    this.anthropicApiClient = new plugins.anthropic.default({
      apiKey: this.options.anthropicToken,
    });
  }

  async stop() {
    await super.stop();
  }
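
  /**
   * Streams a chat conversation. The input stream is expected to carry
   * newline-delimited JSON objects of the form { role, content }; the
   * returned stream emits the assistant's text as it arrives.
   */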
  public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
    // Create a TextDecoder to handle incoming chunks
    const decoder = new TextDecoder();
    let buffer = '';
    let currentMessage: { role: string; content: string } | null = null;

    // Create a TransformStream to process the input. An arrow function is
    // used for transform() so that `this` still refers to the provider
    // rather than to the transformer object.
    const transform = new TransformStream<Uint8Array, string>({
      transform: async (chunk, controller) => {
        buffer += decoder.decode(chunk, { stream: true });

        // Try to parse complete JSON messages from the buffer
        while (true) {
          const newlineIndex = buffer.indexOf('\n');
          if (newlineIndex === -1) break;

          const line = buffer.slice(0, newlineIndex);
          buffer = buffer.slice(newlineIndex + 1);

          if (line.trim()) {
            try {
              const message = JSON.parse(line);
              currentMessage = {
                role: message.role || 'user',
                content: message.content || '',
              };
            } catch (e) {
              console.error('Failed to parse message:', e);
            }
          }
        }

        // If we have a complete message, send it to Anthropic
        if (currentMessage) {
          const stream = await this.anthropicApiClient.messages.create({
            model: 'claude-sonnet-4-5-20250929',
            messages: [{
              role: currentMessage.role === 'assistant' ? 'assistant' as const : 'user' as const,
              content: currentMessage.content,
            }],
            stream: true,
            max_tokens: 4000,
          });

          // Forward each text delta from Anthropic to the output stream
          for await (const event of stream) {
            if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
              controller.enqueue(event.delta.text);
            }
          }

          currentMessage = null;
        }
      },

      flush(controller) {
        if (buffer) {
          try {
            const message = JSON.parse(buffer);
            controller.enqueue(message.content || '');
          } catch (e) {
            console.error('Failed to parse remaining buffer:', e);
          }
        }
      },
    });

    // Connect the input to our transform stream
    return input.pipeThrough(transform);
  }
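
  // Illustrative sketch of feeding chatStream a single NDJSON-encoded message
  // (assumes a started provider instance; Node 18+ for async iteration):
  //   const encoder = new TextEncoder();
  //   const input = new ReadableStream<Uint8Array>({
  //     start(controller) {
  //       controller.enqueue(encoder.encode(JSON.stringify({ role: 'user', content: 'Hi' }) + '\n'));
  //       controller.close();
  //     },
  //   });
  //   for await (const token of await provider.chatStream(input)) process.stdout.write(token);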

  // Implementing the synchronous chat interaction
  public async chat(optionsArg: ChatOptions): Promise<ChatResponse> {
    // Convert message history to Anthropic format
    const messages = optionsArg.messageHistory.map(msg => ({
      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
      content: msg.content,
    }));

    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      system: optionsArg.systemMessage,
      messages: [
        ...messages,
        { role: 'user' as const, content: optionsArg.userMessage },
      ],
      max_tokens: 4000,
    });

    // Extract text content from the response
    let message = '';
    for (const block of result.content) {
      if ('text' in block) {
        message += block.text;
      }
    }

    return {
      role: 'assistant' as const,
      message,
    };
  }

  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
    // Anthropic does not provide an audio API, so this method is not implemented.
    throw new Error('Audio generation is not yet supported by Anthropic.');
  }
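
  /**
   * Describes an image using Claude's vision capability. The image buffer is
   * sent as base64; media_type is hardcoded to image/jpeg, so JPEG input is
   * assumed here.
   */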
  public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
    const base64Image = optionsArg.image.toString('base64');

    const content: ContentBlock[] = [
      {
        type: 'text',
        text: optionsArg.prompt,
      },
      {
        type: 'image',
        source: {
          type: 'base64',
          media_type: 'image/jpeg',
          data: base64Image,
        },
      },
    ];

    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      messages: [{
        role: 'user',
        content,
      }],
      max_tokens: 1024,
    });

    // Extract text content from the response
    let message = '';
    for (const block of result.content) {
      if ('text' in block) {
        message += block.text;
      }
    }
    return message;
  }
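
  /**
   * Answers questions about PDF documents. Each PDF is rasterized to PNG page
   * images via SmartPDF, and the pages are passed to Claude as image blocks
   * alongside the user message.
   */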
  public async document(optionsArg: {
    systemMessage: string;
    userMessage: string;
    pdfDocuments: Uint8Array[];
    messageHistory: ChatMessage[];
  }): Promise<{ message: any }> {
    // Convert PDF documents to images using SmartPDF
    let documentImageBytesArray: Uint8Array[] = [];

    for (const pdfDocument of optionsArg.pdfDocuments) {
      const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
      documentImageBytesArray = documentImageBytesArray.concat(documentImageArray);
    }

    // Convert message history to Anthropic format
    const messages = optionsArg.messageHistory.map(msg => ({
      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
      content: msg.content,
    }));

    // Create content array with text and images
    const content: ContentBlock[] = [
      {
        type: 'text',
        text: optionsArg.userMessage,
      },
    ];

    // Add each document page as an image; the page bytes are PNG, so the
    // declared media_type must be image/png
    for (const imageBytes of documentImageBytesArray) {
      content.push({
        type: 'image',
        source: {
          type: 'base64',
          media_type: 'image/png',
          data: Buffer.from(imageBytes).toString('base64'),
        },
      });
    }

    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      system: optionsArg.systemMessage,
      messages: [
        ...messages,
        { role: 'user', content },
      ],
      max_tokens: 4096,
    });

    // Extract text content from the response
    let message = '';
    for (const block of result.content) {
      if ('text' in block) {
        message += block.text;
      }
    }

    return {
      message: {
        role: 'assistant',
        content: message,
      },
    };
  }
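
  /**
   * Performs a research query with Claude, optionally using Anthropic's
   * server-side web search tool. Sources are gathered from citations and raw
   * search results, and searchDepth scales the max_tokens budget.
   */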
  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
    // Prepare the system prompt for the research request
    const systemMessage = `You are a research assistant with web search capabilities.
Provide comprehensive, well-researched answers with citations and sources.
When searching the web, be thorough and cite your sources accurately.`;

    try {
      // Build the tool configuration for web search
      const tools: any[] = [];

      if (this.options.enableWebSearch) {
        const webSearchTool: any = {
          type: 'web_search_20250305',
          name: 'web_search',
        };

        // Add optional parameters
        if (optionsArg.maxSources) {
          webSearchTool.max_uses = optionsArg.maxSources;
        }

        if (this.options.searchDomainAllowList?.length) {
          webSearchTool.allowed_domains = this.options.searchDomainAllowList;
        } else if (this.options.searchDomainBlockList?.length) {
          webSearchTool.blocked_domains = this.options.searchDomainBlockList;
        }

        tools.push(webSearchTool);
      }

      // Scale the token budget with the requested search depth
      const maxTokens = optionsArg.searchDepth === 'deep' ? 8192 :
                        optionsArg.searchDepth === 'advanced' ? 6144 : 4096;

      // Create the research request
      const requestParams: any = {
        model: 'claude-sonnet-4-5-20250929',
        system: systemMessage,
        messages: [
          {
            role: 'user' as const,
            content: optionsArg.query,
          },
        ],
        max_tokens: maxTokens,
        temperature: 0.7,
      };

      // Add tools if web search is enabled
      if (tools.length > 0) {
        requestParams.tools = tools;
      }

      // Execute the research request
      const result = await this.anthropicApiClient.messages.create(requestParams);

      // Extract the answer from content blocks
      let answer = '';
      const sources: Array<{ url: string; title: string; snippet: string }> = [];
      const searchQueries: string[] = [];

      // Process content blocks
      for (const block of result.content) {
        if ('text' in block) {
          // Accumulate text content
          answer += block.text;

          // Extract citations if present
          if ('citations' in block && Array.isArray(block.citations)) {
            for (const citation of block.citations) {
              if (citation.type === 'web_search_result_location') {
                sources.push({
                  title: citation.title || '',
                  url: citation.url || '',
                  snippet: citation.cited_text || '',
                });
              }
            }
          }
        } else if ('type' in block && block.type === 'server_tool_use') {
          // Extract search queries from server tool use
          if (block.name === 'web_search' && block.input && typeof block.input === 'object' && 'query' in block.input) {
            searchQueries.push((block.input as any).query);
          }
        } else if ('type' in block && block.type === 'web_search_tool_result') {
          // Extract sources from web search results
          if (Array.isArray(block.content)) {
            for (const searchResult of block.content) {
              if (searchResult.type === 'web_search_result') {
                // Only add if not already in sources (avoid duplicates from citations)
                if (!sources.some(s => s.url === searchResult.url)) {
                  sources.push({
                    title: searchResult.title || '',
                    url: searchResult.url || '',
                    snippet: '', // Search results don't include snippets; only citations do
                  });
                }
              }
            }
          }
        }
      }

      // Fallback: parse markdown-style links if no citations were found
      if (sources.length === 0) {
        const urlRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
        let match: RegExpExecArray | null;

        while ((match = urlRegex.exec(answer)) !== null) {
          sources.push({
            title: match[1],
            url: match[2],
            snippet: '',
          });
        }
      }

      // Check how many web searches were performed, based on usage info
      const webSearchCount = result.usage?.server_tool_use?.web_search_requests || 0;

      return {
        answer,
        sources,
        searchQueries: searchQueries.length > 0 ? searchQueries : undefined,
        metadata: {
          model: 'claude-sonnet-4-5-20250929',
          searchDepth: optionsArg.searchDepth || 'basic',
          tokensUsed: result.usage?.output_tokens,
          webSearchesPerformed: webSearchCount,
        },
      };
    } catch (error) {
      console.error('Anthropic research error:', error);
      throw new Error(`Failed to perform research: ${(error as Error).message}`);
    }
  }

  /**
   * Image generation is not supported by Anthropic
   */
  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
    throw new Error('Image generation is not supported by Anthropic. Claude can only analyze images, not generate them. Please use the OpenAI provider for image generation.');
  }

  /**
   * Image editing is not supported by Anthropic
   */
  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
    throw new Error('Image editing is not supported by Anthropic. Claude can only analyze images, not edit them. Please use the OpenAI provider for image editing.');
  }
}