import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
import type {
  ChatOptions,
  ChatResponse,
  ChatMessage,
  ResearchOptions,
  ResearchResponse,
  ImageGenerateOptions,
  ImageEditOptions,
  ImageResponse
} from './abstract.classes.multimodal.js';
import type { ImageBlockParam, TextBlockParam } from '@anthropic-ai/sdk/resources/messages';

type ContentBlock = ImageBlockParam | TextBlockParam;
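
/**
 * Configuration options for the Anthropic provider.
 * Besides the API token, the optional flags control the web search tool used by `research()`:
 * `enableWebSearch` turns the tool on, and the domain lists restrict where results may come
 * from (the allow list takes precedence when both are set).
 */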
export interface IAnthropicProviderOptions {
  anthropicToken: string;
  enableWebSearch?: boolean;
  searchDomainAllowList?: string[];
  searchDomainBlockList?: string[];
}
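
/**
 * Multi-modal provider backed by the Anthropic Messages API.
 * Supports chat, streaming chat, vision, PDF document analysis, and web-search-backed research;
 * audio and image generation/editing are intentionally unsupported.
 *
 * @example
 * // Minimal usage sketch; the token value is a placeholder.
 * const provider = new AnthropicProvider({ anthropicToken: 'sk-ant-...' });
 * await provider.start();
 * const response = await provider.chat({
 *   systemMessage: 'You are a helpful assistant.',
 *   userMessage: 'Hello!',
 *   messageHistory: [],
 * });
 */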
export class AnthropicProvider extends MultiModalModel {
  private options: IAnthropicProviderOptions;
  public anthropicApiClient: plugins.anthropic.default;

  constructor(optionsArg: IAnthropicProviderOptions) {
    super();
    this.options = optionsArg; // Ensure the token is stored
  }

  async start() {
    await super.start();
    this.anthropicApiClient = new plugins.anthropic.default({
      apiKey: this.options.anthropicToken,
    });
  }

  async stop() {
    await super.stop();
  }
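
  /**
   * Streams a chat exchange. The input stream is expected to carry newline-delimited JSON
   * messages of the form `{ "role": "user" | "assistant", "content": string }`; each complete
   * message is forwarded to Anthropic and the model's text deltas are emitted on the returned
   * stream.
   */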
2024-04-04 02:47:44 +02:00
2025-02-03 15:16:58 +01:00
  public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
    // Create a TextDecoder to handle incoming chunks
    const decoder = new TextDecoder();
    let buffer = '';
    let currentMessage: { role: 'user' | 'assistant'; content: string } | null = null;

    // Create a TransformStream to process the input.
    // Arrow functions keep `this` bound to the provider so the API client stays reachable.
    const transform = new TransformStream<Uint8Array, string>({
      transform: async (chunk, controller) => {
        buffer += decoder.decode(chunk, { stream: true });

        // Try to parse complete JSON messages from the buffer
        while (true) {
          const newlineIndex = buffer.indexOf('\n');
          if (newlineIndex === -1) break;

          const line = buffer.slice(0, newlineIndex);
          buffer = buffer.slice(newlineIndex + 1);

          if (line.trim()) {
            try {
              const message = JSON.parse(line);
              currentMessage = {
                role: message.role === 'assistant' ? 'assistant' : 'user',
                content: message.content || '',
              };
            } catch (e) {
              console.error('Failed to parse message:', e);
            }
          }
        }

        // If we have a complete message, send it to Anthropic
        if (currentMessage) {
          const stream = await this.anthropicApiClient.messages.create({
            model: 'claude-sonnet-4-5-20250929',
            messages: [{ role: currentMessage.role, content: currentMessage.content }],
            system: '',
            stream: true,
            max_tokens: 4000,
          });

          // Process each text delta from Anthropic
          for await (const streamChunk of stream) {
            if (streamChunk.type === 'content_block_delta' && streamChunk.delta.type === 'text_delta') {
              controller.enqueue(streamChunk.delta.text);
            }
          }

          currentMessage = null;
        }
      },

      flush: (controller) => {
        if (buffer) {
          try {
            const message = JSON.parse(buffer);
            controller.enqueue(message.content || '');
          } catch (e) {
            console.error('Failed to parse remaining buffer:', e);
          }
        }
      },
    });

    // Connect the input to our transform stream
    return input.pipeThrough(transform);
  }

  /**
   * Implements the synchronous chat interaction.
   */
  public async chat(optionsArg: ChatOptions): Promise<ChatResponse> {
    // Convert message history to Anthropic format
    const messages = optionsArg.messageHistory.map((msg) => ({
      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
      content: msg.content
    }));

    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      system: optionsArg.systemMessage,
      messages: [
        ...messages,
        { role: 'user' as const, content: optionsArg.userMessage }
      ],
      max_tokens: 4000,
    });

    // Extract text content from the response
    let message = '';
    for (const block of result.content) {
      if ('text' in block) {
        message += block.text;
      }
    }

    return {
      role: 'assistant' as const,
      message,
    };
  }
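
  /**
   * Audio generation is not available through the Anthropic API; this method always throws.
   */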
  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
    // Anthropic does not provide an audio API, so this method is not implemented.
    throw new Error('Audio generation is not yet supported by Anthropic.');
  }
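
  /**
   * Describes an image with Claude's vision capability. The image buffer is base64-encoded and
   * sent as a single image content block alongside the prompt; the block is labeled
   * `image/jpeg`, so callers should pass JPEG data (or adjust the media type).
   */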
  public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
    const base64Image = optionsArg.image.toString('base64');
    const content: ContentBlock[] = [
      {
        type: 'text',
        text: optionsArg.prompt
      },
      {
        type: 'image',
        source: {
          type: 'base64',
          media_type: 'image/jpeg',
          data: base64Image
        }
      }
    ];

    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      messages: [{
        role: 'user',
        content
      }],
      max_tokens: 1024
    });

    // Extract text content from the response
    let message = '';
    for (const block of result.content) {
      if ('text' in block) {
        message += block.text;
      }
    }

    return message;
  }
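
  /**
   * Answers questions about PDF documents. Each PDF is rendered to PNG page images via SmartPdf,
   * and every page is attached as a base64 image block after the user message, so large
   * documents translate into many image blocks.
   */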
  public async document(optionsArg: {
    systemMessage: string;
    userMessage: string;
    pdfDocuments: Uint8Array[];
    messageHistory: ChatMessage[];
  }): Promise<{ message: any }> {
    // Ensure SmartPdf is initialized before processing documents
    await this.ensureSmartpdfReady();

    // Convert PDF documents to images using SmartPDF
    let documentImageBytesArray: Uint8Array[] = [];
    for (const pdfDocument of optionsArg.pdfDocuments) {
      const documentImageArray = await this.smartpdfInstance!.convertPDFToPngBytes(pdfDocument);
      documentImageBytesArray = documentImageBytesArray.concat(documentImageArray);
    }

    // Convert message history to Anthropic format
    const messages = optionsArg.messageHistory.map((msg) => ({
      role: msg.role === 'assistant' ? 'assistant' as const : 'user' as const,
      content: msg.content
    }));

    // Create content array with text and images
    const content: ContentBlock[] = [
      {
        type: 'text',
        text: optionsArg.userMessage
      }
    ];

    // Add each document page as an image
    for (const imageBytes of documentImageBytesArray) {
      content.push({
        type: 'image',
        source: {
          type: 'base64',
          media_type: 'image/png',
          data: Buffer.from(imageBytes).toString('base64')
        }
      });
    }

    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-sonnet-4-5-20250929',
      system: optionsArg.systemMessage,
      messages: [
        ...messages,
        { role: 'user', content }
      ],
      max_tokens: 4096
    });

    // Extract text content from the response
    let message = '';
    for (const block of result.content) {
      if ('text' in block) {
        message += block.text;
      }
    }

    return {
      message: {
        role: 'assistant',
        content: message
      }
    };
  }
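
  /**
   * Performs a research query, optionally backed by Anthropic's web search tool (enabled via
   * `enableWebSearch` in the provider options). Sources are collected from citation blocks and
   * web search tool results, with a markdown-link fallback when no citations are returned.
   *
   * @example
   * // Hypothetical call; requires a provider constructed with enableWebSearch: true.
   * const research = await provider.research({
   *   query: 'What is new in the latest TypeScript release?',
   *   searchDepth: 'basic',
   * });
   */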
  public async research(optionsArg: ResearchOptions): Promise<ResearchResponse> {
    // Prepare the messages for the research request
    const systemMessage = `You are a research assistant with web search capabilities.
    Provide comprehensive, well-researched answers with citations and sources.
    When searching the web, be thorough and cite your sources accurately.`;

    try {
      // Build the tool configuration for web search
      const tools: any[] = [];

      if (this.options.enableWebSearch) {
        const webSearchTool: any = {
          type: 'web_search_20250305',
          name: 'web_search'
        };

        // Add optional parameters
        if (optionsArg.maxSources) {
          webSearchTool.max_uses = optionsArg.maxSources;
        }

        if (this.options.searchDomainAllowList?.length) {
          webSearchTool.allowed_domains = this.options.searchDomainAllowList;
        } else if (this.options.searchDomainBlockList?.length) {
          webSearchTool.blocked_domains = this.options.searchDomainBlockList;
        }

        tools.push(webSearchTool);
      }

      // Configure the request based on search depth
      const maxTokens = optionsArg.searchDepth === 'deep' ? 8192 :
                        optionsArg.searchDepth === 'advanced' ? 6144 : 4096;

      // Create the research request
      const requestParams: any = {
        model: 'claude-sonnet-4-5-20250929',
        system: systemMessage,
        messages: [
          {
            role: 'user' as const,
            content: optionsArg.query
          }
        ],
        max_tokens: maxTokens,
        temperature: 0.7
      };

      // Add tools if web search is enabled
      if (tools.length > 0) {
        requestParams.tools = tools;
      }

      // Execute the research request
      const result = await this.anthropicApiClient.messages.create(requestParams);

      // Extract the answer from content blocks
      let answer = '';
      const sources: Array<{ url: string; title: string; snippet: string }> = [];
      const searchQueries: string[] = [];

      // Process content blocks
      for (const block of result.content) {
        if ('text' in block) {
          // Accumulate text content
          answer += block.text;

          // Extract citations if present
          if ('citations' in block && Array.isArray(block.citations)) {
            for (const citation of block.citations) {
              if (citation.type === 'web_search_result_location') {
                sources.push({
                  title: citation.title || '',
                  url: citation.url || '',
                  snippet: citation.cited_text || ''
                });
              }
            }
          }
        } else if ('type' in block && block.type === 'server_tool_use') {
          // Extract search queries from server tool use
          if (block.name === 'web_search' && block.input && typeof block.input === 'object' && 'query' in block.input) {
            searchQueries.push((block.input as any).query);
          }
        } else if ('type' in block && block.type === 'web_search_tool_result') {
          // Extract sources from web search results
          if (Array.isArray(block.content)) {
            for (const searchResult of block.content) {
              if (searchResult.type === 'web_search_result') {
                // Only add if not already in sources (avoid duplicates from citations)
                if (!sources.some((s) => s.url === searchResult.url)) {
                  sources.push({
                    title: searchResult.title || '',
                    url: searchResult.url || '',
                    snippet: '' // Search results don't include snippets, only citations do
                  });
                }
              }
            }
          }
        }
      }

      // Fallback: Parse markdown-style links if no citations found
      if (sources.length === 0) {
        const urlRegex = /\[([^\]]+)\]\(([^)]+)\)/g;
        let match: RegExpExecArray | null;

        while ((match = urlRegex.exec(answer)) !== null) {
          sources.push({
            title: match[1],
            url: match[2],
            snippet: ''
          });
        }
      }

      // Check if web search was used based on usage info
      const webSearchCount = result.usage?.server_tool_use?.web_search_requests || 0;

      return {
        answer,
        sources,
        searchQueries: searchQueries.length > 0 ? searchQueries : undefined,
        metadata: {
          model: 'claude-sonnet-4-5-20250929',
          searchDepth: optionsArg.searchDepth || 'basic',
          tokensUsed: result.usage?.output_tokens,
          webSearchesPerformed: webSearchCount
        }
      };
    } catch (error) {
      console.error('Anthropic research error:', error);
      throw new Error(`Failed to perform research: ${error.message}`);
    }
  }

  /**
   * Image generation is not supported by Anthropic
   */
  public async imageGenerate(optionsArg: ImageGenerateOptions): Promise<ImageResponse> {
    throw new Error('Image generation is not supported by Anthropic. Claude can only analyze images, not generate them. Please use OpenAI provider for image generation.');
  }

  /**
   * Image editing is not supported by Anthropic
   */
  public async imageEdit(optionsArg: ImageEditOptions): Promise<ImageResponse> {
    throw new Error('Image editing is not supported by Anthropic. Claude can only analyze images, not edit them. Please use OpenAI provider for image editing.');
  }
}