fix(provider): fix anthropic integration

2024-04-29 11:18:40 +02:00
parent 9e19d320e1
commit d1465fc868
7 changed files with 90 additions and 70 deletions

@@ -2,74 +2,61 @@ import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
+export interface IAnthropicProviderOptions {
+  anthropicToken: string;
+}
 export class AnthropicProvider extends MultiModalModel {
-  private anthropicToken: string;
+  private options: IAnthropicProviderOptions;
   public anthropicApiClient: plugins.anthropic.default;
-  constructor(anthropicToken: string) {
+  constructor(optionsArg: IAnthropicProviderOptions) {
     super();
-    this.anthropicToken = anthropicToken; // Ensure the token is stored
+    this.options = optionsArg // Ensure the token is stored
   }
   async start() {
     this.anthropicApiClient = new plugins.anthropic.default({
-      apiKey: this.anthropicToken,
+      apiKey: this.options.anthropicToken,
     });
   }
   async stop() {}
-  chatStream(input: ReadableStream<string>): ReadableStream<string> {
-    const decoder = new TextDecoder();
-    let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
-    return new ReadableStream({
-      async start(controller) {
-        const reader = input.getReader();
-        try {
-          let done, value;
-          while ((({ done, value } = await reader.read()), !done)) {
-            const userMessage = decoder.decode(value, { stream: true });
-            messageHistory.push({ role: 'user', content: userMessage });
-            const aiResponse = await this.chat('', userMessage, messageHistory);
-            messageHistory.push({ role: 'assistant', content: aiResponse.message });
-            // Directly enqueue the string response instead of encoding it first
-            controller.enqueue(aiResponse.message);
-          }
-          controller.close();
-        } catch (err) {
-          controller.error(err);
-        }
-      },
-    });
+  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
+    // TODO: implement for OpenAI
+    const returnStream = new ReadableStream();
+    return returnStream;
   }
   // Implementing the synchronous chat interaction
-  public async chat(
-    systemMessage: string,
-    userMessage: string,
+  public async chat(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
     messageHistory: {
       role: 'assistant' | 'user';
       content: string;
-    }[]
-  ) {
+    }[];
+  }) {
     const result = await this.anthropicApiClient.messages.create({
       model: 'claude-3-opus-20240229',
-      system: systemMessage,
+      system: optionsArg.systemMessage,
       messages: [
-        ...messageHistory,
-        { role: 'user', content: userMessage },
+        ...optionsArg.messageHistory,
+        { role: 'user', content: optionsArg.userMessage },
       ],
       max_tokens: 4000,
     });
     return {
-      message: result.content,
       role: result.role as 'assistant',
+      message: result.content.join('\n'),
     };
   }
-  public async audio(messageArg: string) {
+  private async audio(messageArg: string) {
     // Anthropic does not provide an audio API, so this method is not implemented.
-    throw new Error('Audio generation is not supported by Anthropic.');
+    throw new Error('Audio generation is not yet supported by Anthropic.');
   }
 }
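
For context on how the reworked API is meant to be called: the constructor now takes an IAnthropicProviderOptions object instead of a bare token string, and chat() takes a single options argument. A minimal usage sketch under that new signature follows; the import path is a guess, since the changed file's name is not shown above.

// Illustrative usage of the new options-object API; the module path below is
// assumed, not taken from this commit.
import { AnthropicProvider } from './provider.anthropic.js';

const provider = new AnthropicProvider({
  anthropicToken: process.env.ANTHROPIC_API_KEY ?? '',
});

await provider.start();
const reply = await provider.chat({
  systemMessage: 'You are a concise assistant.',
  userMessage: 'Say hello.',
  messageHistory: [],
});
console.log(reply.message);
await provider.stop();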
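
The reworked chatStream() is currently a stub that returns an empty ReadableStream. A rough sketch of what a streaming variant could look like, not part of this commit and assuming the SDK's stream: true option on messages.create(), which yields content_block_delta events carrying text deltas:

// Sketch only: stream one prompt through the Anthropic SDK and forward the
// text deltas as they arrive. The event shapes below are assumptions about
// the SDK's streaming API, not code from this commit.
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

function streamPrompt(systemMessage: string, userMessage: string): ReadableStream<string> {
  return new ReadableStream<string>({
    async start(controller) {
      try {
        const stream = await client.messages.create({
          model: 'claude-3-opus-20240229',
          system: systemMessage,
          messages: [{ role: 'user', content: userMessage }],
          max_tokens: 4000,
          stream: true,
        });
        for await (const event of stream) {
          // Forward only incremental text pieces; ignore other event types.
          if (event.type === 'content_block_delta' && event.delta.type === 'text_delta') {
            controller.enqueue(event.delta.text);
          }
        }
        controller.close();
      } catch (err) {
        controller.error(err);
      }
    },
  });
}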
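
One thing to note about the new chat() return value: in the Anthropic SDK, messages.create() resolves to a message whose content field is an array of content blocks rather than plain strings, so joining that array directly stringifies the block objects. A sketch of pulling the text out of the blocks instead; the chatText helper is illustrative, not part of this commit.

// Sketch only: extract plain text from an Anthropic messages response.
// Assumes text blocks have the shape { type: 'text', text: string }.
import Anthropic from '@anthropic-ai/sdk';

const client = new Anthropic({ apiKey: process.env.ANTHROPIC_API_KEY });

async function chatText(systemMessage: string, userMessage: string): Promise<string> {
  const result = await client.messages.create({
    model: 'claude-3-opus-20240229',
    system: systemMessage,
    messages: [{ role: 'user', content: userMessage }],
    max_tokens: 4000,
  });
  // Keep only the text blocks and join their text, rather than the raw block objects.
  return result.content
    .filter((block) => block.type === 'text')
    .map((block) => ('text' in block ? block.text : ''))
    .join('\n');
}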