smartai/ts/provider.anthropic.ts

import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
export interface IAnthropicProviderOptions {
  anthropicToken: string;
}
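/**
 * Multimodal provider backed by Anthropic's Messages API via the official SDK client.
 */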
export class AnthropicProvider extends MultiModalModel {
  private options: IAnthropicProviderOptions;
  public anthropicApiClient: plugins.anthropic.default;

  constructor(optionsArg: IAnthropicProviderOptions) {
    super();
    this.options = optionsArg; // Ensure the token is stored
  }

  async start() {
    this.anthropicApiClient = new plugins.anthropic.default({
      apiKey: this.options.anthropicToken,
    });
  }

  async stop() {}

  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
    // TODO: implement streaming chat for Anthropic
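    // Sketch of a possible implementation (illustrative assumption, not wired up here):
    // the Anthropic SDK accepts `stream: true` on messages.create and emits
    // `content_block_delta` events whose `delta.text` chunks could be enqueued onto the
    // returned stream. `collectedInput` below is a hypothetical name for text read from
    // the incoming stream.
    //
    //   const anthropicStream = await this.anthropicApiClient.messages.create({
    //     model: 'claude-3-opus-20240229',
    //     max_tokens: 4000,
    //     messages: [{ role: 'user', content: collectedInput }],
    //     stream: true,
    //   });
    //   for await (const event of anthropicStream) {
    //     if (event.type === 'content_block_delta') {
    //       // enqueue event.delta.text onto the returned stream's controller
    //     }
    //   }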
    const returnStream = new ReadableStream();
    return returnStream;
  }

  // Implementing the synchronous chat interaction
  public async chat(optionsArg: {
    systemMessage: string;
    userMessage: string;
    messageHistory: {
      role: 'assistant' | 'user';
      content: string;
    }[];
  }) {
    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-3-opus-20240229',
      system: optionsArg.systemMessage,
      messages: [
        ...optionsArg.messageHistory,
        { role: 'user', content: optionsArg.userMessage },
      ],
      max_tokens: 4000,
    });
    return {
      role: result.role as 'assistant',
      // result.content is an array of content blocks, not strings; join their text parts
      message: result.content
        .map((contentBlock) => ('text' in contentBlock ? contentBlock.text : ''))
        .join('\n'),
    };
  }

  private async audio(messageArg: string) {
    // Anthropic does not provide an audio API, so this method is not implemented.
    throw new Error('Audio generation is not yet supported by Anthropic.');
  }
}
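
// Usage sketch (illustrative only; the environment variable name and messages below are
// assumptions, not part of this module):
//
//   const provider = new AnthropicProvider({
//     anthropicToken: process.env.ANTHROPIC_TOKEN ?? '',
//   });
//   await provider.start();
//   const reply = await provider.chat({
//     systemMessage: 'You are a helpful assistant.',
//     userMessage: 'Hello!',
//     messageHistory: [],
//   });
//   console.log(reply.message);
//   await provider.stop();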