fix(provider): fix anthropic integration

This commit is contained in:
Philipp Kunz 2024-04-29 11:18:40 +02:00
parent 9e19d320e1
commit d1465fc868
7 changed files with 90 additions and 70 deletions

View File

@@ -12,16 +12,6 @@ let testSmartai: smartai.SmartAi;
tap.test('should create a smartai instance', async () => {
testSmartai = new smartai.SmartAi({
openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
exposeCredentials: {
exposedBaseUrl: await testQenv.getEnvVarOnDemand('EXPOSED_BASE_URL'),
webdav: {
webdavCredentials: {
password: await testQenv.getEnvVarOnDemand('WEBDAV_SERVER_TOKEN'),
serverUrl: await testQenv.getEnvVarOnDemand('WEBDAV_SERVER_URL'),
},
webdavSubPath: 'smartai'
}
}
});
await testSmartai.start();
});

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartai',
version: '0.0.11',
version: '0.0.12',
description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
}

View File

@@ -16,7 +16,10 @@ export abstract class MultiModalModel {
role: 'assistant' | 'user';
content: string;
}[]
}): Promise<{}>
}): Promise<{
role: 'assistant';
message: string;
}>
/**
* Defines a streaming interface for chat interactions.

View File

@@ -12,9 +12,11 @@ export interface IConversationOptions {
*/
export class Conversation {
// STATIC
public static async createWithOpenAi(smartaiRef: SmartAi) {
const openaiProvider = new OpenAiProvider(smartaiRef.options.openaiToken);
const conversation = new Conversation(smartaiRef, {
public static async createWithOpenAi(smartaiRefArg: SmartAi) {
if (!smartaiRefArg.openaiProvider) {
throw new Error('OpenAI provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
@@ -22,9 +24,11 @@ export class Conversation {
return conversation;
}
public static async createWithAnthropic(smartaiRef: SmartAi) {
const anthropicProvider = new OpenAiProvider(smartaiRef.options.anthropicToken);
const conversation = new Conversation(smartaiRef, {
public static async createWithAnthropic(smartaiRefArg: SmartAi) {
if (!smartaiRefArg.anthropicProvider) {
throw new Error('Anthropic provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
@@ -32,6 +36,29 @@ export class Conversation {
return conversation;
}
public static async createWithPerplexity(smartaiRefArg: SmartAi) {
if (!smartaiRefArg.perplexityProvider) {
throw new Error('Perplexity provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
});
return conversation;
}
public static async createWithOllama(smartaiRefArg: SmartAi) {
if (!smartaiRefArg.ollamaProvider) {
throw new Error('Ollama provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
});
return conversation;
}
// INSTANCE
smartaiRef: SmartAi
@@ -44,8 +71,8 @@ export class Conversation {
this.processFunction = options.processFunction;
}
setSystemMessage(systemMessage: string) {
this.systemMessage = systemMessage;
public async setSystemMessage(systemMessageArg: string) {
this.systemMessage = systemMessageArg;
}
private setupOutputStream(): ReadableStream<string> {
@@ -57,7 +84,7 @@
}
private setupInputStream(): WritableStream<string> {
return new WritableStream<string>({
const writableStream = new WritableStream<string>({
write: async (chunk) => {
const processedData = await this.processFunction(chunk);
if (this.outputStreamController) {
@@ -72,6 +99,7 @@
this.outputStreamController?.error(err);
}
});
return writableStream;
}
public getInputStreamWriter(): WritableStreamDefaultWriter<string> {

View File

@@ -1,6 +1,6 @@
import { Conversation } from './classes.conversation.js';
import * as plugins from './plugins.js';
import type { AnthropicProvider } from './provider.anthropic.js';
import { AnthropicProvider } from './provider.anthropic.js';
import type { OllamaProvider } from './provider.ollama.js';
import { OpenAiProvider } from './provider.openai.js';
import type { PerplexityProvider } from './provider.perplexity.js';
@@ -12,6 +12,8 @@ export interface ISmartAiOptions {
perplexityToken?: string;
}
export type TProvider = 'openai' | 'anthropic' | 'perplexity' | 'ollama';
export class SmartAi {
public options: ISmartAiOptions;
@@ -31,21 +33,30 @@ export class SmartAi {
});
await this.openaiProvider.start();
}
if (this.options.anthropicToken) {
this.anthropicProvider = new AnthropicProvider({
anthropicToken: this.options.anthropicToken,
});
}
}
public async stop() {}
/**
* creates an OpenAI conversation
* create a new conversation
*/
public async createOpenApiConversation() {
const conversation = await Conversation.createWithOpenAi(this);
}
/**
* creates an OpenAI conversation
*/
public async createAnthropicConversation() {
const conversation = await Conversation.createWithAnthropic(this);
createConversation(provider: TProvider) {
switch (provider) {
case 'openai':
return Conversation.createWithOpenAi(this);
case 'anthropic':
return Conversation.createWithAnthropic(this);
case 'perplexity':
return Conversation.createWithPerplexity(this);
case 'ollama':
return Conversation.createWithOllama(this);
default:
throw new Error('Provider not available');
}
}
}

View File

@@ -2,74 +2,61 @@ import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
export interface IAnthropicProviderOptions {
anthropicToken: string;
}
export class AnthropicProvider extends MultiModalModel {
private anthropicToken: string;
private options: IAnthropicProviderOptions;
public anthropicApiClient: plugins.anthropic.default;
constructor(anthropicToken: string) {
constructor(optionsArg: IAnthropicProviderOptions) {
super();
this.anthropicToken = anthropicToken; // Ensure the token is stored
this.options = optionsArg // Ensure the token is stored
}
async start() {
this.anthropicApiClient = new plugins.anthropic.default({
apiKey: this.anthropicToken,
apiKey: this.options.anthropicToken,
});
}
async stop() {}
chatStream(input: ReadableStream<string>): ReadableStream<string> {
const decoder = new TextDecoder();
let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
// TODO: implement for OpenAI
return new ReadableStream({
async start(controller) {
const reader = input.getReader();
try {
let done, value;
while ((({ done, value } = await reader.read()), !done)) {
const userMessage = decoder.decode(value, { stream: true });
messageHistory.push({ role: 'user', content: userMessage });
const aiResponse = await this.chat('', userMessage, messageHistory);
messageHistory.push({ role: 'assistant', content: aiResponse.message });
// Directly enqueue the string response instead of encoding it first
controller.enqueue(aiResponse.message);
}
controller.close();
} catch (err) {
controller.error(err);
}
},
});
const returnStream = new ReadableStream();
return returnStream;
}
// Implementing the synchronous chat interaction
public async chat(
systemMessage: string,
userMessage: string,
public async chat(optionsArg: {
systemMessage: string;
userMessage: string;
messageHistory: {
role: 'assistant' | 'user';
content: string;
}[]
) {
}[];
}) {
const result = await this.anthropicApiClient.messages.create({
model: 'claude-3-opus-20240229',
system: systemMessage,
system: optionsArg.systemMessage,
messages: [
...messageHistory,
{ role: 'user', content: userMessage },
...optionsArg.messageHistory,
{ role: 'user', content: optionsArg.userMessage },
],
max_tokens: 4000,
});
return {
message: result.content,
role: result.role as 'assistant',
message: result.content.join('\n'),
};
}
public async audio(messageArg: string) {
private async audio(messageArg: string) {
// Anthropic does not provide an audio API, so this method is not implemented.
throw new Error('Audio generation is not supported by Anthropic.');
throw new Error('Audio generation is not yet supported by Anthropic.');
}
}

View File

@@ -53,7 +53,8 @@ export class OpenAiProvider extends MultiModalModel {
],
});
return {
message: result.choices[0].message,
role: result.choices[0].message.role as 'assistant',
message: result.choices[0].message.content,
};
}