fix(provider): fix anthropic integration
This commit is contained in:
parent 9e19d320e1
commit d1465fc868

test/test.ts: 10 changed lines
```diff
@@ -12,16 +12,6 @@ let testSmartai: smartai.SmartAi;
 tap.test('should create a smartai instance', async () => {
   testSmartai = new smartai.SmartAi({
     openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
-    exposeCredentials: {
-      exposedBaseUrl: await testQenv.getEnvVarOnDemand('EXPOSED_BASE_URL'),
-      webdav: {
-        webdavCredentials: {
-          password: await testQenv.getEnvVarOnDemand('WEBDAV_SERVER_TOKEN'),
-          serverUrl: await testQenv.getEnvVarOnDemand('WEBDAV_SERVER_URL'),
-        },
-        webdavSubPath: 'smartai'
-      }
-    }
   });
   await testSmartai.start();
 });
```
```diff
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.0.11',
+  version: '0.0.12',
   description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
 }
```
```diff
@@ -16,7 +16,10 @@ export abstract class MultiModalModel {
       role: 'assistant' | 'user';
       content: string;
     }[]
-  }): Promise<{}>
+  }): Promise<{
+    role: 'assistant';
+    message: string;
+  }>

   /**
    * Defines a streaming interface for chat interactions.
```
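The abstract `chat` contract now resolves to a concrete `{ role, message }` pair instead of an empty object. A minimal sketch of the shape a conforming implementation has to satisfy (the type aliases and the echo function are illustrative, not part of the commit):

```ts
// Mirrors the updated abstract signature shown in the diff above.
type TChatInput = {
  systemMessage: string;
  userMessage: string;
  messageHistory: { role: 'assistant' | 'user'; content: string }[];
};

type TChatOutput = { role: 'assistant'; message: string };

// Hypothetical conforming implementation, for illustration only.
const echoChat = async (optionsArg: TChatInput): Promise<TChatOutput> => ({
  role: 'assistant',
  message: `echo: ${optionsArg.userMessage}`,
});
```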
```diff
@@ -12,9 +12,11 @@ export interface IConversationOptions {
  */
 export class Conversation {
   // STATIC
-  public static async createWithOpenAi(smartaiRef: SmartAi) {
-    const openaiProvider = new OpenAiProvider(smartaiRef.options.openaiToken);
-    const conversation = new Conversation(smartaiRef, {
+  public static async createWithOpenAi(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.openaiProvider) {
+      throw new Error('OpenAI provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
       processFunction: async (input) => {
         return '' // TODO implement proper streaming
       }
```
```diff
@@ -22,9 +24,11 @@ export class Conversation {
     return conversation;
   }

-  public static async createWithAnthropic(smartaiRef: SmartAi) {
-    const anthropicProvider = new OpenAiProvider(smartaiRef.options.anthropicToken);
-    const conversation = new Conversation(smartaiRef, {
+  public static async createWithAnthropic(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.anthropicProvider) {
+      throw new Error('Anthropic provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
       processFunction: async (input) => {
         return '' // TODO implement proper streaming
       }
```
```diff
@@ -32,6 +36,29 @@ export class Conversation {
     return conversation;
   }

+  public static async createWithPerplexity(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.perplexityProvider) {
+      throw new Error('Perplexity provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
+  public static async createWithOllama(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.ollamaProvider) {
+      throw new Error('Ollama provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
   // INSTANCE
   smartaiRef: SmartAi
```
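All four factories share the same guard-then-construct pattern; only the checked provider property and the error text vary. If the duplication grows, it could collapse into one private helper. A sketch under that assumption (this helper does not exist in the commit):

```ts
// Hypothetical consolidation of the createWith* factories; would live inside class Conversation.
private static async createWithProvider(
  smartaiRefArg: SmartAi,
  providerKey: 'openaiProvider' | 'anthropicProvider' | 'perplexityProvider' | 'ollamaProvider',
) {
  if (!smartaiRefArg[providerKey]) {
    throw new Error(`${providerKey} not available`);
  }
  return new Conversation(smartaiRefArg, {
    processFunction: async (input) => {
      return ''; // TODO implement proper streaming
    },
  });
}
```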
```diff
@@ -44,8 +71,8 @@ export class Conversation {
     this.processFunction = options.processFunction;
   }

-  setSystemMessage(systemMessage: string) {
-    this.systemMessage = systemMessage;
+  public async setSystemMessage(systemMessageArg: string) {
+    this.systemMessage = systemMessageArg;
   }

   private setupOutputStream(): ReadableStream<string> {
```
```diff
@@ -57,7 +84,7 @@ export class Conversation {
   }

   private setupInputStream(): WritableStream<string> {
-    return new WritableStream<string>({
+    const writableStream = new WritableStream<string>({
       write: async (chunk) => {
         const processedData = await this.processFunction(chunk);
         if (this.outputStreamController) {
```
```diff
@@ -72,6 +99,7 @@ export class Conversation {
         this.outputStreamController?.error(err);
       }
     });
+    return writableStream;
   }

   public getInputStreamWriter(): WritableStreamDefaultWriter<string> {
```
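Naming the stream before returning it also makes the data path easier to see: the writable side's `write` callback runs `processFunction` on each chunk and enqueues the result on the readable side's controller. A standalone sketch of that write-through pattern using only the web-streams API (names and the uppercase stand-in are illustrative):

```ts
// Plain web-streams illustration of the setupInputStream/setupOutputStream pattern.
let outputController!: ReadableStreamDefaultController<string>;

const outputStream = new ReadableStream<string>({
  start(controller) {
    outputController = controller; // captured so the writable side can enqueue
  },
});

const inputStream = new WritableStream<string>({
  write: async (chunk) => {
    const processed = chunk.toUpperCase(); // stand-in for processFunction(chunk)
    outputController.enqueue(processed);
  },
});

// Writing to the input surfaces the processed chunk on the output.
await inputStream.getWriter().write('hello');
```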
```diff
@@ -1,6 +1,6 @@
 import { Conversation } from './classes.conversation.js';
 import * as plugins from './plugins.js';
-import type { AnthropicProvider } from './provider.anthropic.js';
+import { AnthropicProvider } from './provider.anthropic.js';
 import type { OllamaProvider } from './provider.ollama.js';
 import { OpenAiProvider } from './provider.openai.js';
 import type { PerplexityProvider } from './provider.perplexity.js';
```
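The `AnthropicProvider` import switches from `import type` to a value import because `start()` now instantiates the class; type-only imports are erased at compile time and cannot be used with `new`.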
```diff
@@ -12,6 +12,8 @@ export interface ISmartAiOptions {
   perplexityToken?: string;
 }

+export type TProvider = 'openai' | 'anthropic' | 'perplexity' | 'ollama';
+
 export class SmartAi {
   public options: ISmartAiOptions;

```
```diff
@@ -31,21 +33,30 @@ export class SmartAi {
       });
       await this.openaiProvider.start();
     }
+    if (this.options.anthropicToken) {
+      this.anthropicProvider = new AnthropicProvider({
+        anthropicToken: this.options.anthropicToken,
+      });
+    }
   }

   public async stop() {}

   /**
-   * creates an OpenAI conversation
+   * create a new conversation
    */
-  public async createOpenApiConversation() {
-    const conversation = await Conversation.createWithOpenAi(this);
-  }
-
-  /**
-   * creates an OpenAI conversation
-   */
-  public async createAnthropicConversation() {
-    const conversation = await Conversation.createWithAnthropic(this);
-  }
+  createConversation(provider: TProvider) {
+    switch (provider) {
+      case 'openai':
+        return Conversation.createWithOpenAi(this);
+      case 'anthropic':
+        return Conversation.createWithAnthropic(this);
+      case 'perplexity':
+        return Conversation.createWithPerplexity(this);
+      case 'ollama':
+        return Conversation.createWithOllama(this);
+      default:
+        throw new Error('Provider not available');
+    }
+  }
 }
```
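Together with the new `TProvider` union, the single `createConversation(provider)` entry point replaces the per-provider methods, and `start()` now constructs an `AnthropicProvider` whenever an `anthropicToken` is configured. A hedged usage sketch (token values are placeholders):

```ts
import * as smartai from '@push.rocks/smartai';

const ai = new smartai.SmartAi({
  openaiToken: 'sk-...',        // placeholder
  anthropicToken: 'sk-ant-...', // placeholder
});
await ai.start();

// Accepts 'openai' | 'anthropic' | 'perplexity' | 'ollama';
// providers without a configured token throw 'not available'.
const conversation = await ai.createConversation('anthropic');
```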
```diff
@@ -2,74 +2,61 @@ import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';

+export interface IAnthropicProviderOptions {
+  anthropicToken: string;
+}
+
 export class AnthropicProvider extends MultiModalModel {
-  private anthropicToken: string;
+  private options: IAnthropicProviderOptions;
   public anthropicApiClient: plugins.anthropic.default;

-  constructor(anthropicToken: string) {
+  constructor(optionsArg: IAnthropicProviderOptions) {
     super();
-    this.anthropicToken = anthropicToken; // Ensure the token is stored
+    this.options = optionsArg // Ensure the token is stored
   }

   async start() {
     this.anthropicApiClient = new plugins.anthropic.default({
-      apiKey: this.anthropicToken,
+      apiKey: this.options.anthropicToken,
     });
   }

   async stop() {}

-  chatStream(input: ReadableStream<string>): ReadableStream<string> {
-    const decoder = new TextDecoder();
-    let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
-
-    return new ReadableStream({
-      async start(controller) {
-        const reader = input.getReader();
-        try {
-          let done, value;
-          while ((({ done, value } = await reader.read()), !done)) {
-            const userMessage = decoder.decode(value, { stream: true });
-            messageHistory.push({ role: 'user', content: userMessage });
-            const aiResponse = await this.chat('', userMessage, messageHistory);
-            messageHistory.push({ role: 'assistant', content: aiResponse.message });
-            // Directly enqueue the string response instead of encoding it first
-            controller.enqueue(aiResponse.message);
-          }
-          controller.close();
-        } catch (err) {
-          controller.error(err);
-        }
-      },
-    });
+  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
+    // TODO: implement for OpenAI
+
+    const returnStream = new ReadableStream();
+    return returnStream;
   }

   // Implementing the synchronous chat interaction
-  public async chat(
-    systemMessage: string,
-    userMessage: string,
+  public async chat(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
     messageHistory: {
       role: 'assistant' | 'user';
       content: string;
-    }[]
-  ) {
+    }[];
+  }) {
     const result = await this.anthropicApiClient.messages.create({
       model: 'claude-3-opus-20240229',
-      system: systemMessage,
+      system: optionsArg.systemMessage,
       messages: [
-        ...messageHistory,
-        { role: 'user', content: userMessage },
+        ...optionsArg.messageHistory,
+        { role: 'user', content: optionsArg.userMessage },
       ],
       max_tokens: 4000,
     });

     return {
-      message: result.content,
+      role: result.role as 'assistant',
+      message: result.content.join('\n'),
     };
   }

-  public async audio(messageArg: string) {
+  private async audio(messageArg: string) {
     // Anthropic does not provide an audio API, so this method is not implemented.
-    throw new Error('Audio generation is not supported by Anthropic.');
+    throw new Error('Audio generation is not yet supported by Anthropic.');
   }
 }
```
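One caveat on the new return value: in the official Anthropic SDK, `result.content` is an array of content blocks (objects like `{ type: 'text', text: '...' }`) rather than plain strings, so `content.join('\n')` will stringify the objects. A sketch of the text extraction this most likely wants, assuming the SDK's block shape:

```ts
// Assumes Anthropic content blocks of shape { type: 'text'; text: string }.
type TContentBlock = { type: 'text'; text: string } | { type: string };

function joinTextBlocks(contentArg: TContentBlock[]): string {
  return contentArg
    .filter((block): block is { type: 'text'; text: string } => block.type === 'text')
    .map((block) => block.text)
    .join('\n');
}
```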
```diff
@@ -53,7 +53,8 @@ export class OpenAiProvider extends MultiModalModel {
       ],
     });
     return {
-      message: result.choices[0].message,
+      role: result.choices[0].message.role as 'assistant',
+      message: result.choices[0].message.content,
     };
   }

```