fix(core): update

This commit is contained in:
2024-04-04 02:47:44 +02:00
parent a636556fdb
commit 04d505d29e
13 changed files with 335 additions and 39 deletions

View File

@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartai',
version: '0.0.8',
description: 'a standardaized interface to talk to AI models'
version: '0.0.9',
description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
}

View File

@ -1,5 +1,12 @@
export abstract class MultiModal {
export abstract class MultiModalModel {
/**
* starts the model
*/
abstract start(): Promise<void>;
/**
* stops the model
*/
abstract stop(): Promise<void>;
// Defines a streaming interface for chat interactions.

View File

@ -1,18 +1,53 @@
import type { SmartAi } from "./classes.smartai.js";
import { AnthropicProvider } from "./provider.anthropic.js";
import { OpenAiProvider } from "./provider.openai.js";
type TProcessFunction = (input: string) => Promise<string>;
export interface ISmartAiOptions {
/**
 * Options accepted by the Conversation constructor.
 */
export interface IConversationOptions {
// async transform applied to each user input to produce the assistant reply
processFunction: TProcessFunction;
}
export class SmartAi {
/**
* a conversation
*/
export class Conversation {
// STATIC
/**
 * Creates a Conversation that talks to OpenAI.
 * NOTE(review): the OpenAiProvider is constructed but not yet wired into
 * processFunction — streaming is still a stub (see TODO below); confirm
 * intended wiring before relying on replies.
 */
public static async createWithOpenAi(smartaiRef: SmartAi) {
const openaiProvider = new OpenAiProvider(smartaiRef.options.openaiToken);
const conversation = new Conversation(smartaiRef, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
});
return conversation;
}
/**
 * Creates a Conversation that talks to Anthropic.
 * BUGFIX: previously this constructed an OpenAiProvider with the Anthropic
 * token; it now uses AnthropicProvider (import added at top of file).
 * NOTE(review): like the OpenAI factory, the provider is not yet wired into
 * processFunction — streaming is still a stub.
 */
public static async createWithAnthropic(smartaiRef: SmartAi) {
  const anthropicProvider = new AnthropicProvider(smartaiRef.options.anthropicToken);
  const conversation = new Conversation(smartaiRef, {
    processFunction: async (input) => {
      return '' // TODO implement proper streaming
    }
  });
  return conversation;
}
// INSTANCE
smartaiRef: SmartAi
private systemMessage: string;
private processFunction: TProcessFunction;
private inputStreamWriter: WritableStreamDefaultWriter<string> | null = null;
private outputStreamController: ReadableStreamDefaultController<string> | null = null;
constructor(options: ISmartAiOptions) {
/**
 * @param smartairefArg - back-reference to the owning SmartAi instance
 * @param options - conversation options (processFunction)
 */
constructor(smartairefArg: SmartAi, options: IConversationOptions) {
  // BUGFIX: the SmartAi reference was previously dropped even though the
  // class declares `smartaiRef` and the static factories pass it in.
  this.smartaiRef = smartairefArg;
  this.processFunction = options.processFunction;
}
/**
 * Stores the system prompt to be used for subsequent exchanges.
 */
setSystemMessage(systemMessageArg: string) {
  this.systemMessage = systemMessageArg;
}
private setupOutputStream(): ReadableStream<string> {
return new ReadableStream<string>({
start: (controller) => {

30
ts/classes.smartai.ts Normal file
View File

@ -0,0 +1,30 @@
import { Conversation } from './classes.conversation.js';
import * as plugins from './plugins.js';
/**
 * Construction options for SmartAi: API tokens for the supported providers.
 */
export interface ISmartAiOptions {
// OpenAI API token
openaiToken: string;
// Anthropic API token
anthropicToken: string;
}
/**
 * Central entry point: holds provider API tokens and creates
 * provider-specific conversations.
 */
export class SmartAi {
  public options: ISmartAiOptions;

  constructor(optionsArg: ISmartAiOptions) {
    this.options = optionsArg;
  }

  /**
   * creates an OpenAI conversation
   * BUGFIX: the conversation is now returned instead of being discarded.
   */
  public async createOpenApiConversation() {
    const conversation = await Conversation.createWithOpenAi(this);
    return conversation;
  }

  /**
   * creates an Anthropic conversation
   * BUGFIX: comment previously said "OpenAI"; the conversation is now
   * returned instead of being discarded.
   */
  public async createAnthropicConversation() {
    const conversation = await Conversation.createWithAnthropic(this);
    return conversation;
  }
}

15
ts/classes.tts.ts Normal file
View File

@ -0,0 +1,15 @@
import type { SmartAi } from './classes.smartai.js';
import * as plugins from './plugins.js';
/**
 * Text-to-speech helper bound to a SmartAi instance.
 */
export class TTS {
  // STATIC
  /**
   * Factory building a TTS instance for the OpenAI backend.
   */
  public static async createWithOpenAi(smartaiRef: SmartAi): Promise<TTS> {
    const instance = new TTS(smartaiRef);
    return instance;
  }

  // INSTANCE
  smartaiRef: SmartAi;

  constructor(smartairefArg: SmartAi) {
    this.smartaiRef = smartairefArg;
  }
}

View File

@ -1,3 +1,3 @@
export * from './smartai.classes.smartai.js';
export * from './classes.smartai.js';
export * from './abstract.classes.multimodal.js';
export * from './provider.openai.js';

View File

@ -19,8 +19,10 @@ export {
}
// third party
import * as anthropic from '@anthropic-ai/sdk';
import * as openai from 'openai';
export {
anthropic,
openai,
}

75
ts/provider.anthropic.ts Normal file
View File

@ -0,0 +1,75 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
/**
 * Anthropic (Claude) implementation of the MultiModalModel interface.
 */
export class AnthropicProvider extends MultiModalModel {
  private anthropicToken: string;
  public anthropicApiClient: plugins.anthropic.default;

  /**
   * @param anthropicToken - token used to authenticate against the Anthropic API
   */
  constructor(anthropicToken: string) {
    super();
    this.anthropicToken = anthropicToken; // Ensure the token is stored
  }

  /** Instantiates the API client; must run before chat()/chatStream(). */
  async start() {
    this.anthropicApiClient = new plugins.anthropic.default({
      apiKey: this.anthropicToken,
    });
  }

  /** Nothing to tear down for the HTTP-based client. */
  async stop() {}

  /**
   * Streaming chat: each chunk read from `input` is treated as one user
   * message; the assistant's reply is enqueued on the returned stream.
   * Conversation history accumulates across chunks.
   */
  chatStream(input: ReadableStream<string>): ReadableStream<string> {
    const decoder = new TextDecoder();
    const messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
    // BUGFIX: `async start(controller)` is a plain function, so `this` inside
    // it referred to the underlying-source object, not the provider, and
    // `this.chat` was undefined. Capture the provider in a closure instead.
    const provider = this;
    return new ReadableStream({
      async start(controller) {
        const reader = input.getReader();
        try {
          let done, value;
          while ((({ done, value } = await reader.read()), !done)) {
            // BUGFIX: the stream is declared ReadableStream<string>, and
            // TextDecoder.decode() throws on string input. Decode only when a
            // binary chunk is actually received.
            const userMessage =
              typeof value === 'string' ? value : decoder.decode(value, { stream: true });
            messageHistory.push({ role: 'user', content: userMessage });
            const aiResponse = await provider.chat('', userMessage, messageHistory);
            messageHistory.push({ role: 'assistant', content: aiResponse.message });
            // Directly enqueue the string response instead of encoding it first
            controller.enqueue(aiResponse.message);
          }
          controller.close();
        } catch (err) {
          controller.error(err);
        }
      },
    });
  }

  /**
   * Synchronous (request/response) chat interaction.
   *
   * @param systemMessage - system prompt passed to the model
   * @param userMessage - the newest user message
   * @param messageHistory - prior turns, oldest first
   * @returns the assistant reply as `{ message: string }`
   */
  public async chat(
    systemMessage: string,
    userMessage: string,
    messageHistory: {
      role: 'assistant' | 'user';
      content: string;
    }[]
  ) {
    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-3-opus-20240229',
      system: systemMessage,
      messages: [
        ...messageHistory,
        { role: 'user', content: userMessage },
      ],
      max_tokens: 4000,
    });
    // BUGFIX: result.content is an array of content blocks, not a string.
    // Flatten the text blocks so callers (e.g. chatStream's history) get the
    // plain string they expect.
    const message = result.content
      .map((block) => ('text' in block ? block.text : ''))
      .join('');
    return {
      message,
    };
  }

  /**
   * @throws always — Anthropic exposes no audio/TTS endpoint.
   */
  public async audio(messageArg: string) {
    // Anthropic does not provide an audio API, so this method is not implemented.
    throw new Error('Audio generation is not supported by Anthropic.');
  }
}

View File

@ -1,9 +1,9 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModal } from './abstract.classes.multimodal.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
export class OpenAiProvider extends MultiModal {
export class OpenAiProvider extends MultiModalModel {
private openAiToken: string;
public openAiApiClient: plugins.openai.default;
@ -59,6 +59,7 @@ export class OpenAiProvider extends MultiModal {
) {
const result = await this.openAiApiClient.chat.completions.create({
model: 'gpt-4-turbo-preview',
messages: [
{ role: 'system', content: systemMessage },
...messageHistory,