import type { SmartAi } from "./classes.smartai.js";
import { OpenAiProvider } from "./provider.openai.js";

type TProcessFunction = (input: string) => Promise<string>;

export interface IConversationOptions {
  processFunction: TProcessFunction;
}

/**
 * A conversation that pipes user input through a provider-specific
 * process function and exposes the results as a readable output stream.
 */
export class Conversation {
  // STATIC
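  /**
   * Creates a conversation backed by the OpenAI provider registered on the given SmartAi instance.
   */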
  public static async createWithOpenAi(smartaiRefArg: SmartAi) {
    if (!smartaiRefArg.openaiProvider) {
      throw new Error('OpenAI provider not available');
    }
    const conversation = new Conversation(smartaiRefArg, {
      processFunction: async (input) => {
        return ''; // TODO implement proper streaming
      }
    });
    return conversation;
  }

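  /**
   * Creates a conversation backed by the Anthropic provider registered on the given SmartAi instance.
   */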
  public static async createWithAnthropic(smartaiRefArg: SmartAi) {
    if (!smartaiRefArg.anthropicProvider) {
      throw new Error('Anthropic provider not available');
    }
    const conversation = new Conversation(smartaiRefArg, {
      processFunction: async (input) => {
        return ''; // TODO implement proper streaming
      }
    });
    return conversation;
  }

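  /**
   * Creates a conversation backed by the Perplexity provider registered on the given SmartAi instance.
   */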
  public static async createWithPerplexity(smartaiRefArg: SmartAi) {
    if (!smartaiRefArg.perplexityProvider) {
      throw new Error('Perplexity provider not available');
    }
    const conversation = new Conversation(smartaiRefArg, {
      processFunction: async (input) => {
        return ''; // TODO implement proper streaming
      }
    });
    return conversation;
  }

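  /**
   * Creates a conversation backed by the Ollama provider registered on the given SmartAi instance.
   */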
  public static async createWithOllama(smartaiRefArg: SmartAi) {
    if (!smartaiRefArg.ollamaProvider) {
      throw new Error('Ollama provider not available');
    }
    const conversation = new Conversation(smartaiRefArg, {
      processFunction: async (input) => {
        return ''; // TODO implement proper streaming
      }
    });
    return conversation;
  }

  // INSTANCE
  smartaiRef: SmartAi;
  private systemMessage: string;
  private processFunction: TProcessFunction;
  private inputStreamWriter: WritableStreamDefaultWriter<string> | null = null;
  private outputStreamController: ReadableStreamDefaultController<string> | null = null;

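  /**
   * Keeps a reference to the owning SmartAi instance and stores the process function used for streaming.
   */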
  constructor(smartairefArg: SmartAi, options: IConversationOptions) {
    this.smartaiRef = smartairefArg;
    this.processFunction = options.processFunction;
  }

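  /**
   * Sets the system message that primes the conversation.
   */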
  public async setSystemMessage(systemMessageArg: string) {
    this.systemMessage = systemMessageArg;
  }

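  /**
   * Creates the readable output stream and captures its controller so the input side can enqueue processed chunks.
   */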
  private setupOutputStream(): ReadableStream<string> {
    return new ReadableStream<string>({
      start: (controller) => {
        this.outputStreamController = controller;
      }
    });
  }

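  /**
   * Creates the writable input stream: each chunk written to it is run through the process function
   * and the result is enqueued on the output stream, if one has been set up.
   */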
  private setupInputStream(): WritableStream<string> {
    const writableStream = new WritableStream<string>({
      write: async (chunk) => {
        const processedData = await this.processFunction(chunk);
        if (this.outputStreamController) {
          this.outputStreamController.enqueue(processedData);
        }
      },
      close: () => {
        this.outputStreamController?.close();
      },
      abort: (err) => {
        console.error('Stream aborted', err);
        this.outputStreamController?.error(err);
      }
    });
    return writableStream;
  }

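  /**
   * Returns a writer for the input stream, lazily creating the stream on first use.
   */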
  public getInputStreamWriter(): WritableStreamDefaultWriter<string> {
    if (!this.inputStreamWriter) {
      const inputStream = this.setupInputStream();
      this.inputStreamWriter = inputStream.getWriter();
    }
    return this.inputStreamWriter;
  }

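  /**
   * Returns a fresh readable output stream. Note that each call creates a new stream and
   * replaces the controller that the input side enqueues into.
   */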
  public getOutputStream(): ReadableStream<string> {
    return this.setupOutputStream();
  }
}
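
/*
 * Usage sketch (illustrative only): wiring a conversation to its streams.
 * Assumes a `SmartAi` instance whose `openaiProvider` is already configured;
 * since the factory's processFunction is still a stub, the output chunks will
 * be empty strings until streaming is implemented.
 *
 *   const conversation = await Conversation.createWithOpenAi(smartai);
 *   await conversation.setSystemMessage('You are a helpful assistant.');
 *   const writer = conversation.getInputStreamWriter();
 *   const reader = conversation.getOutputStream().getReader();
 *   await writer.write('Hello there!');
 *   const { value } = await reader.read(); // processed response chunk
 */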