// provider.openai.ts — OpenAI multimodal provider for smartai
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
export class OpenAiProvider extends MultiModalModel {
2024-04-25 08:49:07 +00:00
public smartexposeInstance: plugins.smartexpose.SmartExpose;
2024-03-31 00:32:37 +00:00
private openAiToken: string;
public openAiApiClient: plugins.openai.default;
2024-04-25 08:49:07 +00:00
constructor(openaiToken: string, expose) {
2024-03-31 00:32:37 +00:00
super();
this.openAiToken = openaiToken; // Ensure the token is stored
}
async start() {
this.openAiApiClient = new plugins.openai.default({
apiKey: this.openAiToken,
dangerouslyAllowBrowser: true,
});
}
async stop() {}
2024-04-25 08:49:07 +00:00
public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
// TODO: implement for OpenAI
2024-03-31 00:32:37 +00:00
2024-04-25 08:49:07 +00:00
const returnStream = new ReadableStream();
return returnStream;
2024-03-31 00:32:37 +00:00
}
// Implementing the synchronous chat interaction
public async chat(
2024-04-25 08:49:07 +00:00
optionsArg: {
systemMessage: string,
userMessage: string,
messageHistory: {
role: 'assistant' | 'user';
content: string;
}[]
}
2024-03-31 00:32:37 +00:00
) {
const result = await this.openAiApiClient.chat.completions.create({
2024-04-01 01:00:23 +00:00
model: 'gpt-4-turbo-preview',
2024-04-25 08:49:07 +00:00
2024-03-31 00:32:37 +00:00
messages: [
2024-04-25 08:49:07 +00:00
{ role: 'system', content: optionsArg.systemMessage },
...optionsArg.messageHistory,
{ role: 'user', content: optionsArg.userMessage },
2024-03-31 00:32:37 +00:00
],
});
return {
message: result.choices[0].message,
};
}
2024-04-25 08:49:07 +00:00
public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
const done = plugins.smartpromise.defer<NodeJS.ReadableStream>();
2024-03-31 00:32:37 +00:00
const result = await this.openAiApiClient.audio.speech.create({
model: 'tts-1-hd',
2024-04-25 08:49:07 +00:00
input: optionsArg.message,
2024-03-31 00:32:37 +00:00
voice: 'nova',
response_format: 'mp3',
speed: 1,
});
2024-04-25 08:49:07 +00:00
const stream = result.body;
done.resolve(stream);
2024-03-31 00:32:37 +00:00
return done.promise;
}
2024-04-25 08:49:07 +00:00
public async document(optionsArg: {
systemMessage: string,
userMessage: string,
documents: Uint8Array[],
messageHistory: {
role: 'assistant' | 'user';
content: any;
}[];
}) {
const result = await this.openAiApiClient.chat.completions.create({
model: 'gpt-4-vision-preview',
messages: [
{ role: 'system', content: optionsArg.systemMessage },
...optionsArg.messageHistory,
{ role: 'user', content: [
{type: 'text', text: optionsArg.userMessage},
...(() => {
const returnArray = [];
for (const document of optionsArg.documents) {
returnArray.push({type: 'image_url', image_url: })
}
return returnArray;
})()
] },
],
});
return {
message: result.choices[0].message,
};
}
2024-03-31 00:32:37 +00:00
}