platformservice/ts/aibridge/aibridge.classes.openaibridge.ts

import { AiBridge } from './aibridge.classes.aibridge.js';
import * as plugins from './aibridge.plugins.js';
import * as paths from './aibridge.paths.js';

/**
 * Bridges the AiBridge service to the OpenAI API for chat completions
 * and text-to-speech.
 */
export class OpenAiBridge {
  public aiBridgeRef: AiBridge;
  public openAiApiClient: plugins.openai.default;

  constructor(aiBridgeRefArg: AiBridge) {
    this.aiBridgeRef = aiBridgeRefArg;
  }

  /**
   * Resolves the OPENAI_TOKEN environment variable and instantiates
   * the OpenAI API client.
   */
  public async start() {
    const openAiToken = await this.aiBridgeRef.serviceQenv.getEnvVarOnDemand('OPENAI_TOKEN');
    this.openAiApiClient = new plugins.openai.default({
      apiKey: openAiToken,
      dangerouslyAllowBrowser: true,
    });
  }

  public async stop() {}

  /**
   * Sends a chat completion request: the system message comes first,
   * followed by any prior message history, then the new user message.
   */
  public async chat(
    systemMessage: string,
    userMessage: string,
    messageHistory: {
      role: 'assistant' | 'user';
      content: string;
    }[]
  ) {
    const result = await this.openAiApiClient.chat.completions.create({
      model: 'gpt-4-turbo-preview',
      messages: [
        { role: 'system', content: systemMessage },
        ...messageHistory,
        { role: 'user', content: userMessage },
      ],
    });
    return {
      message: result.choices[0].message,
    };
  }

  /**
   * Converts the given text to speech via the OpenAI tts-1-hd model and
   * writes the resulting mp3 to <nogitDir>/output.mp3. The returned promise
   * resolves once the file has been fully written.
   */
  public async audio(messageArg: string) {
    const done = plugins.smartpromise.defer();
    const result = await this.openAiApiClient.audio.speech.create({
      model: 'tts-1-hd',
      input: messageArg,
      voice: 'nova',
      response_format: 'mp3',
      speed: 1,
    });
    // Pipe the response body stream into a write stream in the nogit directory.
    const stream = result.body.pipe(
      plugins.smartfile.fsStream.createWriteStream(plugins.path.join(paths.nogitDir, 'output.mp3'))
    );
    stream.on('finish', () => {
      done.resolve();
    });
    return done.promise;
  }
}
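
// Usage sketch (illustrative, not part of the module): assumes an AiBridge
// instance whose serviceQenv can resolve OPENAI_TOKEN; the AiBridge
// construction shown here is hypothetical and depends on your setup.
//
// const aiBridge = new AiBridge(/* ...your AiBridge options... */);
// const openAiBridge = new OpenAiBridge(aiBridge);
// await openAiBridge.start();
// const { message } = await openAiBridge.chat(
//   'You are a helpful assistant.',
//   'Give me a one-sentence status summary.',
//   [] // no prior history
// );
// console.log(message.content);
// await openAiBridge.audio('All systems nominal.'); // writes output.mp3 into the nogit directory
// await openAiBridge.stop();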