import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
/**
 * Construction options for {@link OpenAiProvider}.
 */
export interface IOpenaiProviderOptions {
  /** API key used to authenticate requests against the OpenAI API. */
  openaiToken: string;
}
export class OpenAiProvider extends MultiModalModel {
|
2024-04-27 12:47:49 +02:00
|
|
|
private options: IOpenaiProviderOptions;
|
2024-03-31 01:32:37 +01:00
|
|
|
public openAiApiClient: plugins.openai.default;
|
2024-04-27 12:47:49 +02:00
|
|
|
public smartpdfInstance: plugins.smartpdf.SmartPdf;
|
2024-03-31 01:32:37 +01:00
|
|
|
|
2024-04-27 12:47:49 +02:00
|
|
|
constructor(optionsArg: IOpenaiProviderOptions) {
|
2024-03-31 01:32:37 +01:00
|
|
|
super();
|
2024-04-27 12:47:49 +02:00
|
|
|
this.options = optionsArg;
|
2024-03-31 01:32:37 +01:00
|
|
|
}
|
|
|
|
|
2024-04-27 12:47:49 +02:00
|
|
|
public async start() {
|
2024-03-31 01:32:37 +01:00
|
|
|
this.openAiApiClient = new plugins.openai.default({
|
2024-04-27 12:47:49 +02:00
|
|
|
apiKey: this.options.openaiToken,
|
2024-03-31 01:32:37 +01:00
|
|
|
dangerouslyAllowBrowser: true,
|
|
|
|
});
|
2024-04-27 12:47:49 +02:00
|
|
|
this.smartpdfInstance = new plugins.smartpdf.SmartPdf();
|
2024-03-31 01:32:37 +01:00
|
|
|
}
|
|
|
|
|
2024-04-27 12:47:49 +02:00
|
|
|
public async stop() {}
|
2024-03-31 01:32:37 +01:00
|
|
|
|
2025-02-03 15:16:58 +01:00
|
|
|
public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
|
|
|
|
// Create a TextDecoder to handle incoming chunks
|
|
|
|
const decoder = new TextDecoder();
|
|
|
|
let buffer = '';
|
|
|
|
let currentMessage: { role: string; content: string; } | null = null;
|
2024-03-31 01:32:37 +01:00
|
|
|
|
2025-02-03 15:16:58 +01:00
|
|
|
// Create a TransformStream to process the input
|
|
|
|
const transform = new TransformStream<Uint8Array, string>({
|
|
|
|
async transform(chunk, controller) {
|
|
|
|
buffer += decoder.decode(chunk, { stream: true });
|
|
|
|
|
|
|
|
// Try to parse complete JSON messages from the buffer
|
|
|
|
while (true) {
|
|
|
|
const newlineIndex = buffer.indexOf('\n');
|
|
|
|
if (newlineIndex === -1) break;
|
|
|
|
|
|
|
|
const line = buffer.slice(0, newlineIndex);
|
|
|
|
buffer = buffer.slice(newlineIndex + 1);
|
|
|
|
|
|
|
|
if (line.trim()) {
|
|
|
|
try {
|
|
|
|
const message = JSON.parse(line);
|
|
|
|
currentMessage = {
|
|
|
|
role: message.role || 'user',
|
|
|
|
content: message.content || '',
|
|
|
|
};
|
|
|
|
} catch (e) {
|
|
|
|
console.error('Failed to parse message:', e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// If we have a complete message, send it to OpenAI
|
|
|
|
if (currentMessage) {
|
|
|
|
const stream = await this.openAiApiClient.chat.completions.create({
|
|
|
|
model: 'gpt-4',
|
|
|
|
messages: [{ role: currentMessage.role, content: currentMessage.content }],
|
|
|
|
stream: true,
|
|
|
|
});
|
|
|
|
|
|
|
|
// Process each chunk from OpenAI
|
|
|
|
for await (const chunk of stream) {
|
|
|
|
const content = chunk.choices[0]?.delta?.content;
|
|
|
|
if (content) {
|
|
|
|
controller.enqueue(content);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
currentMessage = null;
|
|
|
|
}
|
|
|
|
},
|
|
|
|
|
|
|
|
flush(controller) {
|
|
|
|
if (buffer) {
|
|
|
|
try {
|
|
|
|
const message = JSON.parse(buffer);
|
|
|
|
controller.enqueue(message.content || '');
|
|
|
|
} catch (e) {
|
|
|
|
console.error('Failed to parse remaining buffer:', e);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
});
|
|
|
|
|
|
|
|
// Connect the input to our transform stream
|
|
|
|
return input.pipeThrough(transform);
|
2024-03-31 01:32:37 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
// Implementing the synchronous chat interaction
|
2024-04-27 12:47:49 +02:00
|
|
|
public async chat(optionsArg: {
|
|
|
|
systemMessage: string;
|
|
|
|
userMessage: string;
|
|
|
|
messageHistory: {
|
|
|
|
role: 'assistant' | 'user';
|
|
|
|
content: string;
|
|
|
|
}[];
|
|
|
|
}) {
|
2024-03-31 01:32:37 +01:00
|
|
|
const result = await this.openAiApiClient.chat.completions.create({
|
2024-05-17 16:25:22 +02:00
|
|
|
model: 'gpt-4o',
|
2024-04-25 10:49:07 +02:00
|
|
|
|
2024-03-31 01:32:37 +01:00
|
|
|
messages: [
|
2024-04-25 10:49:07 +02:00
|
|
|
{ role: 'system', content: optionsArg.systemMessage },
|
|
|
|
...optionsArg.messageHistory,
|
|
|
|
{ role: 'user', content: optionsArg.userMessage },
|
2024-03-31 01:32:37 +01:00
|
|
|
],
|
|
|
|
});
|
|
|
|
return {
|
2024-04-29 11:18:40 +02:00
|
|
|
role: result.choices[0].message.role as 'assistant',
|
|
|
|
message: result.choices[0].message.content,
|
2024-03-31 01:32:37 +01:00
|
|
|
};
|
|
|
|
}
|
|
|
|
|
2024-04-25 10:49:07 +02:00
|
|
|
public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
|
|
|
|
const done = plugins.smartpromise.defer<NodeJS.ReadableStream>();
|
2024-03-31 01:32:37 +01:00
|
|
|
const result = await this.openAiApiClient.audio.speech.create({
|
|
|
|
model: 'tts-1-hd',
|
2024-04-25 10:49:07 +02:00
|
|
|
input: optionsArg.message,
|
2024-03-31 01:32:37 +01:00
|
|
|
voice: 'nova',
|
|
|
|
response_format: 'mp3',
|
|
|
|
speed: 1,
|
|
|
|
});
|
2024-04-25 10:49:07 +02:00
|
|
|
const stream = result.body;
|
|
|
|
done.resolve(stream);
|
2024-03-31 01:32:37 +01:00
|
|
|
return done.promise;
|
|
|
|
}
|
2024-04-25 10:49:07 +02:00
|
|
|
|
|
|
|
public async document(optionsArg: {
|
2024-04-27 12:47:49 +02:00
|
|
|
systemMessage: string;
|
|
|
|
userMessage: string;
|
|
|
|
pdfDocuments: Uint8Array[];
|
2024-04-25 10:49:07 +02:00
|
|
|
messageHistory: {
|
|
|
|
role: 'assistant' | 'user';
|
|
|
|
content: any;
|
|
|
|
}[];
|
|
|
|
}) {
|
2024-04-27 12:47:49 +02:00
|
|
|
let pdfDocumentImageBytesArray: Uint8Array[] = [];
|
|
|
|
|
|
|
|
for (const pdfDocument of optionsArg.pdfDocuments) {
|
|
|
|
const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
|
|
|
|
pdfDocumentImageBytesArray = pdfDocumentImageBytesArray.concat(documentImageArray);
|
|
|
|
}
|
|
|
|
|
|
|
|
console.log(`image smartfile array`);
|
|
|
|
console.log(pdfDocumentImageBytesArray.map((smartfile) => smartfile.length));
|
|
|
|
|
|
|
|
const smartfileArray = await plugins.smartarray.map(
|
|
|
|
pdfDocumentImageBytesArray,
|
|
|
|
async (pdfDocumentImageBytes) => {
|
|
|
|
return plugins.smartfile.SmartFile.fromBuffer(
|
|
|
|
'pdfDocumentImage.jpg',
|
|
|
|
Buffer.from(pdfDocumentImageBytes)
|
|
|
|
);
|
|
|
|
}
|
|
|
|
);
|
|
|
|
|
2024-04-25 10:49:07 +02:00
|
|
|
const result = await this.openAiApiClient.chat.completions.create({
|
2024-05-17 16:25:22 +02:00
|
|
|
model: 'gpt-4o',
|
2024-04-27 12:47:49 +02:00
|
|
|
// response_format: { type: "json_object" }, // not supported for now
|
2024-04-25 10:49:07 +02:00
|
|
|
messages: [
|
|
|
|
{ role: 'system', content: optionsArg.systemMessage },
|
|
|
|
...optionsArg.messageHistory,
|
2024-04-27 12:47:49 +02:00
|
|
|
{
|
|
|
|
role: 'user',
|
|
|
|
content: [
|
|
|
|
{ type: 'text', text: optionsArg.userMessage },
|
|
|
|
...(() => {
|
|
|
|
const returnArray = [];
|
|
|
|
for (const imageBytes of pdfDocumentImageBytesArray) {
|
|
|
|
returnArray.push({
|
|
|
|
type: 'image_url',
|
|
|
|
image_url: {
|
|
|
|
url: 'data:image/png;base64,' + Buffer.from(imageBytes).toString('base64'),
|
|
|
|
},
|
|
|
|
});
|
|
|
|
}
|
|
|
|
return returnArray;
|
|
|
|
})(),
|
|
|
|
],
|
|
|
|
},
|
2024-04-25 10:49:07 +02:00
|
|
|
],
|
|
|
|
});
|
|
|
|
return {
|
|
|
|
message: result.choices[0].message,
|
|
|
|
};
|
|
|
|
}
|
2025-02-03 15:26:00 +01:00
|
|
|
|
|
|
|
public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
|
|
|
|
const result = await this.openAiApiClient.chat.completions.create({
|
|
|
|
model: 'gpt-4-vision-preview',
|
|
|
|
messages: [
|
|
|
|
{
|
|
|
|
role: 'user',
|
|
|
|
content: [
|
|
|
|
{ type: 'text', text: optionsArg.prompt },
|
|
|
|
{
|
|
|
|
type: 'image_url',
|
|
|
|
image_url: {
|
|
|
|
url: `data:image/jpeg;base64,${optionsArg.image.toString('base64')}`
|
|
|
|
}
|
|
|
|
}
|
|
|
|
]
|
|
|
|
}
|
|
|
|
],
|
|
|
|
max_tokens: 300
|
|
|
|
});
|
|
|
|
|
|
|
|
return result.choices[0].message.content || '';
|
|
|
|
}
|
2024-03-31 01:32:37 +01:00
|
|
|
}
|