fix(core): enhance chat streaming and error handling across providers
This commit is contained in:
@@ -27,11 +27,72 @@ export class OpenAiProvider extends MultiModalModel {
|
||||
|
||||
/** No-op: this provider currently requires no teardown when stopped. */
public async stop() {}
|
||||
|
||||
public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
|
||||
// TODO: implement for OpenAI
|
||||
public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
|
||||
// Create a TextDecoder to handle incoming chunks
|
||||
const decoder = new TextDecoder();
|
||||
let buffer = '';
|
||||
let currentMessage: { role: string; content: string; } | null = null;
|
||||
|
||||
const returnStream = new ReadableStream();
|
||||
return returnStream;
|
||||
// Create a TransformStream to process the input
|
||||
const transform = new TransformStream<Uint8Array, string>({
|
||||
async transform(chunk, controller) {
|
||||
buffer += decoder.decode(chunk, { stream: true });
|
||||
|
||||
// Try to parse complete JSON messages from the buffer
|
||||
while (true) {
|
||||
const newlineIndex = buffer.indexOf('\n');
|
||||
if (newlineIndex === -1) break;
|
||||
|
||||
const line = buffer.slice(0, newlineIndex);
|
||||
buffer = buffer.slice(newlineIndex + 1);
|
||||
|
||||
if (line.trim()) {
|
||||
try {
|
||||
const message = JSON.parse(line);
|
||||
currentMessage = {
|
||||
role: message.role || 'user',
|
||||
content: message.content || '',
|
||||
};
|
||||
} catch (e) {
|
||||
console.error('Failed to parse message:', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// If we have a complete message, send it to OpenAI
|
||||
if (currentMessage) {
|
||||
const stream = await this.openAiApiClient.chat.completions.create({
|
||||
model: 'gpt-4',
|
||||
messages: [{ role: currentMessage.role, content: currentMessage.content }],
|
||||
stream: true,
|
||||
});
|
||||
|
||||
// Process each chunk from OpenAI
|
||||
for await (const chunk of stream) {
|
||||
const content = chunk.choices[0]?.delta?.content;
|
||||
if (content) {
|
||||
controller.enqueue(content);
|
||||
}
|
||||
}
|
||||
|
||||
currentMessage = null;
|
||||
}
|
||||
},
|
||||
|
||||
flush(controller) {
|
||||
if (buffer) {
|
||||
try {
|
||||
const message = JSON.parse(buffer);
|
||||
controller.enqueue(message.content || '');
|
||||
} catch (e) {
|
||||
console.error('Failed to parse remaining buffer:', e);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Connect the input to our transform stream
|
||||
return input.pipeThrough(transform);
|
||||
}
|
||||
|
||||
// Implementing the synchronous chat interaction
|
||||
|
Reference in New Issue
Block a user