fix(core): update
This commit is contained in:
parent
25db0618d6
commit
f2685164e5
@ -21,7 +21,13 @@
|
||||
"@push.rocks/tapbundle": "^5.0.15",
|
||||
"@types/node": "^20.8.7"
|
||||
},
|
||||
"dependencies": {},
|
||||
"dependencies": {
|
||||
"@push.rocks/qenv": "^6.0.5",
|
||||
"@push.rocks/smartfile": "^11.0.4",
|
||||
"@push.rocks/smartpath": "^5.0.11",
|
||||
"@push.rocks/smartpromise": "^4.0.3",
|
||||
"openai": "^4.31.0"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://code.foss.global/push.rocks/smartai.git"
|
||||
|
283
pnpm-lock.yaml
generated
283
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@push.rocks/smartai',
|
||||
version: '0.0.5',
|
||||
version: '0.0.6',
|
||||
description: 'a standardaized interface to talk to AI models'
|
||||
}
|
||||
|
8
ts/abstract.classes.multimodal.ts
Normal file
8
ts/abstract.classes.multimodal.ts
Normal file
@ -0,0 +1,8 @@
|
||||
/**
 * Base contract for AI providers that support multiple interaction modes.
 * Concrete providers (e.g. OpenAiProvider) implement client lifecycle and
 * streaming chat on top of this shape.
 */
export abstract class MultiModal {
  // Lifecycle: prepare any API clients/resources before first use.
  abstract start(): Promise<void>;
  // Lifecycle: release whatever start() acquired.
  abstract stop(): Promise<void>;

  // Defines a streaming interface for chat interactions.
  // The implementation will vary based on the specific AI model.
  abstract chatStream(input: ReadableStream<string>): ReadableStream<string>;
}
|
@ -1 +1,3 @@
|
||||
export * from './smartai.classes.smartai.js';
|
||||
export * from './smartai.classes.smartai.js';
|
||||
export * from './abstract.classes.multimodal.js';
|
||||
export * from './provider.openai.js';
|
||||
|
4
ts/paths.ts
Normal file
4
ts/paths.ts
Normal file
@ -0,0 +1,4 @@
|
||||
import * as plugins from './plugins.js';
|
||||
|
||||
// Absolute path of the package root, resolved from this module's own
// location (one level above the compiled ts/ directory).
export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
// Git-ignored scratch directory for generated artifacts (e.g. tts output.mp3).
export const nogitDir = plugins.path.join(packageDir, './.nogit');
|
26
ts/plugins.ts
Normal file
26
ts/plugins.ts
Normal file
@ -0,0 +1,26 @@
|
||||
// node native
|
||||
import * as path from 'path';
|
||||
|
||||
export {
|
||||
path,
|
||||
}
|
||||
|
||||
// @push.rocks scope
|
||||
import * as qenv from '@push.rocks/qenv';
|
||||
import * as smartpath from '@push.rocks/smartpath';
|
||||
import * as smartpromise from '@push.rocks/smartpromise';
|
||||
import * as smartfile from '@push.rocks/smartfile';
|
||||
|
||||
export {
|
||||
qenv,
|
||||
smartpath,
|
||||
smartpromise,
|
||||
smartfile,
|
||||
}
|
||||
|
||||
// third party
|
||||
import * as openai from 'openai';
|
||||
|
||||
export {
|
||||
openai,
|
||||
}
|
88
ts/provider.openai.ts
Normal file
88
ts/provider.openai.ts
Normal file
@ -0,0 +1,88 @@
|
||||
import * as plugins from './plugins.js';
|
||||
import * as paths from './paths.js';
|
||||
|
||||
import { MultiModal } from './abstract.classes.multimodal.js';
|
||||
|
||||
export class OpenAiProvider extends MultiModal {
|
||||
private openAiToken: string;
|
||||
public openAiApiClient: plugins.openai.default;
|
||||
|
||||
constructor(openaiToken: string) {
|
||||
super();
|
||||
this.openAiToken = openaiToken; // Ensure the token is stored
|
||||
}
|
||||
|
||||
async start() {
|
||||
this.openAiApiClient = new plugins.openai.default({
|
||||
apiKey: this.openAiToken,
|
||||
dangerouslyAllowBrowser: true,
|
||||
});
|
||||
}
|
||||
|
||||
async stop() {}
|
||||
|
||||
chatStream(input: ReadableStream<string>): ReadableStream<string> {
|
||||
const decoder = new TextDecoder();
|
||||
let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
|
||||
|
||||
return new ReadableStream({
|
||||
async start(controller) {
|
||||
const reader = input.getReader();
|
||||
try {
|
||||
let done, value;
|
||||
while ((({ done, value } = await reader.read()), !done)) {
|
||||
const userMessage = decoder.decode(value, { stream: true });
|
||||
messageHistory.push({ role: 'user', content: userMessage });
|
||||
|
||||
const aiResponse = await this.chat('', userMessage, messageHistory);
|
||||
messageHistory.push({ role: 'assistant', content: aiResponse.message });
|
||||
|
||||
// Directly enqueue the string response instead of encoding it first
|
||||
controller.enqueue(aiResponse.message);
|
||||
}
|
||||
controller.close();
|
||||
} catch (err) {
|
||||
controller.error(err);
|
||||
}
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// Implementing the synchronous chat interaction
|
||||
public async chat(
|
||||
systemMessage: string,
|
||||
userMessage: string,
|
||||
messageHistory: {
|
||||
role: 'assistant' | 'user';
|
||||
content: string;
|
||||
}[]
|
||||
) {
|
||||
const result = await this.openAiApiClient.chat.completions.create({
|
||||
model: 'gpt-3.5-turbo-16k-0613',
|
||||
messages: [
|
||||
{ role: 'system', content: systemMessage },
|
||||
...messageHistory,
|
||||
{ role: 'user', content: userMessage },
|
||||
],
|
||||
});
|
||||
return {
|
||||
message: result.choices[0].message,
|
||||
};
|
||||
}
|
||||
|
||||
public async audio(messageArg: string) {
|
||||
const done = plugins.smartpromise.defer();
|
||||
const result = await this.openAiApiClient.audio.speech.create({
|
||||
model: 'tts-1-hd',
|
||||
input: messageArg,
|
||||
voice: 'nova',
|
||||
response_format: 'mp3',
|
||||
speed: 1,
|
||||
});
|
||||
const stream = result.body.pipe(plugins.smartfile.fsStream.createWriteStream(plugins.path.join(paths.nogitDir, 'output.mp3')));
|
||||
stream.on('finish', () => {
|
||||
done.resolve();
|
||||
});
|
||||
return done.promise;
|
||||
}
|
||||
}
|
@ -1,4 +0,0 @@
|
||||
const removeme = {};
|
||||
export {
|
||||
removeme
|
||||
}
|
Loading…
Reference in New Issue
Block a user