4 Commits

Author SHA1 Message Date
f628a71184 0.0.12 2024-04-29 11:18:41 +02:00
d1465fc868 fix(provider): fix anthropic integration 2024-04-29 11:18:40 +02:00
9e19d320e1 0.0.11 2024-04-27 12:47:50 +02:00
158d49fa95 fix(core): update 2024-04-27 12:47:49 +02:00
10 changed files with 374 additions and 378 deletions

View File

@ -1,6 +1,6 @@
{ {
"name": "@push.rocks/smartai", "name": "@push.rocks/smartai",
"version": "0.0.10", "version": "0.0.12",
"private": false, "private": false,
"description": "Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.", "description": "Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.",
"main": "dist_ts/index.js", "main": "dist_ts/index.js",
@ -24,12 +24,14 @@
}, },
"dependencies": { "dependencies": {
"@anthropic-ai/sdk": "^0.20.7", "@anthropic-ai/sdk": "^0.20.7",
"@push.rocks/smartexpose": "^1.0.5", "@push.rocks/smartarray": "^1.0.8",
"@push.rocks/smartfile": "^11.0.14", "@push.rocks/smartfile": "^11.0.14",
"@push.rocks/smartpath": "^5.0.18", "@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpdf": "^3.1.5",
"@push.rocks/smartpromise": "^4.0.3", "@push.rocks/smartpromise": "^4.0.3",
"@push.rocks/smartrequest": "^2.0.22",
"@push.rocks/webstream": "^1.0.8", "@push.rocks/webstream": "^1.0.8",
"openai": "^4.38.3" "openai": "^4.38.5"
}, },
"repository": { "repository": {
"type": "git", "type": "git",

400
pnpm-lock.yaml generated

File diff suppressed because it is too large Load Diff

View File

@ -1,5 +1,7 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle'; import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import * as qenv from '@push.rocks/qenv'; import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartfile from '@push.rocks/smartfile';
const testQenv = new qenv.Qenv('./', './.nogit/'); const testQenv = new qenv.Qenv('./', './.nogit/');
@ -10,8 +12,73 @@ let testSmartai: smartai.SmartAi;
tap.test('should create a smartai instance', async () => { tap.test('should create a smartai instance', async () => {
testSmartai = new smartai.SmartAi({ testSmartai = new smartai.SmartAi({
openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'), openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
}); });
await testSmartai.start();
});
tap.test('should create chat response with openai', async () => {
const userMessage = 'How are you?';
const response = await testSmartai.openaiProvider.chat({
systemMessage: 'Hello',
userMessage: userMessage,
messageHistory: [
],
});
console.log(`userMessage: ${userMessage}`);
console.log(response.message.content);
});
tap.test('should document a pdf', async () => {
const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
const pdfResponse = await smartrequest.getBinary(pdfUrl);
const result = await testSmartai.openaiProvider.document({
systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "bank account statement", "contract", "other"',
userMessage: "Classify the document.",
messageHistory: [],
pdfDocuments: [pdfResponse.body],
});
console.log(result);
});
tap.test('should recognize companies in a pdf', async () => {
const pdfBuffer = await smartfile.fs.toBuffer('./.nogit/demo_without_textlayer.pdf');
const result = await testSmartai.openaiProvider.document({
systemMessage: `
summarize the document.
answer in JSON format, adhering to the following schema:
\`\`\`typescript
type TAnswer = {
entitySender: {
type: 'official state entity' | 'company' | 'person';
name: string;
address: string;
city: string;
country: string;
EU: boolean; // whether the entity is within EU
};
entityReceiver: {
type: 'official state entity' | 'company' | 'person';
name: string;
address: string;
city: string;
country: string;
EU: boolean; // whether the entity is within EU
};
date: string; // the date of the document as YYYY-MM-DD
title: string; // a short title, suitable for a filename
}
\`\`\`
`,
userMessage: "Classify the document.",
messageHistory: [],
pdfDocuments: [pdfBuffer],
});
console.log(result);
}) })
tap.start() tap.test('should stop the smartai instance', async () => {
await testSmartai.stop();
});
export default tap.start();

View File

@ -3,6 +3,6 @@
*/ */
export const commitinfo = { export const commitinfo = {
name: '@push.rocks/smartai', name: '@push.rocks/smartai',
version: '0.0.10', version: '0.0.12',
description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.' description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
} }

View File

@ -16,7 +16,10 @@ export abstract class MultiModalModel {
role: 'assistant' | 'user'; role: 'assistant' | 'user';
content: string; content: string;
}[] }[]
}): Promise<{}> }): Promise<{
role: 'assistant';
message: string;
}>
/** /**
* Defines a streaming interface for chat interactions. * Defines a streaming interface for chat interactions.

View File

@ -12,9 +12,11 @@ export interface IConversationOptions {
*/ */
export class Conversation { export class Conversation {
// STATIC // STATIC
public static async createWithOpenAi(smartaiRef: SmartAi) { public static async createWithOpenAi(smartaiRefArg: SmartAi) {
const openaiProvider = new OpenAiProvider(smartaiRef.options.openaiToken); if (!smartaiRefArg.openaiProvider) {
const conversation = new Conversation(smartaiRef, { throw new Error('OpenAI provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => { processFunction: async (input) => {
return '' // TODO implement proper streaming return '' // TODO implement proper streaming
} }
@ -22,9 +24,11 @@ export class Conversation {
return conversation; return conversation;
} }
public static async createWithAnthropic(smartaiRef: SmartAi) { public static async createWithAnthropic(smartaiRefArg: SmartAi) {
const anthropicProvider = new OpenAiProvider(smartaiRef.options.anthropicToken); if (!smartaiRefArg.anthropicProvider) {
const conversation = new Conversation(smartaiRef, { throw new Error('Anthropic provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => { processFunction: async (input) => {
return '' // TODO implement proper streaming return '' // TODO implement proper streaming
} }
@ -32,6 +36,29 @@ export class Conversation {
return conversation; return conversation;
} }
public static async createWithPerplexity(smartaiRefArg: SmartAi) {
if (!smartaiRefArg.perplexityProvider) {
throw new Error('Perplexity provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
});
return conversation;
}
public static async createWithOllama(smartaiRefArg: SmartAi) {
if (!smartaiRefArg.ollamaProvider) {
throw new Error('Ollama provider not available');
}
const conversation = new Conversation(smartaiRefArg, {
processFunction: async (input) => {
return '' // TODO implement proper streaming
}
});
return conversation;
}
// INSTANCE // INSTANCE
smartaiRef: SmartAi smartaiRef: SmartAi
@ -44,8 +71,8 @@ export class Conversation {
this.processFunction = options.processFunction; this.processFunction = options.processFunction;
} }
setSystemMessage(systemMessage: string) { public async setSystemMessage(systemMessageArg: string) {
this.systemMessage = systemMessage; this.systemMessage = systemMessageArg;
} }
private setupOutputStream(): ReadableStream<string> { private setupOutputStream(): ReadableStream<string> {
@ -57,7 +84,7 @@ export class Conversation {
} }
private setupInputStream(): WritableStream<string> { private setupInputStream(): WritableStream<string> {
return new WritableStream<string>({ const writableStream = new WritableStream<string>({
write: async (chunk) => { write: async (chunk) => {
const processedData = await this.processFunction(chunk); const processedData = await this.processFunction(chunk);
if (this.outputStreamController) { if (this.outputStreamController) {
@ -72,6 +99,7 @@ export class Conversation {
this.outputStreamController?.error(err); this.outputStreamController?.error(err);
} }
}); });
return writableStream;
} }
public getInputStreamWriter(): WritableStreamDefaultWriter<string> { public getInputStreamWriter(): WritableStreamDefaultWriter<string> {

View File

@ -1,8 +1,8 @@
import { Conversation } from './classes.conversation.js'; import { Conversation } from './classes.conversation.js';
import * as plugins from './plugins.js'; import * as plugins from './plugins.js';
import type { AnthropicProvider } from './provider.anthropic.js'; import { AnthropicProvider } from './provider.anthropic.js';
import type { OllamaProvider } from './provider.ollama.js'; import type { OllamaProvider } from './provider.ollama.js';
import type { OpenAiProvider } from './provider.openai.js'; import { OpenAiProvider } from './provider.openai.js';
import type { PerplexityProvider } from './provider.perplexity.js'; import type { PerplexityProvider } from './provider.perplexity.js';
@ -10,9 +10,10 @@ export interface ISmartAiOptions {
openaiToken?: string; openaiToken?: string;
anthropicToken?: string; anthropicToken?: string;
perplexityToken?: string; perplexityToken?: string;
exposeCredentials?: plugins.smartexpose.ISmartExposeOptions;
} }
export type TProvider = 'openai' | 'anthropic' | 'perplexity' | 'ollama';
export class SmartAi { export class SmartAi {
public options: ISmartAiOptions; public options: ISmartAiOptions;
@ -26,22 +27,36 @@ export class SmartAi {
} }
public async start() { public async start() {
if (this.options.openaiToken) {
this.openaiProvider = new OpenAiProvider({
openaiToken: this.options.openaiToken,
});
await this.openaiProvider.start();
}
if (this.options.anthropicToken) {
this.anthropicProvider = new AnthropicProvider({
anthropicToken: this.options.anthropicToken,
});
}
} }
public async stop() {} public async stop() {}
/** /**
* creates an OpenAI conversation * create a new conversation
*/ */
public async createOpenApiConversation() { createConversation(provider: TProvider) {
const conversation = await Conversation.createWithOpenAi(this); switch (provider) {
} case 'openai':
return Conversation.createWithOpenAi(this);
/** case 'anthropic':
* creates an OpenAI conversation return Conversation.createWithAnthropic(this);
*/ case 'perplexity':
public async createAnthropicConversation() { return Conversation.createWithPerplexity(this);
const conversation = await Conversation.createWithAnthropic(this); case 'ollama':
return Conversation.createWithOllama(this);
default:
throw new Error('Provider not available');
}
} }
} }

View File

@ -7,18 +7,22 @@ export {
// @push.rocks scope // @push.rocks scope
import * as qenv from '@push.rocks/qenv'; import * as qenv from '@push.rocks/qenv';
import * as smartexpose from '@push.rocks/smartexpose'; import * as smartarray from '@push.rocks/smartarray';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartfile from '@push.rocks/smartfile'; import * as smartfile from '@push.rocks/smartfile';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpdf from '@push.rocks/smartpdf';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as webstream from '@push.rocks/webstream'; import * as webstream from '@push.rocks/webstream';
export { export {
smartarray,
qenv, qenv,
smartexpose,
smartpath,
smartpromise,
smartfile, smartfile,
smartpath,
smartpdf,
smartpromise,
smartrequest,
webstream, webstream,
} }

View File

@ -2,74 +2,61 @@ import * as plugins from './plugins.js';
import * as paths from './paths.js'; import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js'; import { MultiModalModel } from './abstract.classes.multimodal.js';
export interface IAnthropicProviderOptions {
anthropicToken: string;
}
export class AnthropicProvider extends MultiModalModel { export class AnthropicProvider extends MultiModalModel {
private anthropicToken: string; private options: IAnthropicProviderOptions;
public anthropicApiClient: plugins.anthropic.default; public anthropicApiClient: plugins.anthropic.default;
constructor(anthropicToken: string) { constructor(optionsArg: IAnthropicProviderOptions) {
super(); super();
this.anthropicToken = anthropicToken; // Ensure the token is stored this.options = optionsArg // Ensure the token is stored
} }
async start() { async start() {
this.anthropicApiClient = new plugins.anthropic.default({ this.anthropicApiClient = new plugins.anthropic.default({
apiKey: this.anthropicToken, apiKey: this.options.anthropicToken,
}); });
} }
async stop() {} async stop() {}
chatStream(input: ReadableStream<string>): ReadableStream<string> { public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
const decoder = new TextDecoder(); // TODO: implement for Anthropic
let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
return new ReadableStream({ const returnStream = new ReadableStream();
async start(controller) { return returnStream;
const reader = input.getReader();
try {
let done, value;
while ((({ done, value } = await reader.read()), !done)) {
const userMessage = decoder.decode(value, { stream: true });
messageHistory.push({ role: 'user', content: userMessage });
const aiResponse = await this.chat('', userMessage, messageHistory);
messageHistory.push({ role: 'assistant', content: aiResponse.message });
// Directly enqueue the string response instead of encoding it first
controller.enqueue(aiResponse.message);
}
controller.close();
} catch (err) {
controller.error(err);
}
},
});
} }
// Implementing the synchronous chat interaction // Implementing the synchronous chat interaction
public async chat( public async chat(optionsArg: {
systemMessage: string, systemMessage: string;
userMessage: string, userMessage: string;
messageHistory: { messageHistory: {
role: 'assistant' | 'user'; role: 'assistant' | 'user';
content: string; content: string;
}[] }[];
) { }) {
const result = await this.anthropicApiClient.messages.create({ const result = await this.anthropicApiClient.messages.create({
model: 'claude-3-opus-20240229', model: 'claude-3-opus-20240229',
system: systemMessage, system: optionsArg.systemMessage,
messages: [ messages: [
...messageHistory, ...optionsArg.messageHistory,
{ role: 'user', content: userMessage }, { role: 'user', content: optionsArg.userMessage },
], ],
max_tokens: 4000, max_tokens: 4000,
}); });
return { return {
message: result.content, role: result.role as 'assistant',
message: result.content.join('\n'),
}; };
} }
public async audio(messageArg: string) { private async audio(messageArg: string) {
// Anthropic does not provide an audio API, so this method is not implemented. // Anthropic does not provide an audio API, so this method is not implemented.
throw new Error('Audio generation is not supported by Anthropic.'); throw new Error('Audio generation is not yet supported by Anthropic.');
} }
} }

View File

@ -3,24 +3,29 @@ import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js'; import { MultiModalModel } from './abstract.classes.multimodal.js';
export class OpenAiProvider extends MultiModalModel { export interface IOpenaiProviderOptions {
public smartexposeInstance: plugins.smartexpose.SmartExpose; openaiToken: string;
private openAiToken: string; }
public openAiApiClient: plugins.openai.default;
constructor(openaiToken: string, expose) { export class OpenAiProvider extends MultiModalModel {
private options: IOpenaiProviderOptions;
public openAiApiClient: plugins.openai.default;
public smartpdfInstance: plugins.smartpdf.SmartPdf;
constructor(optionsArg: IOpenaiProviderOptions) {
super(); super();
this.openAiToken = openaiToken; // Ensure the token is stored this.options = optionsArg;
} }
async start() { public async start() {
this.openAiApiClient = new plugins.openai.default({ this.openAiApiClient = new plugins.openai.default({
apiKey: this.openAiToken, apiKey: this.options.openaiToken,
dangerouslyAllowBrowser: true, dangerouslyAllowBrowser: true,
}); });
this.smartpdfInstance = new plugins.smartpdf.SmartPdf();
} }
async stop() {} public async stop() {}
public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> { public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
// TODO: implement for OpenAI // TODO: implement for OpenAI
@ -30,16 +35,14 @@ export class OpenAiProvider extends MultiModalModel {
} }
// Implementing the synchronous chat interaction // Implementing the synchronous chat interaction
public async chat( public async chat(optionsArg: {
optionsArg: { systemMessage: string;
systemMessage: string, userMessage: string;
userMessage: string, messageHistory: {
messageHistory: { role: 'assistant' | 'user';
role: 'assistant' | 'user'; content: string;
content: string; }[];
}[] }) {
}
) {
const result = await this.openAiApiClient.chat.completions.create({ const result = await this.openAiApiClient.chat.completions.create({
model: 'gpt-4-turbo-preview', model: 'gpt-4-turbo-preview',
@ -50,7 +53,8 @@ export class OpenAiProvider extends MultiModalModel {
], ],
}); });
return { return {
message: result.choices[0].message, role: result.choices[0].message.role as 'assistant',
message: result.choices[0].message.content,
}; };
} }
@ -69,30 +73,58 @@ export class OpenAiProvider extends MultiModalModel {
} }
public async document(optionsArg: { public async document(optionsArg: {
systemMessage: string, systemMessage: string;
userMessage: string, userMessage: string;
documents: Uint8Array[], pdfDocuments: Uint8Array[];
messageHistory: { messageHistory: {
role: 'assistant' | 'user'; role: 'assistant' | 'user';
content: any; content: any;
}[]; }[];
}) { }) {
let pdfDocumentImageBytesArray: Uint8Array[] = [];
for (const pdfDocument of optionsArg.pdfDocuments) {
const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
pdfDocumentImageBytesArray = pdfDocumentImageBytesArray.concat(documentImageArray);
}
console.log(`image smartfile array`);
console.log(pdfDocumentImageBytesArray.map((smartfile) => smartfile.length));
const smartfileArray = await plugins.smartarray.map(
pdfDocumentImageBytesArray,
async (pdfDocumentImageBytes) => {
return plugins.smartfile.SmartFile.fromBuffer(
'pdfDocumentImage.jpg',
Buffer.from(pdfDocumentImageBytes)
);
}
);
const result = await this.openAiApiClient.chat.completions.create({ const result = await this.openAiApiClient.chat.completions.create({
model: 'gpt-4-vision-preview', model: 'gpt-4-vision-preview',
// response_format: { type: "json_object" }, // not supported for now
messages: [ messages: [
{ role: 'system', content: optionsArg.systemMessage }, { role: 'system', content: optionsArg.systemMessage },
...optionsArg.messageHistory, ...optionsArg.messageHistory,
{ role: 'user', content: [ {
{type: 'text', text: optionsArg.userMessage}, role: 'user',
...(() => { content: [
const returnArray = []; { type: 'text', text: optionsArg.userMessage },
for (const document of optionsArg.documents) { ...(() => {
returnArray.push({type: 'image_url', image_url: }) const returnArray = [];
} for (const imageBytes of pdfDocumentImageBytesArray) {
return returnArray; returnArray.push({
})() type: 'image_url',
] }, image_url: {
url: 'data:image/png;base64,' + Buffer.from(imageBytes).toString('base64'),
},
});
}
return returnArray;
})(),
],
},
], ],
}); });
return { return {