fix(core): update
parent 1ce412fd00
commit 158d49fa95
package.json
@@ -24,12 +24,14 @@
   },
   "dependencies": {
     "@anthropic-ai/sdk": "^0.20.7",
-    "@push.rocks/smartexpose": "^1.0.5",
+    "@push.rocks/smartarray": "^1.0.8",
     "@push.rocks/smartfile": "^11.0.14",
     "@push.rocks/smartpath": "^5.0.18",
+    "@push.rocks/smartpdf": "^3.1.5",
     "@push.rocks/smartpromise": "^4.0.3",
+    "@push.rocks/smartrequest": "^2.0.22",
     "@push.rocks/webstream": "^1.0.8",
-    "openai": "^4.38.3"
+    "openai": "^4.38.5"
   },
   "repository": {
     "type": "git",
pnpm-lock.yaml (generated, 400 changes)
File diff suppressed because it is too large
test/test.ts (81 changes)
@@ -1,5 +1,7 @@
 import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
 import * as qenv from '@push.rocks/qenv';
+import * as smartrequest from '@push.rocks/smartrequest';
+import * as smartfile from '@push.rocks/smartfile';
 
 const testQenv = new qenv.Qenv('./', './.nogit/');
 
@@ -10,8 +12,83 @@ let testSmartai: smartai.SmartAi;
 tap.test('should create a smartai instance', async () => {
   testSmartai = new smartai.SmartAi({
     openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
+    exposeCredentials: {
+      exposedBaseUrl: await testQenv.getEnvVarOnDemand('EXPOSED_BASE_URL'),
+      webdav: {
+        webdavCredentials: {
+          password: await testQenv.getEnvVarOnDemand('WEBDAV_SERVER_TOKEN'),
+          serverUrl: await testQenv.getEnvVarOnDemand('WEBDAV_SERVER_URL'),
+        },
+        webdavSubPath: 'smartai'
+      }
+    }
   });
+  await testSmartai.start();
+});
+
+tap.test('should create chat response with openai', async () => {
+  const userMessage = 'How are you?';
+  const response = await testSmartai.openaiProvider.chat({
+    systemMessage: 'Hello',
+    userMessage: userMessage,
+    messageHistory: [
+    ],
+  });
+  console.log(`userMessage: ${userMessage}`);
+  console.log(response.message.content);
+});
+
+tap.test('should document a pdf', async () => {
+  const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
+  const pdfResponse = await smartrequest.getBinary(pdfUrl);
+  const result = await testSmartai.openaiProvider.document({
+    systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "bank account statement", "contract", "other"',
+    userMessage: "Classify the document.",
+    messageHistory: [],
+    pdfDocuments: [pdfResponse.body],
+  });
+  console.log(result);
+});
+
+tap.test('should recognize companies in a pdf', async () => {
+  const pdfBuffer = await smartfile.fs.toBuffer('./.nogit/demo_without_textlayer.pdf');
+  const result = await testSmartai.openaiProvider.document({
+    systemMessage: `
+      summarize the document.
+
+      answer in JSON format, adhering to the following schema:
+      \`\`\`typescript
+      type TAnswer = {
+        entitySender: {
+          type: 'official state entity' | 'company' | 'person';
+          name: string;
+          address: string;
+          city: string;
+          country: string;
+          EU: boolean; // wether the entity is within EU
+        };
+        entityReceiver: {
+          type: 'official state entity' | 'company' | 'person';
+          name: string;
+          address: string;
+          city: string;
+          country: string;
+          EU: boolean; // wether the entity is within EU
+        };
+        date: string; // the date of the document as YYYY-MM-DD
+        title: string; // a short title, suitable for a filename
+      }
+      \`\`\`
+    `,
+    userMessage: "Classify the document.",
+    messageHistory: [],
+    pdfDocuments: [pdfBuffer],
+  });
+  console.log(result);
 })
 
-tap.start()
+tap.test('should stop the smartai instance', async () => {
+  await testSmartai.stop();
+});
 
+export default tap.start();
ts/00_commitinfo_data.ts
@@ -3,6 +3,6 @@
 */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.0.10',
+  version: '0.0.11',
   description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
 }
ts/classes.smartai.ts
@@ -2,7 +2,7 @@ import { Conversation } from './classes.conversation.js';
 import * as plugins from './plugins.js';
 import type { AnthropicProvider } from './provider.anthropic.js';
 import type { OllamaProvider } from './provider.ollama.js';
-import type { OpenAiProvider } from './provider.openai.js';
+import { OpenAiProvider } from './provider.openai.js';
 import type { PerplexityProvider } from './provider.perplexity.js';
 
 
@@ -10,7 +10,6 @@ export interface ISmartAiOptions {
   openaiToken?: string;
   anthropicToken?: string;
   perplexityToken?: string;
-  exposeCredentials?: plugins.smartexpose.ISmartExposeOptions;
 }
 
 export class SmartAi {
@@ -26,7 +25,12 @@ export class SmartAi {
   }
 
   public async start() {
+    if (this.options.openaiToken) {
+      this.openaiProvider = new OpenAiProvider({
+        openaiToken: this.options.openaiToken,
+      });
+      await this.openaiProvider.start();
+    }
   }
 
   public async stop() {}
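For orientation, here is a minimal sketch of how the reworked start-up flow is consumed after this change. It mirrors the test above; the environment-variable token source is illustrative and not part of the commit, and start() only wires up the OpenAI provider when a token is configured.

```typescript
import * as smartai from '@push.rocks/smartai';

// start() now lazily constructs OpenAiProvider({ openaiToken }) and starts it
// only if an openaiToken was passed in the options.
const ai = new smartai.SmartAi({
  openaiToken: process.env.OPENAI_TOKEN, // illustrative: any string token source works
});
await ai.start();

const response = await ai.openaiProvider.chat({
  systemMessage: 'You are a helpful assistant.',
  userMessage: 'How are you?',
  messageHistory: [],
});
console.log(response.message.content);

await ai.stop();
```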
ts/plugins.ts
@@ -7,18 +7,22 @@ export {
 
 // @push.rocks scope
 import * as qenv from '@push.rocks/qenv';
-import * as smartexpose from '@push.rocks/smartexpose';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartpromise from '@push.rocks/smartpromise';
+import * as smartarray from '@push.rocks/smartarray';
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartpath from '@push.rocks/smartpath';
+import * as smartpdf from '@push.rocks/smartpdf';
+import * as smartpromise from '@push.rocks/smartpromise';
+import * as smartrequest from '@push.rocks/smartrequest';
 import * as webstream from '@push.rocks/webstream';
 
 export {
+  smartarray,
   qenv,
-  smartexpose,
-  smartpath,
-  smartpromise,
   smartfile,
+  smartpath,
+  smartpdf,
+  smartpromise,
+  smartrequest,
   webstream,
 }
 
ts/provider.openai.ts
@@ -3,24 +3,29 @@ import * as paths from './paths.js';
 
 import { MultiModalModel } from './abstract.classes.multimodal.js';
 
-export class OpenAiProvider extends MultiModalModel {
-  public smartexposeInstance: plugins.smartexpose.SmartExpose;
-  private openAiToken: string;
-  public openAiApiClient: plugins.openai.default;
+export interface IOpenaiProviderOptions {
+  openaiToken: string;
+}
 
-  constructor(openaiToken: string, expose) {
+export class OpenAiProvider extends MultiModalModel {
+  private options: IOpenaiProviderOptions;
+  public openAiApiClient: plugins.openai.default;
+  public smartpdfInstance: plugins.smartpdf.SmartPdf;
+
+  constructor(optionsArg: IOpenaiProviderOptions) {
     super();
-    this.openAiToken = openaiToken; // Ensure the token is stored
+    this.options = optionsArg;
   }
 
-  async start() {
+  public async start() {
     this.openAiApiClient = new plugins.openai.default({
-      apiKey: this.openAiToken,
+      apiKey: this.options.openaiToken,
       dangerouslyAllowBrowser: true,
     });
+    this.smartpdfInstance = new plugins.smartpdf.SmartPdf();
   }
 
-  async stop() {}
+  public async stop() {}
 
   public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
     // TODO: implement for OpenAI
@@ -30,16 +35,14 @@ export class OpenAiProvider extends MultiModalModel {
   }
 
   // Implementing the synchronous chat interaction
-  public async chat(
-    optionsArg: {
-      systemMessage: string,
-      userMessage: string,
-      messageHistory: {
-        role: 'assistant' | 'user';
-        content: string;
-      }[]
-    }
-  ) {
+  public async chat(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
+    messageHistory: {
+      role: 'assistant' | 'user';
+      content: string;
+    }[];
+  }) {
     const result = await this.openAiApiClient.chat.completions.create({
       model: 'gpt-4-turbo-preview',
 
@@ -69,30 +72,58 @@ export class OpenAiProvider extends MultiModalModel {
   }
 
   public async document(optionsArg: {
-    systemMessage: string,
-    userMessage: string,
-    documents: Uint8Array[],
+    systemMessage: string;
+    userMessage: string;
+    pdfDocuments: Uint8Array[];
     messageHistory: {
       role: 'assistant' | 'user';
       content: any;
     }[];
   }) {
+    let pdfDocumentImageBytesArray: Uint8Array[] = [];
+
+    for (const pdfDocument of optionsArg.pdfDocuments) {
+      const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
+      pdfDocumentImageBytesArray = pdfDocumentImageBytesArray.concat(documentImageArray);
+    }
+
+    console.log(`image smartfile array`);
+    console.log(pdfDocumentImageBytesArray.map((smartfile) => smartfile.length));
+
+    const smartfileArray = await plugins.smartarray.map(
+      pdfDocumentImageBytesArray,
+      async (pdfDocumentImageBytes) => {
+        return plugins.smartfile.SmartFile.fromBuffer(
+          'pdfDocumentImage.jpg',
+          Buffer.from(pdfDocumentImageBytes)
+        );
+      }
+    );
+
     const result = await this.openAiApiClient.chat.completions.create({
       model: 'gpt-4-vision-preview',
+      // response_format: { type: "json_object" }, // not supported for now
       messages: [
         { role: 'system', content: optionsArg.systemMessage },
         ...optionsArg.messageHistory,
-        { role: 'user', content: [
-          {type: 'text', text: optionsArg.userMessage},
+        {
+          role: 'user',
+          content: [
+            { type: 'text', text: optionsArg.userMessage },
            ...(() => {
              const returnArray = [];
-             for (const document of optionsArg.documents) {
-               returnArray.push({type: 'image_url', image_url: })
+             for (const imageBytes of pdfDocumentImageBytesArray) {
+               returnArray.push({
+                 type: 'image_url',
+                 image_url: {
+                   url: 'data:image/png;base64,' + Buffer.from(imageBytes).toString('base64'),
+                 },
+               });
             }
             return returnArray;
-           })()
-        ] },
+           })(),
+          ],
+        },
       ],
     });
     return {
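Per the hunk above, the reworked document() now takes raw PDF bytes, renders each PDF to PNG pages through the provider's SmartPdf instance, and attaches the pages as base64 data URLs to the gpt-4-vision-preview request. A minimal sketch of calling it through a started SmartAi instance, with the file path and the exact prompt wording purely illustrative:

```typescript
import * as smartai from '@push.rocks/smartai';
import * as smartfile from '@push.rocks/smartfile';

const ai = new smartai.SmartAi({ openaiToken: process.env.OPENAI_TOKEN }); // illustrative token source
await ai.start();

// pdfDocuments takes Uint8Array buffers; each PDF is converted to PNG page images
// internally and sent as data:image/png;base64 URLs in the vision request.
const pdfBuffer = await smartfile.fs.toBuffer('./some-document.pdf'); // illustrative path
const classification = await ai.openaiProvider.document({
  systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "contract", "other"',
  userMessage: 'Classify the document.',
  messageHistory: [],
  pdfDocuments: [pdfBuffer],
});
console.log(classification);
```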