Compare commits
17 Commits
490d4996d2
f099a8f1ed
a0228a0abc
a5257b52e7
a4144fc071
af46b3e81e
d50427937c
ffde2e0bf1
82abc06da4
3a5f2d52e5
f628a71184
d1465fc868
9e19d320e1
158d49fa95
1ce412fd00
92c382c16e
63d3b7c9bb
npmextra.json
@@ -5,18 +5,20 @@
     "githost": "code.foss.global",
     "gitscope": "push.rocks",
     "gitrepo": "smartai",
-    "description": "Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.",
+    "description": "A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.",
     "npmPackagename": "@push.rocks/smartai",
     "license": "MIT",
     "projectDomain": "push.rocks",
     "keywords": [
-      "AI models integration",
-      "OpenAI GPT",
-      "Anthropic AI",
-      "text-to-speech",
-      "conversation stream",
+      "AI integration",
+      "chatbot",
       "TypeScript",
-      "ESM"
+      "OpenAI",
+      "Anthropic",
+      "multi-model support",
+      "audio responses",
+      "text-to-speech",
+      "streaming chat"
     ]
   }
 },
package.json (40 changed lines)
@@ -1,8 +1,8 @@
 {
   "name": "@push.rocks/smartai",
-  "version": "0.0.9",
+  "version": "0.0.17",
   "private": false,
-  "description": "Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.",
+  "description": "A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "type": "module",
@@ -14,20 +14,24 @@
     "buildDocs": "(tsdoc)"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.25",
+    "@git.zone/tsbuild": "^2.1.76",
     "@git.zone/tsbundle": "^2.0.5",
     "@git.zone/tsrun": "^1.2.46",
-    "@git.zone/tstest": "^1.0.44",
-    "@push.rocks/tapbundle": "^5.0.15",
-    "@types/node": "^20.8.7"
+    "@git.zone/tstest": "^1.0.90",
+    "@push.rocks/qenv": "^6.0.5",
+    "@push.rocks/tapbundle": "^5.0.23",
+    "@types/node": "^20.12.12"
   },
   "dependencies": {
-    "@anthropic-ai/sdk": "^0.19.1",
-    "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/smartfile": "^11.0.4",
-    "@push.rocks/smartpath": "^5.0.11",
+    "@anthropic-ai/sdk": "^0.21.0",
+    "@push.rocks/smartarray": "^1.0.8",
+    "@push.rocks/smartfile": "^11.0.14",
+    "@push.rocks/smartpath": "^5.0.18",
+    "@push.rocks/smartpdf": "^3.1.6",
     "@push.rocks/smartpromise": "^4.0.3",
-    "openai": "^4.31.0"
+    "@push.rocks/smartrequest": "^2.0.22",
+    "@push.rocks/webstream": "^1.0.8",
+    "openai": "^4.47.1"
   },
   "repository": {
     "type": "git",
@@ -53,12 +57,14 @@
     "readme.md"
   ],
   "keywords": [
-    "AI models integration",
-    "OpenAI GPT",
-    "Anthropic AI",
-    "text-to-speech",
-    "conversation stream",
+    "AI integration",
+    "chatbot",
     "TypeScript",
-    "ESM"
+    "OpenAI",
+    "Anthropic",
+    "multi-model support",
+    "audio responses",
+    "text-to-speech",
+    "streaming chat"
   ]
 }
pnpm-lock.yaml (7477 changed lines, generated)
File diff suppressed because it is too large.
qenv.yml (4 changed lines)
@@ -1,2 +1,4 @@
 required:
   - OPENAI_TOKEN
+  - ANTHROPIC_TOKEN
+  - PERPLEXITY_TOKEN
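The two new required tokens follow the same loading pattern as OPENAI_TOKEN. A minimal sketch of how they are consumed, assuming the qenv `.nogit/` convention used by test/test.ts later in this changeset:

```typescript
import * as qenv from '@push.rocks/qenv';
import { SmartAi } from '@push.rocks/smartai';

// qenv resolves each required variable from the environment or from ./.nogit/
const testQenv = new qenv.Qenv('./', './.nogit/');

// top-level await assumes an ESM module context
const smartAi = new SmartAi({
  openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
  anthropicToken: await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN'),
  perplexityToken: await testQenv.getEnvVarOnDemand('PERPLEXITY_TOKEN'),
});
await smartAi.start();
```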
readme.hints.md (new file, 1 line)
@@ -0,0 +1 @@
+
readme.md (107 changed lines)
@@ -1,108 +1,95 @@
 # @push.rocks/smartai
-a standardized interface to talk to AI models
+Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.
 
 ## Install
-To install `@push.rocks/smartai`, run the following command in your terminal:
+To add @push.rocks/smartai to your project, run the following command in your terminal:
 
 ```bash
 npm install @push.rocks/smartai
 ```
 
-This will add the package to your project's dependencies.
+This command installs the package and adds it to your project's dependencies.
 
 ## Usage
 
-In the following guide, you'll learn how to leverage `@push.rocks/smartai` for integrating AI models into your applications using TypeScript with ESM syntax.
+The `@push.rocks/smartai` package is a comprehensive solution for integrating and interacting with various AI models, designed to support operations ranging from chat interactions to possibly handling audio responses. This documentation will guide you through the process of utilizing `@push.rocks/smartai` in your applications, focusing on TypeScript and ESM syntax to demonstrate its full capabilities.
 
 ### Getting Started
 
-First, you'll need to import the necessary modules from `@push.rocks/smartai`. This typically includes the main `SmartAi` class along with any specific provider classes you intend to use, such as `OpenAiProvider` or `AnthropicProvider`.
+Before you begin, ensure you have installed the package in your project as described in the **Install** section above. Once installed, you can start integrating AI functionalities into your application.
+
+### Initializing SmartAi
+
+The first step is to import and initialize the `SmartAi` class with appropriate options, including tokens for the AI services you plan to use:
 
 ```typescript
-import { SmartAi, OpenAiProvider, AnthropicProvider } from '@push.rocks/smartai';
-```
-
-### Initialization
-
-Create an instance of `SmartAi` by providing the required options, which include authentication tokens for the AI providers you plan to use.
-
-```typescript
+import { SmartAi } from '@push.rocks/smartai';
+
 const smartAi = new SmartAi({
-  openaiToken: 'your-openai-token-here',
-  anthropicToken: 'your-anthropic-token-here'
+  openaiToken: 'your-openai-access-token',
+  anthropicToken: 'your-anthropic-access-token'
 });
+
+await smartAi.start();
 ```
 
-### Creating a Conversation
+### Creating Conversations with AI
 
-`@push.rocks/smartai` offers a versatile way to handle conversations with AI. To create a conversation using OpenAI, for instance:
+`SmartAi` provides a flexible interface to create and manage conversations with different AI providers. You can create a conversation with any supported AI provider like OpenAI or Anthropic by specifying the provider you want to use:
 
 ```typescript
-async function createOpenAiConversation() {
-  const conversation = await smartAi.createOpenApiConversation();
-}
+const openAiConversation = await smartAi.createConversation('openai');
+const anthropicConversation = await smartAi.createConversation('anthropic');
 ```
 
-For Anthropic-based conversations:
+### Chatting with AI
+
+Once you have a conversation instance, you can start sending messages to the AI and receive responses. Each conversation object provides methods to interact in a synchronous or asynchronous manner, depending on your use case.
+
+#### Synchronous Chat Example
+
+Here's how you can have a synchronous chat with OpenAI:
 
 ```typescript
-async function createAnthropicConversation() {
-  const conversation = await smartAi.createAnthropicConversation();
-}
+const response = await openAiConversation.chat({
+  systemMessage: 'This is a greeting from the system.',
+  userMessage: 'Hello, AI! How are you today?',
+  messageHistory: [] // Previous messages in the conversation
+});
+
+console.log(response.message); // Log the response from AI
 ```
 
-### Advanced Usage: Streaming and Chat
+#### Streaming Chat Example
 
-Advanced use cases might require direct access to the streaming APIs provided by the AI models. For instance, handling a chat stream with OpenAI can be achieved as follows:
+For real-time, streaming interactions, you can utilize the streaming capabilities provided by the conversation object. This enables a continuous exchange of messages between your application and the AI:
 
-#### Set Up the Conversation Stream
-
-First, create a conversation and obtain the input and output streams.
-
 ```typescript
-const conversation = await smartAi.createOpenApiConversation();
-const inputStreamWriter = conversation.getInputStreamWriter();
-const outputStream = conversation.getOutputStream();
-```
+const inputStreamWriter = openAiConversation.getInputStreamWriter();
+const outputStream = openAiConversation.getOutputStream();
 
-#### Write to Input Stream
+inputStreamWriter.write('Hello, AI! Can you stream responses?');
 
-To send messages to the AI model, use the input stream writer.
-
-```typescript
-await inputStreamWriter.write('Hello, SmartAI!');
-```
-
-#### Processing Output Stream
-
-Output from the AI model can be processed by reading from the output stream.
-
-```typescript
 const reader = outputStream.getReader();
-reader.read().then(function processText({ done, value }) {
+reader.read().then(function processText({done, value}) {
   if (done) {
-    console.log("Stream complete");
+    console.log('Stream finished.');
     return;
   }
-  console.log("Received from AI:", value);
-  reader.read().then(processText);
+  console.log('AI says:', value);
+  reader.read().then(processText); // Continue reading messages
 });
 ```
 
-### Handling Audio
+### Extending Conversations
 
-`@push.rocks/smartai` also supports handling audio responses from AI models. To generate and retrieve audio output:
+The modular design of `@push.rocks/smartai` allows you to extend conversations with additional features, such as handling audio responses or integrating other AI-powered functionalities. Utilize the provided AI providers' APIs to explore and implement a wide range of AI interactions within your conversations.
 
-```typescript
-const tts = await TTS.createWithOpenAi(smartAi);
-```
-
-This code snippet initializes text-to-speech (TTS) capabilities using the OpenAI model. Further customization and usage of audio APIs will depend on the capabilities offered by the specific AI model and provider you are working with.
-
 ### Conclusion
 
-`@push.rocks/smartai` offers a flexible and standardized interface for interacting with AI models, streamlining the development of applications that leverage AI capabilities. Through the outlined examples, you've seen how to initialize the library, create conversations, and handle both text and audio interactions with AI models in a TypeScript environment following ESM syntax.
+With `@push.rocks/smartai`, integrating AI functionalities into your applications becomes streamlined and efficient. By leveraging the standardized interface provided by the package, you can easily converse with multiple AI models, expanding the capabilities of your applications with cutting-edge AI features. Whether you're implementing simple chat interactions or complex, real-time communication flows, `@push.rocks/smartai` offers the tools and flexibility needed to create engaging, AI-enhanced experiences.
 
-For a comprehensive understanding of all features and to explore more advanced use cases, refer to the official [documentation](https://code.foss.global/push.rocks/smartai#readme) and check the `npmextra.json` file's `tsdocs` section for additional insights on module usage.
-
 ## License and Legal Information
 
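The rewritten readme drops the old TTS snippet; the OpenAiProvider in this changeset exposes an `audio()` method instead (see ts/provider.openai.ts below). A usage sketch, with an illustrative output path:

```typescript
import * as fs from 'fs';

// assumes a started SmartAi instance with an openaiToken configured
const audioStream = await smartAi.openaiProvider.audio({
  message: 'Hello from smartai!',
});
// pipe the mp3 stream to disk; the path is illustrative
audioStream.pipe(fs.createWriteStream('./.nogit/hello.mp3'));
```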
test/test.ts (84 changed lines)
@@ -1,8 +1,84 @@
 import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
-import * as smartai from '../ts/index.js'
+import * as qenv from '@push.rocks/qenv';
+import * as smartrequest from '@push.rocks/smartrequest';
+import * as smartfile from '@push.rocks/smartfile';
 
-tap.test('first test', async () => {
-  console.log(smartai)
+const testQenv = new qenv.Qenv('./', './.nogit/');
+
+import * as smartai from '../ts/index.js';
+
+let testSmartai: smartai.SmartAi;
+
+tap.test('should create a smartai instance', async () => {
+  testSmartai = new smartai.SmartAi({
+    openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),
+  });
+  await testSmartai.start();
+});
+
+tap.test('should create chat response with openai', async () => {
+  const userMessage = 'How are you?';
+  const response = await testSmartai.openaiProvider.chat({
+    systemMessage: 'Hello',
+    userMessage: userMessage,
+    messageHistory: [
+    ],
+  });
+  console.log(`userMessage: ${userMessage}`);
+  console.log(response.message);
+});
+
+tap.test('should document a pdf', async () => {
+  const pdfUrl = 'https://www.w3.org/WAI/ER/tests/xhtml/testfiles/resources/pdf/dummy.pdf';
+  const pdfResponse = await smartrequest.getBinary(pdfUrl);
+  const result = await testSmartai.openaiProvider.document({
+    systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "bank account statement", "contract", "other"',
+    userMessage: "Classify the document.",
+    messageHistory: [],
+    pdfDocuments: [pdfResponse.body],
+  });
+  console.log(result);
+});
+
+tap.test('should recognize companies in a pdf', async () => {
+  const pdfBuffer = await smartfile.fs.toBuffer('./.nogit/demo_without_textlayer.pdf');
+  const result = await testSmartai.openaiProvider.document({
+    systemMessage: `
+      summarize the document.
+
+      answer in JSON format, adhering to the following schema:
+      \`\`\`typescript
+      type TAnswer = {
+        entitySender: {
+          type: 'official state entity' | 'company' | 'person';
+          name: string;
+          address: string;
+          city: string;
+          country: string;
+          EU: boolean; // wether the entity is within EU
+        };
+        entityReceiver: {
+          type: 'official state entity' | 'company' | 'person';
+          name: string;
+          address: string;
+          city: string;
+          country: string;
+          EU: boolean; // wether the entity is within EU
+        };
+        date: string; // the date of the document as YYYY-MM-DD
+        title: string; // a short title, suitable for a filename
+      }
+      \`\`\`
+    `,
+    userMessage: "Classify the document.",
+    messageHistory: [],
+    pdfDocuments: [pdfBuffer],
+  });
+  console.log(result);
 })
 
-tap.start()
+tap.test('should stop the smartai instance', async () => {
+  await testSmartai.stop();
+});
+
+export default tap.start();
@@ -3,6 +3,6 @@
 */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.0.9',
-  description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
+  version: '0.0.17',
+  description: 'A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.'
 }
@@ -8,8 +8,25 @@ export abstract class MultiModalModel
    * stops the model
    */
   abstract stop(): Promise<void>;
 
+  public abstract chat(optionsArg: {
+    systemMessage: string,
+    userMessage: string,
+    messageHistory: {
+      role: 'assistant' | 'user';
+      content: string;
+    }[]
+  }): Promise<{
+    role: 'assistant';
+    message: string;
+  }>
+
-  // Defines a streaming interface for chat interactions.
-  // The implementation will vary based on the specific AI model.
-  abstract chatStream(input: ReadableStream<string>): ReadableStream<string>;
+  /**
+   * Defines a streaming interface for chat interactions.
+   * The implementation will vary based on the specific AI model.
+   * @param input
+   */
+  public abstract chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>>;
+
 }
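With `chat()` and `chatStream()` now part of the abstract contract, every provider must implement both signatures. A minimal compliant provider, sketched against the members visible in this hunk; the `EchoProvider` name and echo behavior are illustrative only:

```typescript
import { MultiModalModel } from './abstract.classes.multimodal.js';

class EchoProvider extends MultiModalModel {
  public async start() {}
  public async stop() {}

  public async chat(optionsArg: {
    systemMessage: string,
    userMessage: string,
    messageHistory: { role: 'assistant' | 'user'; content: string; }[]
  }): Promise<{ role: 'assistant'; message: string; }> {
    // illustrative: echo the user message back
    return { role: 'assistant', message: `echo: ${optionsArg.userMessage}` };
  }

  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
    // illustrative: route each incoming chunk through one chat() round-trip
    const self = this;
    return new ReadableStream<string>({
      async start(controller) {
        const reader = input.getReader();
        let result = await reader.read();
        while (!result.done) {
          const response = await self.chat({
            systemMessage: '',
            userMessage: result.value,
            messageHistory: [],
          });
          controller.enqueue(response.message);
          result = await reader.read();
        }
        controller.close();
      },
    });
  }
}
```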
@@ -12,9 +12,11 @@ export interface IConversationOptions
 */
 export class Conversation {
   // STATIC
-  public static async createWithOpenAi(smartaiRef: SmartAi) {
-    const openaiProvider = new OpenAiProvider(smartaiRef.options.openaiToken);
-    const conversation = new Conversation(smartaiRef, {
+  public static async createWithOpenAi(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.openaiProvider) {
+      throw new Error('OpenAI provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
       processFunction: async (input) => {
         return '' // TODO implement proper streaming
       }
@@ -22,9 +24,11 @@ export class Conversation {
     return conversation;
   }
 
-  public static async createWithAnthropic(smartaiRef: SmartAi) {
-    const anthropicProvider = new OpenAiProvider(smartaiRef.options.anthropicToken);
-    const conversation = new Conversation(smartaiRef, {
+  public static async createWithAnthropic(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.anthropicProvider) {
+      throw new Error('Anthropic provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
       processFunction: async (input) => {
         return '' // TODO implement proper streaming
       }
@@ -32,6 +36,29 @@ export class Conversation {
     return conversation;
   }
 
+  public static async createWithPerplexity(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.perplexityProvider) {
+      throw new Error('Perplexity provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
+  public static async createWithOllama(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.ollamaProvider) {
+      throw new Error('Ollama provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
   // INSTANCE
   smartaiRef: SmartAi
@@ -44,8 +71,8 @@ export class Conversation {
     this.processFunction = options.processFunction;
   }
 
-  setSystemMessage(systemMessage: string) {
-    this.systemMessage = systemMessage;
+  public async setSystemMessage(systemMessageArg: string) {
+    this.systemMessage = systemMessageArg;
   }
 
   private setupOutputStream(): ReadableStream<string> {
@@ -57,7 +84,7 @@ export class Conversation {
   }
 
   private setupInputStream(): WritableStream<string> {
-    return new WritableStream<string>({
+    const writableStream = new WritableStream<string>({
       write: async (chunk) => {
         const processedData = await this.processFunction(chunk);
         if (this.outputStreamController) {
@@ -72,6 +99,7 @@ export class Conversation {
           this.outputStreamController?.error(err);
         }
     });
+    return writableStream;
   }
 
   public getInputStreamWriter(): WritableStreamDefaultWriter<string> {
@@ -1,30 +1,62 @@
 import { Conversation } from './classes.conversation.js';
 import * as plugins from './plugins.js';
+import { AnthropicProvider } from './provider.anthropic.js';
+import type { OllamaProvider } from './provider.ollama.js';
+import { OpenAiProvider } from './provider.openai.js';
+import type { PerplexityProvider } from './provider.perplexity.js';
 
 export interface ISmartAiOptions {
-  openaiToken: string;
-  anthropicToken: string;
+  openaiToken?: string;
+  anthropicToken?: string;
+  perplexityToken?: string;
 }
 
+export type TProvider = 'openai' | 'anthropic' | 'perplexity' | 'ollama';
+
 export class SmartAi {
   public options: ISmartAiOptions;
 
+  public openaiProvider: OpenAiProvider;
+  public anthropicProvider: AnthropicProvider;
+  public perplexityProvider: PerplexityProvider;
+  public ollamaProvider: OllamaProvider;
+
   constructor(optionsArg: ISmartAiOptions) {
     this.options = optionsArg;
   }
 
-  /**
-   * creates an OpenAI conversation
-   */
-  public async createOpenApiConversation() {
-    const conversation = await Conversation.createWithOpenAi(this);
+  public async start() {
+    if (this.options.openaiToken) {
+      this.openaiProvider = new OpenAiProvider({
+        openaiToken: this.options.openaiToken,
+      });
+      await this.openaiProvider.start();
+    }
+    if (this.options.anthropicToken) {
+      this.anthropicProvider = new AnthropicProvider({
+        anthropicToken: this.options.anthropicToken,
+      });
+    }
   }
 
+  public async stop() {}
+
   /**
-   * creates an OpenAI conversation
+   * create a new conversation
   */
-  public async createAnthropicConversation() {
-    const conversation = await Conversation.createWithAnthropic(this);
+  createConversation(provider: TProvider) {
+    switch (provider) {
+      case 'openai':
+        return Conversation.createWithOpenAi(this);
+      case 'anthropic':
+        return Conversation.createWithAnthropic(this);
+      case 'perplexity':
+        return Conversation.createWithPerplexity(this);
+      case 'ollama':
+        return Conversation.createWithOllama(this);
+      default:
+        throw new Error('Provider not available');
+    }
   }
 }
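Taken together with the guards in classes.conversation.ts above, `createConversation` only succeeds for providers whose tokens were configured before `start()`. A usage sketch with an illustrative token:

```typescript
const smartAi = new SmartAi({ openaiToken: 'sk-...' }); // illustrative token
await smartAi.start(); // instantiates openaiProvider only

const conversation = await smartAi.createConversation('openai'); // ok
// await smartAi.createConversation('anthropic'); // would throw 'Anthropic provider not available'
```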
ts/interfaces.ts (new file, empty)
@@ -7,15 +7,23 @@ export {
 
 // @push.rocks scope
 import * as qenv from '@push.rocks/qenv';
-import * as smartpath from '@push.rocks/smartpath';
-import * as smartpromise from '@push.rocks/smartpromise';
+import * as smartarray from '@push.rocks/smartarray';
 import * as smartfile from '@push.rocks/smartfile';
+import * as smartpath from '@push.rocks/smartpath';
+import * as smartpdf from '@push.rocks/smartpdf';
+import * as smartpromise from '@push.rocks/smartpromise';
+import * as smartrequest from '@push.rocks/smartrequest';
+import * as webstream from '@push.rocks/webstream';
 
 export {
+  smartarray,
   qenv,
-  smartpath,
-  smartpromise,
   smartfile,
+  smartpath,
+  smartpdf,
+  smartpromise,
+  smartrequest,
+  webstream,
 }
 
 // third party
@@ -2,74 +2,61 @@ import * as plugins from './plugins.js';
 import * as paths from './paths.js';
 import { MultiModalModel } from './abstract.classes.multimodal.js';
 
+export interface IAnthropicProviderOptions {
+  anthropicToken: string;
+}
+
 export class AnthropicProvider extends MultiModalModel {
-  private anthropicToken: string;
+  private options: IAnthropicProviderOptions;
   public anthropicApiClient: plugins.anthropic.default;
 
-  constructor(anthropicToken: string) {
+  constructor(optionsArg: IAnthropicProviderOptions) {
     super();
-    this.anthropicToken = anthropicToken; // Ensure the token is stored
+    this.options = optionsArg // Ensure the token is stored
   }
 
   async start() {
     this.anthropicApiClient = new plugins.anthropic.default({
-      apiKey: this.anthropicToken,
+      apiKey: this.options.anthropicToken,
     });
   }
 
   async stop() {}
 
-  chatStream(input: ReadableStream<string>): ReadableStream<string> {
-    const decoder = new TextDecoder();
-    let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
-
-    return new ReadableStream({
-      async start(controller) {
-        const reader = input.getReader();
-        try {
-          let done, value;
-          while ((({ done, value } = await reader.read()), !done)) {
-            const userMessage = decoder.decode(value, { stream: true });
-            messageHistory.push({ role: 'user', content: userMessage });
-            const aiResponse = await this.chat('', userMessage, messageHistory);
-            messageHistory.push({ role: 'assistant', content: aiResponse.message });
-            // Directly enqueue the string response instead of encoding it first
-            controller.enqueue(aiResponse.message);
-          }
-          controller.close();
-        } catch (err) {
-          controller.error(err);
-        }
-      },
-    });
+  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
+    // TODO: implement for OpenAI
+
+    const returnStream = new ReadableStream();
+    return returnStream;
   }
 
   // Implementing the synchronous chat interaction
-  public async chat(
-    systemMessage: string,
-    userMessage: string,
+  public async chat(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
     messageHistory: {
       role: 'assistant' | 'user';
       content: string;
-    }[]
-  ) {
+    }[];
+  }) {
     const result = await this.anthropicApiClient.messages.create({
       model: 'claude-3-opus-20240229',
-      system: systemMessage,
+      system: optionsArg.systemMessage,
       messages: [
-        ...messageHistory,
-        { role: 'user', content: userMessage },
+        ...optionsArg.messageHistory,
+        { role: 'user', content: optionsArg.userMessage },
       ],
       max_tokens: 4000,
     });
 
     return {
-      message: result.content,
+      role: result.role as 'assistant',
+      message: result.content.join('\n'),
     };
   }
 
-  public async audio(messageArg: string) {
+  private async audio(messageArg: string) {
     // Anthropic does not provide an audio API, so this method is not implemented.
-    throw new Error('Audio generation is not supported by Anthropic.');
+    throw new Error('Audio generation is not yet supported by Anthropic.');
   }
 }
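The provider's `chat()` now takes a single options object instead of positional arguments. A call-site sketch under the new signature; message values are illustrative:

```typescript
// assumes a SmartAi instance whose Anthropic provider has been constructed and started
const answer = await smartAi.anthropicProvider.chat({
  systemMessage: 'You are a concise assistant.',
  userMessage: 'Summarize the smartai package in one sentence.',
  messageHistory: [],
});
console.log(answer.role);    // 'assistant'
console.log(answer.message);
```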
ts/provider.ollama.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
+import * as plugins from './plugins.js';
+
+export class OllamaProvider {}
@@ -3,87 +3,132 @@ import * as paths from './paths.js';
 
 import { MultiModalModel } from './abstract.classes.multimodal.js';
 
-export class OpenAiProvider extends MultiModalModel {
-  private openAiToken: string;
-  public openAiApiClient: plugins.openai.default;
+export interface IOpenaiProviderOptions {
+  openaiToken: string;
+}
 
-  constructor(openaiToken: string) {
+export class OpenAiProvider extends MultiModalModel {
+  private options: IOpenaiProviderOptions;
+  public openAiApiClient: plugins.openai.default;
+  public smartpdfInstance: plugins.smartpdf.SmartPdf;
+
+  constructor(optionsArg: IOpenaiProviderOptions) {
     super();
-    this.openAiToken = openaiToken; // Ensure the token is stored
+    this.options = optionsArg;
   }
 
-  async start() {
+  public async start() {
     this.openAiApiClient = new plugins.openai.default({
-      apiKey: this.openAiToken,
+      apiKey: this.options.openaiToken,
       dangerouslyAllowBrowser: true,
     });
+    this.smartpdfInstance = new plugins.smartpdf.SmartPdf();
   }
 
-  async stop() {}
+  public async stop() {}
 
-  chatStream(input: ReadableStream<string>): ReadableStream<string> {
-    const decoder = new TextDecoder();
-    let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];
-
-    return new ReadableStream({
-      async start(controller) {
-        const reader = input.getReader();
-        try {
-          let done, value;
-          while ((({ done, value } = await reader.read()), !done)) {
-            const userMessage = decoder.decode(value, { stream: true });
-            messageHistory.push({ role: 'user', content: userMessage });
-            const aiResponse = await this.chat('', userMessage, messageHistory);
-            messageHistory.push({ role: 'assistant', content: aiResponse.message });
-            // Directly enqueue the string response instead of encoding it first
-            controller.enqueue(aiResponse.message);
-          }
-          controller.close();
-        } catch (err) {
-          controller.error(err);
-        }
-      },
-    });
+  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
+    // TODO: implement for OpenAI
+
+    const returnStream = new ReadableStream();
+    return returnStream;
   }
 
   // Implementing the synchronous chat interaction
-  public async chat(
-    systemMessage: string,
-    userMessage: string,
+  public async chat(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
     messageHistory: {
       role: 'assistant' | 'user';
       content: string;
-    }[]
-  ) {
+    }[];
+  }) {
     const result = await this.openAiApiClient.chat.completions.create({
-      model: 'gpt-4-turbo-preview',
+      model: 'gpt-4o',
       messages: [
-        { role: 'system', content: systemMessage },
-        ...messageHistory,
-        { role: 'user', content: userMessage },
+        { role: 'system', content: optionsArg.systemMessage },
+        ...optionsArg.messageHistory,
+        { role: 'user', content: optionsArg.userMessage },
+      ],
+    });
+    return {
+      role: result.choices[0].message.role as 'assistant',
+      message: result.choices[0].message.content,
+    };
+  }
+
+  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
+    const done = plugins.smartpromise.defer<NodeJS.ReadableStream>();
+    const result = await this.openAiApiClient.audio.speech.create({
+      model: 'tts-1-hd',
+      input: optionsArg.message,
+      voice: 'nova',
+      response_format: 'mp3',
+      speed: 1,
+    });
+    const stream = result.body;
+    done.resolve(stream);
+    return done.promise;
+  }
+
+  public async document(optionsArg: {
+    systemMessage: string;
+    userMessage: string;
+    pdfDocuments: Uint8Array[];
+    messageHistory: {
+      role: 'assistant' | 'user';
+      content: any;
+    }[];
+  }) {
+    let pdfDocumentImageBytesArray: Uint8Array[] = [];
+
+    for (const pdfDocument of optionsArg.pdfDocuments) {
+      const documentImageArray = await this.smartpdfInstance.convertPDFToPngBytes(pdfDocument);
+      pdfDocumentImageBytesArray = pdfDocumentImageBytesArray.concat(documentImageArray);
+    }
+
+    console.log(`image smartfile array`);
+    console.log(pdfDocumentImageBytesArray.map((smartfile) => smartfile.length));
+
+    const smartfileArray = await plugins.smartarray.map(
+      pdfDocumentImageBytesArray,
+      async (pdfDocumentImageBytes) => {
+        return plugins.smartfile.SmartFile.fromBuffer(
+          'pdfDocumentImage.jpg',
+          Buffer.from(pdfDocumentImageBytes)
+        );
+      }
+    );
+
+    const result = await this.openAiApiClient.chat.completions.create({
+      model: 'gpt-4o',
+      // response_format: { type: "json_object" }, // not supported for now
+      messages: [
+        { role: 'system', content: optionsArg.systemMessage },
+        ...optionsArg.messageHistory,
+        {
+          role: 'user',
+          content: [
+            { type: 'text', text: optionsArg.userMessage },
+            ...(() => {
+              const returnArray = [];
+              for (const imageBytes of pdfDocumentImageBytesArray) {
+                returnArray.push({
+                  type: 'image_url',
+                  image_url: {
+                    url: 'data:image/png;base64,' + Buffer.from(imageBytes).toString('base64'),
+                  },
+                });
+              }
+              return returnArray;
+            })(),
+          ],
+        },
       ],
     });
     return {
       message: result.choices[0].message,
     };
   }
 
-  public async audio(messageArg: string) {
-    const done = plugins.smartpromise.defer();
-    const result = await this.openAiApiClient.audio.speech.create({
-      model: 'tts-1-hd',
-      input: messageArg,
-      voice: 'nova',
-      response_format: 'mp3',
-      speed: 1,
-    });
-    const stream = result.body.pipe(plugins.smartfile.fsStream.createWriteStream(plugins.path.join(paths.nogitDir, 'output.mp3')));
-    stream.on('finish', () => {
-      done.resolve();
-    });
-    return done.promise;
-  }
 }
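The new `document()` method renders each PDF page to PNG bytes via smartpdf and attaches them as image_url parts alongside the prompt. A call-site sketch mirroring test/test.ts earlier in this diff; the file path and answer list are illustrative:

```typescript
import * as smartfile from '@push.rocks/smartfile';

// load a PDF from disk and ask gpt-4o to classify it
const pdfBuffer = await smartfile.fs.toBuffer('./invoice.pdf'); // illustrative path
const result = await smartAi.openaiProvider.document({
  systemMessage: 'Classify the document. Only the following answers are allowed: "invoice", "contract", "other"',
  userMessage: 'Classify the document.',
  messageHistory: [],
  pdfDocuments: [pdfBuffer],
});
console.log(result.message); // the raw chat completion message object
```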
ts/provider.perplexity.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
+import * as plugins from './plugins.js';
+
+export class PerplexityProvider {}