Compare commits

6 Commits

| Author | SHA1 | Date |
| --- | --- | --- |
|  | 6916dd9e2a |  |
|  | f89888a542 |  |
|  | d93b198b09 |  |
|  | 9e390d0fdb |  |
|  | 8329ee861e |  |
|  | b8585a0afb |  |
changelog.md (19 lines changed)

```diff
@@ -1,5 +1,24 @@
 # Changelog
 
+## 2025-02-08 - 0.4.0 - feat(core)
+Added support for Exo AI provider
+
+- Introduced ExoProvider with chat functionalities.
+- Updated SmartAi class to initialize ExoProvider.
+- Extended Conversation class to support ExoProvider.
+
+## 2025-02-05 - 0.3.3 - fix(documentation)
+Update readme with detailed license and legal information.
+
+- Added explicit section on License and Legal Information in the README.
+- Clarified the use of trademarks and company information.
+
+## 2025-02-05 - 0.3.2 - fix(documentation)
+Remove redundant badges from readme
+
+- Removed Build Status badge from the readme file.
+- Removed License badge from the readme file.
+
 ## 2025-02-05 - 0.3.1 - fix(documentation)
 Updated README structure and added detailed usage examples
 
```
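The 0.4.0 entry above is the substance of this compare: a new ExoProvider, wired into the SmartAi and Conversation classes. As a rough orientation before the source diffs below, this is how the feature is enabled from the consumer side; it is only a sketch, with the option names and endpoint value taken from the readme and `ISmartAiOptions` changes further down.

```typescript
import { SmartAi } from '@push.rocks/smartai';

// The new `exo` option block mirrors the readme's configuration example below.
const smartAi = new SmartAi({
  exo: {
    baseUrl: 'http://localhost:8080/v1', // optional, per the readme example
    apiKey: 'your-api-key',              // optional for local deployments
  },
});

// Once the providers have been initialized, 'exo' is a valid conversation target
// (createConversation('exo') routes to Conversation.createWithExo, see the diffs below).
const conversation = await smartAi.createConversation('exo');
```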
package.json

```diff
@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartai",
-  "version": "0.3.1",
+  "version": "0.4.0",
   "private": false,
   "description": "A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.",
   "main": "dist_ts/index.js",
```
readme.md (38 lines changed)

````diff
@@ -1,8 +1,6 @@
 # @push.rocks/smartai
 
 [](https://www.npmjs.com/package/@push.rocks/smartai)
-[](https://github.com/push.rocks/smartai/actions)
-[](LICENSE)
 
 SmartAi is a comprehensive TypeScript library that provides a standardized interface for integrating and interacting with multiple AI models. It supports a range of operations from synchronous and streaming chat to audio generation, document processing, and vision tasks.
 
@@ -112,6 +110,19 @@ SmartAi supports multiple AI providers. Configure each provider with its corresp
 }
 ```
 
+### Exo
+
+- **Models:** Configurable (supports LLaMA, Mistral, LlaVA, Qwen, and Deepseek)
+- **Features:** Chat, Streaming
+- **Configuration Example:**
+
+```typescript
+exo: {
+  baseUrl: 'http://localhost:8080/v1', // Optional
+  apiKey: 'your-api-key' // Optional for local deployments
+}
+```
+
 ## Quick Start
 
 Initialize SmartAi with the provider configurations you plan to use:
@@ -128,6 +139,10 @@ const smartAi = new SmartAi({
   ollama: {
     baseUrl: 'http://localhost:11434',
     model: 'llama2'
+  },
+  exo: {
+    baseUrl: 'http://localhost:8080/v1',
+    apiKey: 'your-api-key'
   }
 });
 
@@ -310,24 +325,21 @@ Contributions are welcome! Please follow these steps:
 ```
 5. Open a Pull Request with a detailed description of your changes.
 
-## License
+## License and Legal Information
 
-This project is licensed under the [MIT License](LICENSE).
+This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
 
-## Legal Information
+**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
 
 ### Trademarks
 
-This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and its related products or services are trademarks of Task Venture Capital GmbH and are not covered by the MIT License. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines.
+This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
 
 ### Company Information
 
 Task Venture Capital GmbH
-Registered at District Court Bremen HRB 35230 HB, Germany
-Contact: hello@task.vc
+Registered at District court Bremen HRB 35230 HB, Germany
 
-By using this repository, you agree to the terms outlined in this section.
+For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
 
----
-
-Happy coding with SmartAi!
+By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
````
commitinfo (version bump)

```diff
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartai',
-  version: '0.3.1',
+  version: '0.4.0',
   description: 'A TypeScript library for integrating and interacting with multiple AI models, offering capabilities for chat and potentially audio responses.'
 }
```
ts/classes.conversation.ts

```diff
@@ -48,6 +48,18 @@ export class Conversation {
     return conversation;
   }
 
+  public static async createWithExo(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.exoProvider) {
+      throw new Error('Exo provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
   public static async createWithOllama(smartaiRefArg: SmartAi) {
     if (!smartaiRefArg.ollamaProvider) {
       throw new Error('Ollama provider not available');
@@ -60,6 +72,30 @@ export class Conversation {
     return conversation;
   }
 
+  public static async createWithGroq(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.groqProvider) {
+      throw new Error('Groq provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
+  public static async createWithXai(smartaiRefArg: SmartAi) {
+    if (!smartaiRefArg.xaiProvider) {
+      throw new Error('XAI provider not available');
+    }
+    const conversation = new Conversation(smartaiRefArg, {
+      processFunction: async (input) => {
+        return '' // TODO implement proper streaming
+      }
+    });
+    return conversation;
+  }
+
   // INSTANCE
   smartaiRef: SmartAi
   private systemMessage: string;
```
SmartAi class

```diff
@@ -1,18 +1,32 @@
 import { Conversation } from './classes.conversation.js';
 import * as plugins from './plugins.js';
 import { AnthropicProvider } from './provider.anthropic.js';
-import type { OllamaProvider } from './provider.ollama.js';
+import { OllamaProvider } from './provider.ollama.js';
 import { OpenAiProvider } from './provider.openai.js';
-import type { PerplexityProvider } from './provider.perplexity.js';
+import { PerplexityProvider } from './provider.perplexity.js';
+import { ExoProvider } from './provider.exo.js';
+import { GroqProvider } from './provider.groq.js';
+import { XAIProvider } from './provider.xai.js';
 
 
 export interface ISmartAiOptions {
   openaiToken?: string;
   anthropicToken?: string;
   perplexityToken?: string;
+  groqToken?: string;
+  xaiToken?: string;
+  exo?: {
+    baseUrl?: string;
+    apiKey?: string;
+  };
+  ollama?: {
+    baseUrl?: string;
+    model?: string;
+    visionModel?: string;
+  };
 }
 
-export type TProvider = 'openai' | 'anthropic' | 'perplexity' | 'ollama';
+export type TProvider = 'openai' | 'anthropic' | 'perplexity' | 'ollama' | 'exo' | 'groq' | 'xai';
 
 export class SmartAi {
   public options: ISmartAiOptions;
@@ -21,6 +35,9 @@ export class SmartAi {
   public anthropicProvider: AnthropicProvider;
   public perplexityProvider: PerplexityProvider;
   public ollamaProvider: OllamaProvider;
+  public exoProvider: ExoProvider;
+  public groqProvider: GroqProvider;
+  public xaiProvider: XAIProvider;
 
   constructor(optionsArg: ISmartAiOptions) {
     this.options = optionsArg;
@@ -37,6 +54,40 @@ export class SmartAi {
       this.anthropicProvider = new AnthropicProvider({
         anthropicToken: this.options.anthropicToken,
       });
+      await this.anthropicProvider.start();
+    }
+    if (this.options.perplexityToken) {
+      this.perplexityProvider = new PerplexityProvider({
+        perplexityToken: this.options.perplexityToken,
+      });
+      await this.perplexityProvider.start();
+    }
+    if (this.options.groqToken) {
+      this.groqProvider = new GroqProvider({
+        groqToken: this.options.groqToken,
+      });
+      await this.groqProvider.start();
+    }
+    if (this.options.xaiToken) {
+      this.xaiProvider = new XAIProvider({
+        xaiToken: this.options.xaiToken,
+      });
+      await this.xaiProvider.start();
+    }
+    if (this.options.ollama) {
+      this.ollamaProvider = new OllamaProvider({
+        baseUrl: this.options.ollama.baseUrl,
+        model: this.options.ollama.model,
+        visionModel: this.options.ollama.visionModel,
+      });
+      await this.ollamaProvider.start();
+    }
+    if (this.options.exo) {
+      this.exoProvider = new ExoProvider({
+        exoBaseUrl: this.options.exo.baseUrl,
+        apiKey: this.options.exo.apiKey,
+      });
+      await this.exoProvider.start();
     }
   }
 
@@ -47,6 +98,8 @@ export class SmartAi {
    */
   createConversation(provider: TProvider) {
     switch (provider) {
+      case 'exo':
+        return Conversation.createWithExo(this);
       case 'openai':
         return Conversation.createWithOpenAi(this);
       case 'anthropic':
@@ -55,6 +108,10 @@ export class SmartAi {
         return Conversation.createWithPerplexity(this);
       case 'ollama':
         return Conversation.createWithOllama(this);
+      case 'groq':
+        return Conversation.createWithGroq(this);
+      case 'xai':
+        return Conversation.createWithXai(this);
       default:
         throw new Error('Provider not available');
     }
```
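With the provider switch extended as above, the three new providers can be requested by name. A minimal usage sketch (provider names and option fields are taken from the diff; the token environment variables are placeholders, and the example assumes SmartAi has finished initializing its providers):

```typescript
import { SmartAi } from '@push.rocks/smartai';

const smartAi = new SmartAi({
  groqToken: process.env.GROQ_TOKEN, // placeholder: supply your own token
  xaiToken: process.env.XAI_TOKEN,   // placeholder: supply your own token
  exo: { baseUrl: 'http://localhost:8080/v1' },
});

// Each call routes through the extended switch to the matching static factory.
const groqChat = await smartAi.createConversation('groq'); // -> Conversation.createWithGroq
const xaiChat = await smartAi.createConversation('xai');   // -> Conversation.createWithXai
const exoChat = await smartAi.createConversation('exo');   // -> Conversation.createWithExo
```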
ts/provider.exo.ts (new file, 128 lines)

```typescript
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';
import type { ChatOptions, ChatResponse, ChatMessage } from './abstract.classes.multimodal.js';
import type { ChatCompletionMessageParam } from 'openai/resources/chat/completions';

export interface IExoProviderOptions {
  exoBaseUrl?: string;
  apiKey?: string;
}

export class ExoProvider extends MultiModalModel {
  private options: IExoProviderOptions;
  public openAiApiClient: plugins.openai.default;

  constructor(optionsArg: IExoProviderOptions = {}) {
    super();
    this.options = {
      exoBaseUrl: 'http://localhost:8080/v1', // Default Exo API endpoint
      ...optionsArg
    };
  }

  public async start() {
    this.openAiApiClient = new plugins.openai.default({
      apiKey: this.options.apiKey || 'not-needed', // Exo might not require an API key for local deployment
      baseURL: this.options.exoBaseUrl,
    });
  }

  public async stop() {}

  public async chatStream(input: ReadableStream<Uint8Array>): Promise<ReadableStream<string>> {
    // Create a TextDecoder to handle incoming chunks
    const decoder = new TextDecoder();
    let buffer = '';
    let currentMessage: { role: string; content: string; } | null = null;

    // Create a TransformStream to process the input
    const transform = new TransformStream<Uint8Array, string>({
      async transform(chunk, controller) {
        buffer += decoder.decode(chunk, { stream: true });

        // Try to parse complete JSON messages from the buffer
        while (true) {
          const newlineIndex = buffer.indexOf('\n');
          if (newlineIndex === -1) break;

          const line = buffer.slice(0, newlineIndex);
          buffer = buffer.slice(newlineIndex + 1);

          if (line.trim()) {
            try {
              const message = JSON.parse(line);
              currentMessage = message;

              // Process the message based on its type
              if (message.type === 'message') {
                const response = await this.chat({
                  systemMessage: '',
                  userMessage: message.content,
                  messageHistory: [{ role: message.role as 'user' | 'assistant' | 'system', content: message.content }]
                });

                controller.enqueue(JSON.stringify(response) + '\n');
              }
            } catch (error) {
              console.error('Error processing message:', error);
            }
          }
        }
      },
      flush(controller) {
        if (buffer) {
          try {
            const message = JSON.parse(buffer);
            currentMessage = message;
          } catch (error) {
            console.error('Error processing remaining buffer:', error);
          }
        }
      }
    });

    return input.pipeThrough(transform);
  }

  public async chat(options: ChatOptions): Promise<ChatResponse> {
    const messages: ChatCompletionMessageParam[] = [
      { role: 'system', content: options.systemMessage },
      ...options.messageHistory,
      { role: 'user', content: options.userMessage }
    ];

    try {
      const response = await this.openAiApiClient.chat.completions.create({
        model: 'local-model', // Exo uses local models
        messages: messages,
        stream: false
      });

      return {
        role: 'assistant',
        message: response.choices[0]?.message?.content || ''
      };
    } catch (error) {
      console.error('Error in chat completion:', error);
      throw error;
    }
  }

  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
    throw new Error('Audio generation is not supported by Exo provider');
  }

  public async vision(optionsArg: { image: Buffer; prompt: string }): Promise<string> {
    throw new Error('Vision processing is not supported by Exo provider');
  }

  public async document(optionsArg: {
    systemMessage: string;
    userMessage: string;
    pdfDocuments: Uint8Array[];
    messageHistory: ChatMessage[];
  }): Promise<{ message: any }> {
    throw new Error('Document processing is not supported by Exo provider');
  }
}
```
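For reference, a minimal sketch of using the new provider directly, run from an async context; the class name, option fields, and `chat()` signature come from the file above, while the endpoint value is simply the default hard-coded in the constructor and a running Exo instance is assumed:

```typescript
import { ExoProvider } from './provider.exo.js'; // same import path the SmartAi class uses

const exo = new ExoProvider({
  exoBaseUrl: 'http://localhost:8080/v1', // default endpoint from the constructor above
});

await exo.start(); // sets up the OpenAI-compatible client pointed at Exo

const response = await exo.chat({
  systemMessage: 'You are a helpful assistant.',
  userMessage: 'Hello from SmartAi!',
  messageHistory: [],
});

console.log(response.role, response.message); // 'assistant' followed by the model's reply text
```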