Compare commits
13 Commits
Author | SHA1 | Date
--- | --- | ---
 | 1ce412fd00 |
 | 92c382c16e |
 | 63d3b7c9bb |
 | 2e4c6aa80a |
 | 04d505d29e |
 | a636556fdb |
 | a1558e6306 |
 | 8b79cd025a |
 | 268178f024 |
 | 181193352e |
 | 616ef168a5 |
 | 1b814477ec |
 | f2685164e5 |
npmextra.json

@@ -5,14 +5,29 @@
      "githost": "code.foss.global",
      "gitscope": "push.rocks",
      "gitrepo": "smartai",
      "description": "a standardaized interface to talk to AI models",
      "description": "Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.",
      "npmPackagename": "@push.rocks/smartai",
      "license": "MIT",
      "projectDomain": "push.rocks"
      "projectDomain": "push.rocks",
      "keywords": [
        "AI models integration",
        "OpenAI GPT",
        "Anthropic AI",
        "text-to-speech",
        "conversation stream",
        "TypeScript",
        "ESM",
        "streaming API",
        "modular design",
        "development tool"
      ]
    }
  },
  "npmci": {
    "npmGlobalTools": [],
    "npmAccessLevel": "public"
  },
  "tsdoc": {
    "legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
  }
}

33 package.json

@@ -1,8 +1,8 @@
{
  "name": "@push.rocks/smartai",
  "version": "0.0.5",
  "version": "0.0.10",
  "private": false,
  "description": "a standardaized interface to talk to AI models",
  "description": "Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.",
  "main": "dist_ts/index.js",
  "typings": "dist_ts/index.d.ts",
  "type": "module",
@@ -17,11 +17,20 @@
    "@git.zone/tsbuild": "^2.1.25",
    "@git.zone/tsbundle": "^2.0.5",
    "@git.zone/tsrun": "^1.2.46",
    "@git.zone/tstest": "^1.0.44",
    "@push.rocks/tapbundle": "^5.0.15",
    "@types/node": "^20.8.7"
    "@git.zone/tstest": "^1.0.90",
    "@push.rocks/qenv": "^6.0.5",
    "@push.rocks/tapbundle": "^5.0.23",
    "@types/node": "^20.12.7"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.20.7",
    "@push.rocks/smartexpose": "^1.0.5",
    "@push.rocks/smartfile": "^11.0.14",
    "@push.rocks/smartpath": "^5.0.18",
    "@push.rocks/smartpromise": "^4.0.3",
    "@push.rocks/webstream": "^1.0.8",
    "openai": "^4.38.3"
  },
  "dependencies": {},
  "repository": {
    "type": "git",
    "url": "git+https://code.foss.global/push.rocks/smartai.git"
@@ -44,5 +53,17 @@
    "cli.js",
    "npmextra.json",
    "readme.md"
  ],
  "keywords": [
    "AI models integration",
    "OpenAI GPT",
    "Anthropic AI",
    "text-to-speech",
    "conversation stream",
    "TypeScript",
    "ESM",
    "streaming API",
    "modular design",
    "development tool"
  ]
}

864 pnpm-lock.yaml (generated)
File diff suppressed because it is too large

4 qenv.yml Normal file

@@ -0,0 +1,4 @@
required:
  - OPENAI_TOKEN
  - ANTHROPIC_TOKEN
  - PERPLEXITY_TOKEN

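The new qenv.yml declares the environment variables the project expects. As a minimal sketch of how they might be loaded, reusing the @push.rocks/qenv calls that appear in test/test.ts further down (the three-token loop is an illustration, not part of the diff):

```typescript
import * as qenv from '@push.rocks/qenv';

// load required variables either from the environment or from the ./.nogit/ directory
const testQenv = new qenv.Qenv('./', './.nogit/');

const openaiToken = await testQenv.getEnvVarOnDemand('OPENAI_TOKEN');
const anthropicToken = await testQenv.getEnvVarOnDemand('ANTHROPIC_TOKEN');
const perplexityToken = await testQenv.getEnvVarOnDemand('PERPLEXITY_TOKEN');
```
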
1 readme.hints.md Normal file

@@ -0,0 +1 @@

145 readme.md

@@ -1,31 +1,128 @@
# @push.rocks/smartai
a standardaized interface to talk to AI models

## Availabililty and Links
* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartai)
* [gitlab.com (source)](https://code.foss.global/push.rocks/smartai)
* [github.com (source mirror)](https://github.com/push.rocks/smartai)
* [docs (typedoc)](https://push.rocks.gitlab.io/smartai/)
Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.

## Status for master
## Install

Status Category | Status Badge
-- | --
GitLab Pipelines | [](https://lossless.cloud)
GitLab Pipline Test Coverage | [](https://lossless.cloud)
npm | [](https://lossless.cloud)
Snyk | [](https://lossless.cloud)
TypeScript Support | [](https://lossless.cloud)
node Support | [](https://nodejs.org/dist/latest-v10.x/docs/api/)
Code Style | [](https://lossless.cloud)
PackagePhobia (total standalone install weight) | [](https://lossless.cloud)
PackagePhobia (package size on registry) | [](https://lossless.cloud)
BundlePhobia (total size when bundled) | [](https://lossless.cloud)
To add @push.rocks/smartai to your project, run the following command in your terminal:

```bash
npm install @push.rocks/smartai
```

This command installs the package and adds it to your project's dependencies.

## Usage
Use TypeScript for best in class intellisense
For further information read the linked docs at the top of this readme.

## Legal
> MIT licensed | **©** [Task Venture Capital GmbH](https://task.vc)
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
The usage section delves into how to leverage the `@push.rocks/smartai` package to interact with AI models in an application. This package simplifies the integration and conversation with AI models by providing a standardized interface. The examples below demonstrate the package's capabilities in engaging with AI models for chat operations and potentially handling audio responses using TypeScript and ESM syntax.

### Integrating AI Models

#### Importing the Module

Start by importing `SmartAi` and the AI providers you wish to use from `@push.rocks/smartai`.

```typescript
import { SmartAi, OpenAiProvider, AnthropicProvider } from '@push.rocks/smartai';
```

#### Initializing `SmartAi`

Create an instance of `SmartAi` with the necessary credentials for accessing the AI services.

```typescript
const smartAi = new SmartAi({
  openaiToken: 'your-openai-access-token',
  anthropicToken: 'your-anthropic-access-token'
});
```

### Chatting with the AI

#### Creating a Conversation

To begin a conversation, choose the AI provider you'd like to use. For instance, to use OpenAI:

```typescript
async function createOpenAiConversation() {
  const conversation = await smartAi.createOpenApiConversation();
  // Use the conversation for chatting
}
```

Similarly, for an Anthropic AI conversation:

```typescript
async function createAnthropicConversation() {
  const conversation = await smartAi.createAnthropicConversation();
  // Use the conversation for chatting
}
```

### Streaming Chat with OpenAI

For more advanced scenarios, like a streaming chat with OpenAI, you would interact with the chat stream directly:

```typescript
// Assuming a conversation has been created and initialized...
const inputStreamWriter = conversation.getInputStreamWriter();
const outputStream = conversation.getOutputStream();

// Write a message to the input stream for the AI to process
await inputStreamWriter.write('Hello, how can I help you today?');

// Listen to the output stream for responses from AI
const reader = outputStream.getReader();
reader.read().then(function processText({ done, value }) {
  if (done) {
    console.log("No more messages from AI");
    return;
  }
  console.log("AI says:", value);
  // Continue reading messages
  reader.read().then(processText);
});
```

### Handling Audio Responses

The package may also support converting text responses from the AI into audio. While specific implementation details depend on the AI provider's capabilities, a generic approach would involve creating a text-to-speech instance and utilizing it:

```typescript
// This is a hypothetical function call as the implementation might vary
const tts = await TTS.createWithOpenAi(smartAi);

// The TTS instance would then be used to convert text to speech
```

### Extensive Feature Set

`@push.rocks/smartai` provides comprehensive support for interacting with various AI models, not limited to text chat. It encompasses audio responses, potentially incorporating AI-powered analyses, and other multi-modal interactions.

Refer to the specific AI providers’ documentation through `@push.rocks/smartai`, such as OpenAI and Anthropic, for detailed guidance on utilizing the full spectrum of capabilities, including the implementation of custom conversation flows, handling streaming data efficiently, and generating audio responses from AI conversations.

### Conclusion

Equipped with `@push.rocks/smartai`, developers can streamline the integration of sophisticated AI interactions into their applications. The package facilitates robust communication with AI models, supporting diverse operations from simple chats to complex audio feedback mechanisms, all within a unified, easy-to-use interface.

Explore the package more to uncover its full potential in creating engaging, AI-enhanced interactions in your applications.


## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.

### Company Information

Task Venture Capital GmbH
Registered at District court Bremen HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

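Pulling the readme snippets above together, a minimal end-to-end sketch could look like the following. It assumes the API exactly as the new readme documents it (the SmartAi constructor options, createOpenApiConversation(), getInputStreamWriter() and getOutputStream()); parts of the streaming path are still stubbed out in the code later in this compare, so treat it as illustrative rather than guaranteed to run against this exact version.

```typescript
import { SmartAi } from '@push.rocks/smartai';

const smartAi = new SmartAi({
  openaiToken: 'your-openai-access-token',
});

// create a conversation backed by OpenAI, as described in the readme
const conversation = await smartAi.createOpenApiConversation();

// write a user message into the conversation ...
const writer = conversation.getInputStreamWriter();
await writer.write('Summarize the benefits of streaming APIs in two sentences.');

// ... and read the assistant's reply from the output stream
const reader = conversation.getOutputStream().getReader();
const { value, done } = await reader.read();
if (!done) {
  console.log('AI says:', value);
}
```
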
15 test/test.ts

@@ -1,8 +1,17 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import * as smartai from '../ts/index.js'
import * as qenv from '@push.rocks/qenv';

tap.test('first test', async () => {
  console.log(smartai)
const testQenv = new qenv.Qenv('./', './.nogit/');

import * as smartai from '../ts/index.js';

let testSmartai: smartai.SmartAi;

tap.test('should create a smartai instance', async () => {
  testSmartai = new smartai.SmartAi({
    openaiToken: await testQenv.getEnvVarOnDemand('OPENAI_TOKEN'),

  });
})

tap.start()

@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@push.rocks/smartai',
  version: '0.0.5',
  description: 'a standardaized interface to talk to AI models'
  version: '0.0.10',
  description: 'Provides a standardized interface for integrating and conversing with multiple AI models, supporting operations like chat and potentially audio responses.'
}

29 ts/abstract.classes.multimodal.ts Normal file

@@ -0,0 +1,29 @@
export abstract class MultiModalModel {
  /**
   * starts the model
   */
  abstract start(): Promise<void>;

  /**
   * stops the model
   */
  abstract stop(): Promise<void>;

  public abstract chat(optionsArg: {
    systemMessage: string,
    userMessage: string,
    messageHistory: {
      role: 'assistant' | 'user';
      content: string;
    }[]
  }): Promise<{}>

  /**
   * Defines a streaming interface for chat interactions.
   * The implementation will vary based on the specific AI model.
   * @param input
   */
  public abstract chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>>;


}

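MultiModalModel is the contract every provider added in this changeset implements. As a minimal sketch of a custom provider against that contract (a hypothetical EchoProvider, not part of the diff), using only the abstract signatures shown above and the standard Streams API:

```typescript
import { MultiModalModel } from './abstract.classes.multimodal.js';

// Hypothetical provider that simply echoes the user message; it only exists
// to illustrate the abstract contract defined above.
export class EchoProvider extends MultiModalModel {
  public async start(): Promise<void> {
    // no external client to set up
  }

  public async stop(): Promise<void> {}

  public async chat(optionsArg: {
    systemMessage: string;
    userMessage: string;
    messageHistory: { role: 'assistant' | 'user'; content: string }[];
  }) {
    return { message: `echo: ${optionsArg.userMessage}` };
  }

  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
    // pass the input stream straight through, prefixing each chunk
    return input.pipeThrough(
      new TransformStream<string, string>({
        transform(chunk, controller) {
          controller.enqueue(`echo: ${chunk}`);
        },
      })
    );
  }
}
```
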
@@ -1,18 +1,53 @@
import type { SmartAi } from "./classes.smartai.js";
import { OpenAiProvider } from "./provider.openai.js";

type TProcessFunction = (input: string) => Promise<string>;

interface ISmartAiOptions {
export interface IConversationOptions {
  processFunction: TProcessFunction;
}

class SmartAi {
/**
 * a conversation
 */
export class Conversation {
  // STATIC
  public static async createWithOpenAi(smartaiRef: SmartAi) {
    const openaiProvider = new OpenAiProvider(smartaiRef.options.openaiToken);
    const conversation = new Conversation(smartaiRef, {
      processFunction: async (input) => {
        return '' // TODO implement proper streaming
      }
    });
    return conversation;
  }

  public static async createWithAnthropic(smartaiRef: SmartAi) {
    const anthropicProvider = new OpenAiProvider(smartaiRef.options.anthropicToken);
    const conversation = new Conversation(smartaiRef, {
      processFunction: async (input) => {
        return '' // TODO implement proper streaming
      }
    });
    return conversation;
  }


  // INSTANCE
  smartaiRef: SmartAi
  private systemMessage: string;
  private processFunction: TProcessFunction;
  private inputStreamWriter: WritableStreamDefaultWriter<string> | null = null;
  private outputStreamController: ReadableStreamDefaultController<string> | null = null;

  constructor(options: ISmartAiOptions) {
  constructor(smartairefArg: SmartAi, options: IConversationOptions) {
    this.processFunction = options.processFunction;
  }

  setSystemMessage(systemMessage: string) {
    this.systemMessage = systemMessage;
  }

  private setupOutputStream(): ReadableStream<string> {
    return new ReadableStream<string>({
      start: (controller) => {

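The Conversation class pairs a WritableStream for user input with a ReadableStream for model output, bridged by a processFunction. The hunk above is truncated, so the following is only a standalone sketch of that wiring pattern using plain web streams, not the file's exact internals:

```typescript
type TProcessFunction = (input: string) => Promise<string>;

// Build an input/output stream pair where every chunk written to the input
// is run through processFunction and pushed to the output.
function createConversationStreams(processFunction: TProcessFunction) {
  let outputController!: ReadableStreamDefaultController<string>;

  const outputStream = new ReadableStream<string>({
    start: (controller) => {
      outputController = controller;
    },
  });

  const inputStream = new WritableStream<string>({
    write: async (chunk) => {
      const response = await processFunction(chunk);
      outputController.enqueue(response);
    },
    close: () => outputController.close(),
  });

  return { inputWriter: inputStream.getWriter(), outputStream };
}

// usage: echo back upper-cased input
const { inputWriter, outputStream } = createConversationStreams(async (input) => input.toUpperCase());
await inputWriter.write('hello');
```
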
47 ts/classes.smartai.ts Normal file

@@ -0,0 +1,47 @@
import { Conversation } from './classes.conversation.js';
import * as plugins from './plugins.js';
import type { AnthropicProvider } from './provider.anthropic.js';
import type { OllamaProvider } from './provider.ollama.js';
import type { OpenAiProvider } from './provider.openai.js';
import type { PerplexityProvider } from './provider.perplexity.js';


export interface ISmartAiOptions {
  openaiToken?: string;
  anthropicToken?: string;
  perplexityToken?: string;
  exposeCredentials?: plugins.smartexpose.ISmartExposeOptions;
}

export class SmartAi {
  public options: ISmartAiOptions;

  public openaiProvider: OpenAiProvider;
  public anthropicProvider: AnthropicProvider;
  public perplexityProvider: PerplexityProvider;
  public ollamaProvider: OllamaProvider;

  constructor(optionsArg: ISmartAiOptions) {
    this.options = optionsArg;
  }

  public async start() {

  }

  public async stop() {}

  /**
   * creates an OpenAI conversation
   */
  public async createOpenApiConversation() {
    const conversation = await Conversation.createWithOpenAi(this);
  }

  /**
   * creates an OpenAI conversation
   */
  public async createAnthropicConversation() {
    const conversation = await Conversation.createWithAnthropic(this);
  }
}

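A short usage sketch of the class above. Note that in this diff the conversation-creating methods build a Conversation but do not yet return it, so the sketch only triggers them:

```typescript
import { SmartAi } from './classes.smartai.js';

const smartAi = new SmartAi({
  openaiToken: 'your-openai-access-token',
  anthropicToken: 'your-anthropic-access-token',
});

await smartAi.start();

// kick off provider-specific conversations; in this changeset the methods
// do not yet return the created Conversation instance
await smartAi.createOpenApiConversation();
await smartAi.createAnthropicConversation();

await smartAi.stop();
```
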
15 ts/classes.tts.ts Normal file

@@ -0,0 +1,15 @@
import type { SmartAi } from './classes.smartai.js';
import * as plugins from './plugins.js';

export class TTS {
  public static async createWithOpenAi(smartaiRef: SmartAi): Promise<TTS> {
    return new TTS(smartaiRef);
  }

  // INSTANCE
  smartaiRef: SmartAi;

  constructor(smartairefArg: SmartAi) {
    this.smartaiRef = smartairefArg;
  }
}

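Mirroring the readme's hypothetical text-to-speech example, the factory above would be used roughly like this. It is a sketch only: the class does not expose a speech method in this range, and the actual audio generation lives on OpenAiProvider.audio() (see ts/provider.openai.ts below):

```typescript
import { SmartAi } from './classes.smartai.js';
import { TTS } from './classes.tts.js';

const smartAi = new SmartAi({ openaiToken: 'your-openai-access-token' });

// create a TTS helper bound to this SmartAi instance
const tts = await TTS.createWithOpenAi(smartAi);
// TTS is currently a thin wrapper that only keeps a reference to SmartAi
```
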
ts/index.ts

@@ -1 +1,3 @@
export * from './smartai.classes.smartai.js';
export * from './classes.smartai.js';
export * from './abstract.classes.multimodal.js';
export * from './provider.openai.js';

0 ts/interfaces.ts Normal file

4 ts/paths.ts Normal file

@@ -0,0 +1,4 @@
import * as plugins from './plugins.js';

export const packageDir = plugins.path.join(plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url), '../');
export const nogitDir = plugins.path.join(packageDir, './.nogit');

32 ts/plugins.ts Normal file

@@ -0,0 +1,32 @@
// node native
import * as path from 'path';

export {
  path,
}

// @push.rocks scope
import * as qenv from '@push.rocks/qenv';
import * as smartexpose from '@push.rocks/smartexpose';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartfile from '@push.rocks/smartfile';
import * as webstream from '@push.rocks/webstream';

export {
  qenv,
  smartexpose,
  smartpath,
  smartpromise,
  smartfile,
  webstream,
}

// third party
import * as anthropic from '@anthropic-ai/sdk';
import * as openai from 'openai';

export {
  anthropic,
  openai,
}

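ts/plugins.ts funnels all external imports through one namespace, and the other modules in this changeset reach their dependencies through it. A minimal consumption sketch, based on how the providers below use the namespace:

```typescript
import * as plugins from './plugins.js';

// the provider classes instantiate SDK clients through the plugins namespace,
// e.g. the OpenAI client (see ts/provider.openai.ts below)
const openAiApiClient = new plugins.openai.default({
  apiKey: 'your-openai-access-token',
});

// node built-ins are re-exported the same way
const readmePath = plugins.path.join(process.cwd(), 'readme.md');
```
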
75 ts/provider.anthropic.ts Normal file

@@ -0,0 +1,75 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { MultiModalModel } from './abstract.classes.multimodal.js';

export class AnthropicProvider extends MultiModalModel {
  private anthropicToken: string;
  public anthropicApiClient: plugins.anthropic.default;

  constructor(anthropicToken: string) {
    super();
    this.anthropicToken = anthropicToken; // Ensure the token is stored
  }

  async start() {
    this.anthropicApiClient = new plugins.anthropic.default({
      apiKey: this.anthropicToken,
    });
  }

  async stop() {}

  chatStream(input: ReadableStream<string>): ReadableStream<string> {
    const decoder = new TextDecoder();
    let messageHistory: { role: 'assistant' | 'user'; content: string }[] = [];

    return new ReadableStream({
      async start(controller) {
        const reader = input.getReader();
        try {
          let done, value;
          while ((({ done, value } = await reader.read()), !done)) {
            const userMessage = decoder.decode(value, { stream: true });
            messageHistory.push({ role: 'user', content: userMessage });
            const aiResponse = await this.chat('', userMessage, messageHistory);
            messageHistory.push({ role: 'assistant', content: aiResponse.message });
            // Directly enqueue the string response instead of encoding it first
            controller.enqueue(aiResponse.message);
          }
          controller.close();
        } catch (err) {
          controller.error(err);
        }
      },
    });
  }

  // Implementing the synchronous chat interaction
  public async chat(
    systemMessage: string,
    userMessage: string,
    messageHistory: {
      role: 'assistant' | 'user';
      content: string;
    }[]
  ) {
    const result = await this.anthropicApiClient.messages.create({
      model: 'claude-3-opus-20240229',
      system: systemMessage,
      messages: [
        ...messageHistory,
        { role: 'user', content: userMessage },
      ],
      max_tokens: 4000,
    });

    return {
      message: result.content,
    };
  }

  public async audio(messageArg: string) {
    // Anthropic does not provide an audio API, so this method is not implemented.
    throw new Error('Audio generation is not supported by Anthropic.');
  }
}

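A short sketch of driving the provider above directly, assuming a valid Anthropic token. Note that chat() here takes positional arguments, unlike the options object declared on the abstract class:

```typescript
import { AnthropicProvider } from './provider.anthropic.js';

const anthropic = new AnthropicProvider('your-anthropic-access-token');
await anthropic.start(); // instantiates the Anthropic API client

const response = await anthropic.chat(
  'You are a concise assistant.',   // systemMessage
  'What is a multimodal model?',    // userMessage
  []                                // messageHistory
);

// result.content from the Anthropic SDK is an array of content blocks
console.log(response.message);
```
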
3 ts/provider.ollama.ts Normal file

@@ -0,0 +1,3 @@
import * as plugins from './plugins.js';

export class OllamaProvider {}

102 ts/provider.openai.ts Normal file

@@ -0,0 +1,102 @@
import * as plugins from './plugins.js';
import * as paths from './paths.js';

import { MultiModalModel } from './abstract.classes.multimodal.js';

export class OpenAiProvider extends MultiModalModel {
  public smartexposeInstance: plugins.smartexpose.SmartExpose;
  private openAiToken: string;
  public openAiApiClient: plugins.openai.default;

  constructor(openaiToken: string, expose) {
    super();
    this.openAiToken = openaiToken; // Ensure the token is stored
  }

  async start() {
    this.openAiApiClient = new plugins.openai.default({
      apiKey: this.openAiToken,
      dangerouslyAllowBrowser: true,
    });
  }

  async stop() {}

  public async chatStream(input: ReadableStream<string>): Promise<ReadableStream<string>> {
    // TODO: implement for OpenAI

    const returnStream = new ReadableStream();
    return returnStream;
  }

  // Implementing the synchronous chat interaction
  public async chat(
    optionsArg: {
      systemMessage: string,
      userMessage: string,
      messageHistory: {
        role: 'assistant' | 'user';
        content: string;
      }[]
    }
  ) {
    const result = await this.openAiApiClient.chat.completions.create({
      model: 'gpt-4-turbo-preview',

      messages: [
        { role: 'system', content: optionsArg.systemMessage },
        ...optionsArg.messageHistory,
        { role: 'user', content: optionsArg.userMessage },
      ],
    });
    return {
      message: result.choices[0].message,
    };
  }

  public async audio(optionsArg: { message: string }): Promise<NodeJS.ReadableStream> {
    const done = plugins.smartpromise.defer<NodeJS.ReadableStream>();
    const result = await this.openAiApiClient.audio.speech.create({
      model: 'tts-1-hd',
      input: optionsArg.message,
      voice: 'nova',
      response_format: 'mp3',
      speed: 1,
    });
    const stream = result.body;
    done.resolve(stream);
    return done.promise;
  }

  public async document(optionsArg: {
    systemMessage: string,
    userMessage: string,
    documents: Uint8Array[],
    messageHistory: {
      role: 'assistant' | 'user';
      content: any;
    }[];
  }) {
    const result = await this.openAiApiClient.chat.completions.create({
      model: 'gpt-4-vision-preview',

      messages: [
        { role: 'system', content: optionsArg.systemMessage },
        ...optionsArg.messageHistory,
        { role: 'user', content: [
          {type: 'text', text: optionsArg.userMessage},
          ...(() => {
            const returnArray = [];
            for (const document of optionsArg.documents) {
              returnArray.push({type: 'image_url', image_url: })
            }
            return returnArray;
          })()
        ] },
      ],
    });
    return {
      message: result.choices[0].message,
    };
  }
}

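A usage sketch for the provider above, covering chat and audio as they are defined in the diff. Saving the MP3 uses plain Node fs rather than the package's own helpers, and the second constructor argument is passed as undefined since it is currently unused:

```typescript
import { createWriteStream } from 'fs';
import { OpenAiProvider } from './provider.openai.js';

// the second constructor parameter is currently unused in this changeset
const openAi = new OpenAiProvider('your-openai-access-token', undefined);
await openAi.start();

// chat: options object with system message, user message and prior history
const chatResult = await openAi.chat({
  systemMessage: 'You are a helpful assistant.',
  userMessage: 'Give me one sentence about streaming APIs.',
  messageHistory: [],
});
console.log(chatResult.message.content);

// audio: returns a Node readable stream of MP3 data
const audioStream = await openAi.audio({ message: 'Hello from smartai!' });
audioStream.pipe(createWriteStream('./hello.mp3'));
```
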
3 ts/provider.perplexity.ts Normal file

@@ -0,0 +1,3 @@
import * as plugins from './plugins.js';

export class PerplexityProvider {}

@@ -1,4 +0,0 @@
const removeme = {};
export {
  removeme
}