diff --git a/package.json b/package.json
index 0fbf1ce..7279c76 100644
--- a/package.json
+++ b/package.json
@@ -18,22 +18,22 @@
"@git.zone/tsbuild": "^4.1.2",
"@git.zone/tsbundle": "^2.8.1",
"@git.zone/tsrun": "^2.0.1",
- "@git.zone/tstest": "^3.1.4",
+ "@git.zone/tstest": "^3.1.6",
"@push.rocks/qenv": "^6.1.3",
- "@types/node": "^22.15.17",
+ "@types/node": "^25.0.9",
"typescript": "^5.9.3"
},
"dependencies": {
"@anthropic-ai/sdk": "^0.71.2",
- "@mistralai/mistralai": "^1.11.0",
+ "@mistralai/mistralai": "^1.12.0",
"@push.rocks/smartarray": "^1.1.0",
- "@push.rocks/smartfile": "^11.2.7",
+ "@push.rocks/smartfs": "^1.3.1",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpdf": "^4.1.1",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartrequest": "^5.0.1",
"@push.rocks/webstream": "^1.0.10",
- "openai": "^5.12.2"
+ "openai": "^6.16.0"
},
"repository": {
"type": "git",
diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index 02de80b..cd41a03 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -12,14 +12,14 @@ importers:
specifier: ^0.71.2
version: 0.71.2(zod@3.25.76)
'@mistralai/mistralai':
- specifier: ^1.11.0
- version: 1.11.0
+ specifier: ^1.12.0
+ version: 1.12.0
'@push.rocks/smartarray':
specifier: ^1.1.0
version: 1.1.0
- '@push.rocks/smartfile':
- specifier: ^11.2.7
- version: 11.2.7
+ '@push.rocks/smartfs':
+ specifier: ^1.3.1
+ version: 1.3.1
'@push.rocks/smartpath':
specifier: ^6.0.0
version: 6.0.0
@@ -36,8 +36,8 @@ importers:
specifier: ^1.0.10
version: 1.0.10
openai:
- specifier: ^5.12.2
- version: 5.12.2(ws@8.18.3)(zod@3.25.76)
+ specifier: ^6.16.0
+ version: 6.16.0(ws@8.18.3)(zod@3.25.76)
devDependencies:
'@git.zone/tsbuild':
specifier: ^4.1.2
@@ -49,14 +49,14 @@ importers:
specifier: ^2.0.1
version: 2.0.1
'@git.zone/tstest':
- specifier: ^3.1.4
- version: 3.1.4(@aws-sdk/credential-providers@3.808.0)(socks@2.8.4)(typescript@5.9.3)
+ specifier: ^3.1.6
+ version: 3.1.6(@aws-sdk/credential-providers@3.808.0)(socks@2.8.4)(typescript@5.9.3)
'@push.rocks/qenv':
specifier: ^6.1.3
version: 6.1.3
'@types/node':
- specifier: ^22.15.17
- version: 22.15.17
+ specifier: ^25.0.9
+ version: 25.0.9
typescript:
specifier: ^5.9.3
version: 5.9.3
@@ -816,8 +816,8 @@ packages:
resolution: {integrity: sha512-NEcnsjvlC1o3Z6SS3VhKCf6Ev+Sh4EAinmggslrIR/ppMrvjDbXNFXoyr3PB+GLeSAR0JRZ1fGvVYjpEzjBdIg==}
hasBin: true
- '@git.zone/tstest@3.1.4':
- resolution: {integrity: sha512-S7kubbb0yLYOh/QAzFsjG6a20lZiyNKo4pt0yK1yvd9I7X8Rw6/mCT/BicLkan7G7Nk7scUfxaK9+aFsHmdQdw==}
+ '@git.zone/tstest@3.1.6':
+ resolution: {integrity: sha512-xRGc6wO4rJ6mohPCMIBDRH+oNjiIvX6Jeo8v/Y5o5VyKSHFmqol7FCKSBrojMcqgBpESnLHFPJAAOmT9W3JV8Q==}
hasBin: true
'@happy-dom/global-registrator@15.11.7':
@@ -1135,8 +1135,8 @@ packages:
'@lit/reactive-element@2.1.1':
resolution: {integrity: sha512-N+dm5PAYdQ8e6UlywyyrgI2t++wFGXfHx+dSJ1oBrg6FAxUj40jId++EaRm80MKX5JnlH1sBsyZ5h0bcZKemCg==}
- '@mistralai/mistralai@1.11.0':
- resolution: {integrity: sha512-6/BVj2mcaggYbpMzNSxtqtM2Tv/Jb5845XFd2CMYFO+O5VBkX70iLjtkBBTI4JFhh1l9vTCIMYXBVOjLoBVHGQ==}
+ '@mistralai/mistralai@1.12.0':
+ resolution: {integrity: sha512-oDr1hcS3wsIT/QupBG93TNiA5kilwBYoAIyl5BNYqMM2Ix/xsNq+wT8b++uhp/GTUMx44n+8Bn1mkATbwxe6bQ==}
'@mixmark-io/domino@2.2.0':
resolution: {integrity: sha512-Y28PR25bHXUg88kCV7nivXrP2Nj2RueZ3/l/jdx6J9f8J4nsEGcgX0Qe6lt7Pa+J79+kPiJU3LguR6O/6zrLOw==}
@@ -2497,8 +2497,11 @@ packages:
'@types/node@16.9.1':
resolution: {integrity: sha512-QpLcX9ZSsq3YYUUnD3nFDY8H7wctAhQj/TFKL8Ya8v5fMm3CFXxo8zStsLAl780ltoYoo1WvKUVGBQK+1ifr7g==}
- '@types/node@22.15.17':
- resolution: {integrity: sha512-wIX2aSZL5FE+MR0JlvF87BNVrtFWf6AE6rxSE9X7OwnVvoyCQjpzSRJ+M87se/4QCkCiebQAqrJ0y6fwIyi7nw==}
+ '@types/node@22.19.7':
+ resolution: {integrity: sha512-MciR4AKGHWl7xwxkBa6xUGxQJ4VBOmPTF7sL+iGzuahOFaO0jHCsuEfS80pan1ef4gWId1oWOweIhrDEYLuaOw==}
+
+ '@types/node@25.0.9':
+ resolution: {integrity: sha512-/rpCXHlCWeqClNBwUhDcusJxXYDjZTyE8v5oTO7WbL8eij2nKhUeU89/6xgjU7N4/Vh3He0BtyhJdQbDyhiXAw==}
'@types/ping@0.4.4':
resolution: {integrity: sha512-ifvo6w2f5eJYlXm+HiVx67iJe8WZp87sfa683nlqED5Vnt9Z93onkokNoWqOG21EaE8fMxyKPobE+mkPEyxsdw==}
@@ -4025,12 +4028,12 @@ packages:
resolution: {integrity: sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==}
engines: {node: '>=12'}
- openai@5.12.2:
- resolution: {integrity: sha512-xqzHHQch5Tws5PcKR2xsZGX9xtch+JQFz5zb14dGqlshmmDAFBFEWmeIpf7wVqWV+w7Emj7jRgkNJakyKE0tYQ==}
+ openai@6.16.0:
+ resolution: {integrity: sha512-fZ1uBqjFUjXzbGc35fFtYKEOxd20kd9fDpFeqWtsOZWiubY8CZ1NAlXHW3iathaFvqmNtCWMIsosCuyeI7Joxg==}
hasBin: true
peerDependencies:
ws: ^8.18.0
- zod: ^3.23.8
+ zod: ^3.25 || ^4.0
peerDependenciesMeta:
ws:
optional: true
@@ -4721,6 +4724,9 @@ packages:
undici-types@6.21.0:
resolution: {integrity: sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==}
+ undici-types@7.16.0:
+ resolution: {integrity: sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==}
+
unified@11.0.5:
resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==}
@@ -6917,7 +6923,7 @@ snapshots:
'@push.rocks/smartshell': 3.3.0
tsx: 4.21.0
- '@git.zone/tstest@3.1.4(@aws-sdk/credential-providers@3.808.0)(socks@2.8.4)(typescript@5.9.3)':
+ '@git.zone/tstest@3.1.6(@aws-sdk/credential-providers@3.808.0)(socks@2.8.4)(typescript@5.9.3)':
dependencies:
'@api.global/typedserver': 3.0.80
'@git.zone/tsbundle': 2.8.1
@@ -7071,7 +7077,7 @@ snapshots:
'@inquirer/figures': 1.0.15
'@inquirer/type': 2.0.0
'@types/mute-stream': 0.0.4
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/wrap-ansi': 3.0.0
ansi-escapes: 4.3.2
cli-width: 4.1.0
@@ -7361,7 +7367,7 @@ snapshots:
dependencies:
'@lit-labs/ssr-dom-shim': 1.4.0
- '@mistralai/mistralai@1.11.0':
+ '@mistralai/mistralai@1.12.0':
dependencies:
zod: 3.25.76
zod-to-json-schema: 3.25.1(zod@3.25.76)
@@ -9651,27 +9657,27 @@ snapshots:
'@types/bn.js@5.2.0':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/body-parser@1.19.6':
dependencies:
'@types/connect': 3.4.38
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/buffer-json@2.0.3': {}
'@types/clean-css@4.2.11':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
source-map: 0.6.1
'@types/connect@3.4.38':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/cors@2.8.19':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/debug@4.1.12':
dependencies:
@@ -9681,7 +9687,7 @@ snapshots:
'@types/dns-packet@5.6.5':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/elliptic@6.4.18':
dependencies:
@@ -9689,7 +9695,7 @@ snapshots:
'@types/express-serve-static-core@5.0.7':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/qs': 6.14.0
'@types/range-parser': 1.2.7
'@types/send': 0.17.5
@@ -9713,16 +9719,16 @@ snapshots:
'@types/fs-extra@11.0.4':
dependencies:
'@types/jsonfile': 6.1.4
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/fs-extra@9.0.13':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/glob@7.2.0':
dependencies:
'@types/minimatch': 5.1.2
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/hast@3.0.4':
dependencies:
@@ -9744,7 +9750,7 @@ snapshots:
'@types/jsonfile@6.1.4':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/mdast@4.0.4':
dependencies:
@@ -9762,18 +9768,22 @@ snapshots:
'@types/mute-stream@0.0.4':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/node-forge@1.3.11':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/node@16.9.1': {}
- '@types/node@22.15.17':
+ '@types/node@22.19.7':
dependencies:
undici-types: 6.21.0
+ '@types/node@25.0.9':
+ dependencies:
+ undici-types: 7.16.0
+
'@types/ping@0.4.4': {}
'@types/qs@6.14.0': {}
@@ -9789,28 +9799,28 @@ snapshots:
'@types/send@0.17.5':
dependencies:
'@types/mime': 1.3.5
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/serve-static@1.15.8':
dependencies:
'@types/http-errors': 2.0.5
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/send': 0.17.5
'@types/serve-static@2.2.0':
dependencies:
'@types/http-errors': 2.0.5
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/symbol-tree@3.2.5': {}
'@types/tar-stream@3.1.4':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/through2@2.0.41':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/trusted-types@2.0.7': {}
@@ -9832,7 +9842,7 @@ snapshots:
'@types/whatwg-url@8.2.2':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/webidl-conversions': 7.0.3
'@types/which@3.0.4': {}
@@ -9841,11 +9851,11 @@ snapshots:
'@types/ws@8.18.1':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
'@types/yauzl@2.10.3':
dependencies:
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
optional: true
'@ungap/structured-clone@1.3.0': {}
@@ -10305,7 +10315,7 @@ snapshots:
engine.io@6.6.4:
dependencies:
'@types/cors': 2.8.19
- '@types/node': 22.15.17
+ '@types/node': 22.19.7
accepts: 1.3.8
base64id: 2.0.0
cookie: 0.7.2
@@ -11572,7 +11582,7 @@ snapshots:
is-docker: 2.2.1
is-wsl: 2.2.0
- openai@5.12.2(ws@8.18.3)(zod@3.25.76):
+ openai@6.16.0(ws@8.18.3)(zod@3.25.76):
optionalDependencies:
ws: 8.18.3
zod: 3.25.76
@@ -12367,6 +12377,8 @@ snapshots:
undici-types@6.21.0: {}
+ undici-types@7.16.0: {}
+
unified@11.0.5:
dependencies:
'@types/unist': 3.0.3
diff --git a/readme.hints.md b/readme.hints.md
index 2392bd5..9b1c165 100644
--- a/readme.hints.md
+++ b/readme.hints.md
@@ -3,9 +3,10 @@
## Dependencies
- Uses `@git.zone/tstest` v3.x for testing (import from `@git.zone/tstest/tapbundle`)
-- `@push.rocks/smartfile` is kept at v11 to avoid migration to factory pattern
+- `@push.rocks/smartfs` v1.x for file system operations (replaced smartfile)
- `@anthropic-ai/sdk` v0.71.x with extended thinking support
- `@mistralai/mistralai` v1.x for Mistral OCR and chat capabilities
+- `openai` v6.x for OpenAI API integration
- `@push.rocks/smartrequest` v5.x - uses `response.stream()` + `Readable.fromWeb()` for streaming
## Important Notes
diff --git a/readme.md b/readme.md
index 88c9ced..72cbf6e 100644
--- a/readme.md
+++ b/readme.md
@@ -6,7 +6,7 @@
[](https://www.typescriptlang.org/)
[](https://opensource.org/licenses/MIT)
-SmartAI unifies the world's leading AI providers - OpenAI, Anthropic, Perplexity, Ollama, Groq, XAI, Exo, and ElevenLabs - under a single, elegant TypeScript interface. Build AI applications at lightning speed without vendor lock-in.
+SmartAI unifies the world's leading AI providers - OpenAI, Anthropic, Mistral, Perplexity, Ollama, Groq, XAI, Exo, and ElevenLabs - under a single, elegant TypeScript interface. Build AI applications at lightning speed without vendor lock-in.
## Issue Reporting and Security
@@ -58,6 +58,7 @@ Choose the right provider for your use case:
| -------------- | :--: | :-------: | :-: | :----: | :-------: | :------: | :----: | --------------------------------------------------------------- |
| **OpenAI** | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | ✅ | • gpt-image-1<br>• DALL-E 3<br>• Deep research API |
| **Anthropic** | ✅ | ✅ | ❌ | ✅ | ✅ | ✅ | ❌ | • Claude Sonnet 4.5<br>• Superior reasoning<br>• Web search API |
+| **Mistral** | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | ❌ | • Native PDF OCR<br>• mistral-large<br>• Fast inference |
| **ElevenLabs** | ❌ | ❌ | ✅ | ❌ | ❌ | ❌ | ❌ | • Premium TTS<br>• 70+ languages<br>• Natural voices |
| **Ollama** | ✅ | ✅ | ❌ | ✅ | ✅ | ❌ | ❌ | • 100% local<br>• Privacy-first<br>• No API costs |
| **XAI** | ✅ | ✅ | ❌ | ❌ | ✅ | ❌ | ❌ | • Grok models<br>• Real-time data<br>• Uncensored |
@@ -282,6 +283,38 @@ const response = await anthropic.chat({
- Use `'quick'` for simple factual queries where deep reasoning isn't needed
- Thinking budget counts against total token usage
+### 📑 Native PDF OCR (Mistral)
+
+Mistral provides native PDF document processing via its OCR API - no image conversion required:
+
+```typescript
+import { MistralProvider } from '@push.rocks/smartai';
+
+const mistral = new MistralProvider({
+ mistralToken: 'your-api-key',
+ chatModel: 'mistral-large-latest', // Default
+ ocrModel: 'mistral-ocr-latest', // Default
+ tableFormat: 'markdown', // 'markdown' | 'html'
+});
+
+await mistral.start();
+
+// Direct PDF processing - no image conversion overhead
+const result = await mistral.document({
+ systemMessage: 'You are a document analyst.',
+ userMessage: 'Extract all invoice details and calculate the total.',
+ pdfDocuments: [invoicePdfBuffer],
+ messageHistory: [],
+});
+```
+
+**Key Advantage**: Unlike other providers that convert PDFs to images first, Mistral's OCR API processes PDFs natively, potentially offering faster and more accurate text extraction for document-heavy workloads.
+
+**Supported Formats:**
+- Native PDF processing via Files API
+- Image OCR (JPEG, PNG, GIF, WebP) for vision tasks
+- Table extraction with markdown or HTML output (see the sketch below)
+
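+For table-heavy documents, the same `document()` call can return tables as HTML instead of markdown by constructing the provider with `tableFormat: 'html'`. A minimal sketch (the prompt and the `reportPdfBuffer` variable are placeholders):
+
+```typescript
+import { MistralProvider } from '@push.rocks/smartai';
+
+const htmlMistral = new MistralProvider({
+  mistralToken: 'your-api-key',
+  tableFormat: 'html', // emit extracted tables as HTML instead of markdown
+});
+
+await htmlMistral.start();
+
+const tableResult = await htmlMistral.document({
+  systemMessage: 'You are a document analyst.',
+  userMessage: 'Return every table found in this report.',
+  pdfDocuments: [reportPdfBuffer], // placeholder: a Buffer/Uint8Array holding the PDF
+  messageHistory: [],
+});
+```
+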
### 🎨 Image Generation & Editing
Generate and edit images with OpenAI's cutting-edge models:
@@ -645,6 +678,7 @@ export ELEVENLABS_API_KEY=sk-...
| --------------------- | -------------------- | --------------------------------------------------------- |
| **General Purpose** | OpenAI | Most features, stable, well-documented |
| **Complex Reasoning** | Anthropic | Superior logical thinking, safer outputs |
+| **Document OCR** | Mistral | Native PDF processing, no image conversion overhead |
| **Research & Facts** | Perplexity | Web-aware, provides citations |
| **Deep Research** | OpenAI | Deep Research API with comprehensive analysis |
| **Premium TTS** | ElevenLabs | Most natural voices, 70+ languages, superior quality (v3) |
diff --git a/test/test.audio.elevenlabs.ts b/test/test.audio.elevenlabs.ts
index 27404d7..1c69d09 100644
--- a/test/test.audio.elevenlabs.ts
+++ b/test/test.audio.elevenlabs.ts
@@ -1,8 +1,9 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
import * as smartai from '../ts/index.js';
@@ -27,7 +28,7 @@ tap.test('ElevenLabs Audio: should create audio response', async () => {
chunks.push(chunk as Uint8Array);
}
const audioBuffer = Buffer.concat(chunks);
- await smartfile.fs.toFs(audioBuffer, './.nogit/testoutput_elevenlabs.mp3');
+ await smartfs.file('./.nogit/testoutput_elevenlabs.mp3').write(audioBuffer);
console.log(`Audio Buffer length: ${audioBuffer.length}`);
expect(audioBuffer.length).toBeGreaterThan(0);
});
@@ -42,7 +43,7 @@ tap.test('ElevenLabs Audio: should create audio with custom voice', async () =>
chunks.push(chunk as Uint8Array);
}
const audioBuffer = Buffer.concat(chunks);
- await smartfile.fs.toFs(audioBuffer, './.nogit/testoutput_elevenlabs_custom.mp3');
+ await smartfs.file('./.nogit/testoutput_elevenlabs_custom.mp3').write(audioBuffer);
console.log(`Audio Buffer length (custom voice): ${audioBuffer.length}`);
expect(audioBuffer.length).toBeGreaterThan(0);
});
diff --git a/test/test.audio.openai.ts b/test/test.audio.openai.ts
index af9acda..5cf8c5e 100644
--- a/test/test.audio.openai.ts
+++ b/test/test.audio.openai.ts
@@ -1,8 +1,9 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
import * as smartai from '../ts/index.js';
@@ -26,7 +27,7 @@ tap.test('OpenAI Audio: should create audio response', async () => {
chunks.push(chunk as Uint8Array);
}
const audioBuffer = Buffer.concat(chunks);
- await smartfile.fs.toFs(audioBuffer, './.nogit/testoutput.mp3');
+ await smartfs.file('./.nogit/testoutput.mp3').write(audioBuffer);
console.log(`Audio Buffer length: ${audioBuffer.length}`);
// Assert that the resulting buffer is not empty.
expect(audioBuffer.length).toBeGreaterThan(0);
diff --git a/test/test.document.anthropic.ts b/test/test.document.anthropic.ts
index 60b530e..cb70566 100644
--- a/test/test.document.anthropic.ts
+++ b/test/test.document.anthropic.ts
@@ -1,9 +1,10 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
import * as smartai from '../ts/index.js';
@@ -41,7 +42,7 @@ tap.test('Anthropic Document: should handle complex document analysis', async ()
let pdfBuffer: Uint8Array;
try {
- pdfBuffer = await smartfile.fs.toBuffer(pdfPath);
+ pdfBuffer = await smartfs.file(pdfPath).read();
} catch (error) {
// If the file doesn't exist, use the dummy PDF
console.log('Demo PDF not found, using dummy PDF instead');
diff --git a/test/test.document.mistral.ts b/test/test.document.mistral.ts
index 79e3e60..49bf908 100644
--- a/test/test.document.mistral.ts
+++ b/test/test.document.mistral.ts
@@ -1,9 +1,10 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
import * as smartai from '../ts/index.js';
@@ -42,7 +43,7 @@ tap.test('Mistral Document: should handle complex document analysis', async () =
let pdfBuffer: Uint8Array;
try {
- pdfBuffer = await smartfile.fs.toBuffer(pdfPath);
+ pdfBuffer = await smartfs.file(pdfPath).read();
} catch (error) {
// If the file doesn't exist, use the dummy PDF
console.log('Demo PDF not found, using dummy PDF instead');
diff --git a/test/test.document.openai.ts b/test/test.document.openai.ts
index 4a86efd..9f60d24 100644
--- a/test/test.document.openai.ts
+++ b/test/test.document.openai.ts
@@ -1,9 +1,10 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
import * as smartrequest from '@push.rocks/smartrequest';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
import * as smartai from '../ts/index.js';
@@ -32,7 +33,7 @@ tap.test('OpenAI Document: should document a pdf', async () => {
});
tap.test('OpenAI Document: should recognize companies in a pdf', async () => {
- const pdfBuffer = await smartfile.fs.toBuffer('./.nogit/demo_without_textlayer.pdf');
+ const pdfBuffer = await smartfs.file('./.nogit/demo_without_textlayer.pdf').read();
const result = await testSmartai.openaiProvider.document({
systemMessage: `
summarize the document.
diff --git a/test/test.vision.anthropic.ts b/test/test.vision.anthropic.ts
index fdc1007..acf53b7 100644
--- a/test/test.vision.anthropic.ts
+++ b/test/test.vision.anthropic.ts
@@ -1,8 +1,9 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as qenv from '@push.rocks/qenv';
-import * as smartfile from '@push.rocks/smartfile';
+import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
const testQenv = new qenv.Qenv('./', './.nogit/');
+const smartfs = new SmartFs(new SmartFsProviderNode());
import * as smartai from '../ts/index.js';
@@ -21,7 +22,7 @@ tap.test('Anthropic Vision: should analyze coffee image with latte art', async (
const imagePath = './test/testimages/coffee-dani/coffee.jpg';
console.log(`Loading coffee image from: ${imagePath}`);
- const imageBuffer = await smartfile.fs.toBuffer(imagePath);
+ const imageBuffer = await smartfs.file(imagePath).read();
console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
const result = await anthropicProvider.vision({
@@ -45,7 +46,7 @@ tap.test('Anthropic Vision: should analyze laptop/workspace image', async () =>
const imagePath = './test/testimages/laptop-nicolas/laptop.jpg';
console.log(`Loading laptop image from: ${imagePath}`);
- const imageBuffer = await smartfile.fs.toBuffer(imagePath);
+ const imageBuffer = await smartfs.file(imagePath).read();
console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
const result = await anthropicProvider.vision({
@@ -69,7 +70,7 @@ tap.test('Anthropic Vision: should analyze receipt/document image', async () =>
const imagePath = './test/testimages/receipt-annie/receipt.jpg';
console.log(`Loading receipt image from: ${imagePath}`);
- const imageBuffer = await smartfile.fs.toBuffer(imagePath);
+ const imageBuffer = await smartfs.file(imagePath).read();
console.log(`Image loaded, size: ${imageBuffer.length} bytes`);
const result = await anthropicProvider.vision({
diff --git a/ts/plugins.ts b/ts/plugins.ts
index 16c9d9d..881963f 100644
--- a/ts/plugins.ts
+++ b/ts/plugins.ts
@@ -8,7 +8,7 @@ export {
// @push.rocks scope
import * as qenv from '@push.rocks/qenv';
import * as smartarray from '@push.rocks/smartarray';
-import * as smartfile from '@push.rocks/smartfile';
+import * as smartfs from '@push.rocks/smartfs';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpdf from '@push.rocks/smartpdf';
import * as smartpromise from '@push.rocks/smartpromise';
@@ -18,7 +18,7 @@ import * as webstream from '@push.rocks/webstream';
export {
smartarray,
qenv,
- smartfile,
+ smartfs,
smartpath,
smartpdf,
smartpromise,
diff --git a/ts/provider.ollama.ts b/ts/provider.ollama.ts
index e270305..aa59b09 100644
--- a/ts/provider.ollama.ts
+++ b/ts/provider.ollama.ts
@@ -12,10 +12,60 @@ import type {
ImageResponse
} from './abstract.classes.multimodal.js';
+/**
+ * Ollama model runtime options
+ * @see https://github.com/ollama/ollama/blob/main/docs/modelfile.md
+ */
+export interface IOllamaModelOptions {
+ num_ctx?: number; // Context window (default: 2048)
+ temperature?: number; // 0 = deterministic (default: 0.8)
+ top_k?: number; // Top-k sampling (default: 40)
+ top_p?: number; // Nucleus sampling (default: 0.9)
+ repeat_penalty?: number; // Repeat penalty (default: 1.1)
+ num_predict?: number; // Max tokens to predict
+ stop?: string[]; // Stop sequences
+ seed?: number; // Random seed for reproducibility
+}
+
export interface IOllamaProviderOptions {
baseUrl?: string;
model?: string;
visionModel?: string; // Model to use for vision tasks (e.g. 'llava')
+ defaultOptions?: IOllamaModelOptions; // Default model options
+ defaultTimeout?: number; // Default timeout in ms (default: 120000)
+}
+
+/**
+ * Extended chat options with Ollama-specific settings
+ */
+export interface IOllamaChatOptions extends ChatOptions {
+ options?: IOllamaModelOptions; // Per-request model options
+ timeout?: number; // Per-request timeout in ms
+ model?: string; // Per-request model override
+}
+
+/**
+ * Chunk emitted during streaming
+ */
+export interface IOllamaStreamChunk {
+ content: string;
+ thinking?: string; // For models with extended thinking
+ done: boolean;
+ stats?: {
+ totalDuration?: number;
+ evalCount?: number;
+ };
+}
+
+/**
+ * Extended chat response with Ollama-specific fields
+ */
+export interface IOllamaChatResponse extends ChatResponse {
+ thinking?: string;
+ stats?: {
+ totalDuration?: number;
+ evalCount?: number;
+ };
}
export class OllamaProvider extends MultiModalModel {
@@ -23,6 +73,8 @@ export class OllamaProvider extends MultiModalModel {
private baseUrl: string;
private model: string;
private visionModel: string;
+ private defaultOptions: IOllamaModelOptions;
+ private defaultTimeout: number;
constructor(optionsArg: IOllamaProviderOptions = {}) {
super();
@@ -30,6 +82,8 @@ export class OllamaProvider extends MultiModalModel {
this.baseUrl = optionsArg.baseUrl || 'http://localhost:11434';
this.model = optionsArg.model || 'llama2';
this.visionModel = optionsArg.visionModel || 'llava';
+ this.defaultOptions = optionsArg.defaultOptions || {};
+ this.defaultTimeout = optionsArg.defaultTimeout || 120000;
}
async start() {
@@ -154,7 +208,7 @@ export class OllamaProvider extends MultiModalModel {
{ role: 'user', content: optionsArg.userMessage }
];
- // Make API call to Ollama
+ // Make API call to Ollama with defaultOptions and timeout
const response = await fetch(`${this.baseUrl}/api/chat`, {
method: 'POST',
headers: {
@@ -163,8 +217,10 @@ export class OllamaProvider extends MultiModalModel {
body: JSON.stringify({
model: this.model,
messages: messages,
- stream: false
+ stream: false,
+ options: this.defaultOptions,
}),
+ signal: AbortSignal.timeout(this.defaultTimeout),
});
if (!response.ok) {
@@ -172,13 +228,150 @@ export class OllamaProvider extends MultiModalModel {
}
const result = await response.json();
-
+
return {
role: 'assistant' as const,
message: result.message.content,
};
}
+ /**
+ * Streaming chat with async iteration and options support
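+ *
+ * Usage sketch (assumes a reachable Ollama server and a pulled model):
+ *   const stream = await provider.chatStreamResponse({
+ *     systemMessage: 'Be brief.',
+ *     userMessage: 'Why is the sky blue?',
+ *     messageHistory: [],
+ *   });
+ *   for await (const chunk of stream) process.stdout.write(chunk.content);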
+ */
+ public async chatStreamResponse(
+ optionsArg: IOllamaChatOptions
+ ): Promise<AsyncIterable<IOllamaStreamChunk>> {
+ const model = optionsArg.model || this.model;
+ const timeout = optionsArg.timeout || this.defaultTimeout;
+ const modelOptions = { ...this.defaultOptions, ...optionsArg.options };
+
+ const messages = [
+ { role: 'system', content: optionsArg.systemMessage },
+ ...optionsArg.messageHistory,
+ { role: 'user', content: optionsArg.userMessage }
+ ];
+
+ const response = await fetch(`${this.baseUrl}/api/chat`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ model,
+ messages,
+ stream: true,
+ options: modelOptions,
+ }),
+ signal: AbortSignal.timeout(timeout),
+ });
+
+ if (!response.ok) {
+ throw new Error(`Ollama API error: ${response.status}`);
+ }
+
+ const reader = response.body!.getReader();
+ const decoder = new TextDecoder();
+
+ return {
+ [Symbol.asyncIterator]: async function* () {
+ let buffer = '';
+ try {
+ while (true) {
+ const { done, value } = await reader.read();
+ if (done) break;
+ buffer += decoder.decode(value, { stream: true });
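+ // Ollama streams newline-delimited JSON; split on complete lines and keep any partial tail in the buffer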
+ const lines = buffer.split('\n');
+ buffer = lines.pop() || '';
+ for (const line of lines) {
+ if (!line.trim()) continue;
+ try {
+ const json = JSON.parse(line);
+ yield {
+ content: json.message?.content || '',
+ thinking: json.message?.thinking,
+ done: json.done || false,
+ stats: json.done ? {
+ totalDuration: json.total_duration,
+ evalCount: json.eval_count,
+ } : undefined,
+ } as IOllamaStreamChunk;
+ } catch { /* skip malformed */ }
+ }
+ }
+ } finally {
+ reader.releaseLock();
+ }
+ }
+ };
+ }
+
+ /**
+ * Stream and collect full response with optional progress callback
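+ *
+ * Usage sketch (the callback below just echoes tokens as they arrive):
+ *   const res = await provider.collectStreamResponse(
+ *     { systemMessage: 'Be brief.', userMessage: 'Hello', messageHistory: [], options: { temperature: 0 } },
+ *     (chunk) => process.stdout.write(chunk.content),
+ *   );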
+ */
+ public async collectStreamResponse(
+ optionsArg: IOllamaChatOptions,
+ onChunk?: (chunk: IOllamaStreamChunk) => void
+ ): Promise<IOllamaChatResponse> {
+ const stream = await this.chatStreamResponse(optionsArg);
+ let content = '';
+ let thinking = '';
+ let stats: IOllamaChatResponse['stats'];
+
+ for await (const chunk of stream) {
+ if (chunk.content) content += chunk.content;
+ if (chunk.thinking) thinking += chunk.thinking;
+ if (chunk.stats) stats = chunk.stats;
+ if (onChunk) onChunk(chunk);
+ }
+
+ return {
+ role: 'assistant' as const,
+ message: content,
+ thinking: thinking || undefined,
+ stats,
+ };
+ }
+
+ /**
+ * Non-streaming chat with full options support
+ */
+ public async chatWithOptions(optionsArg: IOllamaChatOptions): Promise<IOllamaChatResponse> {
+ const model = optionsArg.model || this.model;
+ const timeout = optionsArg.timeout || this.defaultTimeout;
+ const modelOptions = { ...this.defaultOptions, ...optionsArg.options };
+
+ const messages = [
+ { role: 'system', content: optionsArg.systemMessage },
+ ...optionsArg.messageHistory,
+ { role: 'user', content: optionsArg.userMessage }
+ ];
+
+ const response = await fetch(`${this.baseUrl}/api/chat`, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ model,
+ messages,
+ stream: false,
+ options: modelOptions,
+ }),
+ signal: AbortSignal.timeout(timeout),
+ });
+
+ if (!response.ok) {
+ throw new Error(`Ollama API error: ${response.statusText}`);
+ }
+
+ const result = await response.json();
+ return {
+ role: 'assistant' as const,
+ message: result.message.content,
+ thinking: result.message.thinking,
+ stats: {
+ totalDuration: result.total_duration,
+ evalCount: result.eval_count,
+ },
+ };
+ }
+
public async audio(optionsArg: { message: string }): Promise {
throw new Error('Audio generation is not supported by Ollama.');
}