feat(rust-provider): Add Rust-backed provider with XFS-safe durability via IPC bridge, TypeScript provider, tests and docs
This commit is contained in:
3
.gitignore
vendored
3
.gitignore
vendored
@@ -20,4 +20,7 @@ dist_*/
|
||||
.claude/
|
||||
.serena/
|
||||
|
||||
# rust
|
||||
rust/target/
|
||||
|
||||
#------# custom
|
||||
10
changelog.md
10
changelog.md
@@ -1,5 +1,15 @@
|
||||
# Changelog
|
||||
|
||||
## 2026-03-05 - 1.4.0 - feat(rust-provider)
|
||||
Add Rust-backed provider with XFS-safe durability via IPC bridge, TypeScript provider, tests and docs
|
||||
|
||||
- Add Rust workspace and crates (smartfs-protocol, smartfs-core, smartfs-bin) with Cargo.toml and Cargo.lock
|
||||
- Implement filesystem operations in Rust with XFS-safe parent fsyncs, streaming, watch support and IPC protocol types (smartfs-protocol)
|
||||
- Add Rust binary (smartfs-bin) implementing management/IPC mode and core ops, plus watch manager and write-stream handling
|
||||
- Add TypeScript bridge/provider (ts/providers/smartfs.provider.rust.ts), export provider from ts/index.ts, and include @push.rocks/smartrust in plugins
|
||||
- Add integration tests for the Rust provider (test/test.rust.provider.node+bun.ts)
|
||||
- Update packaging and tooling: package.json scripts and devDependencies (tsrust added/updated), npmextra.json target entry, .gitignore rust/target, and README updates
|
||||
|
||||
## 2026-03-05 - 1.3.3 - fix(smartfs.provider.node)
|
||||
replace synchronous readdirSync with async await fs.readdir for directory listings in the Node provider to avoid blocking the event loop
|
||||
|
||||
|
||||
@@ -18,6 +18,9 @@
|
||||
"accessLevel": "public"
|
||||
}
|
||||
},
|
||||
"@git.zone/tsrust": {
|
||||
"targets": ["linux_amd64"]
|
||||
},
|
||||
"@ship.zone/szci": {
|
||||
"npmGlobalTools": []
|
||||
}
|
||||
|
||||
16
package.json
16
package.json
@@ -10,16 +10,17 @@
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "(tstest test/ --verbose --logfile --timeout 120)",
|
||||
"build": "(tsbuild --web --allowimplicitany)",
|
||||
"build": "(tsbuild --web --allowimplicitany && tsrust)",
|
||||
"buildDocs": "(tsdoc)"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@git.zone/tsbuild": "^3.1.0",
|
||||
"@git.zone/tsbundle": "^2.0.5",
|
||||
"@git.zone/tsrun": "^2.0.0",
|
||||
"@git.zone/tstest": "^3.1.1",
|
||||
"@git.zone/tsbuild": "^4.2.6",
|
||||
"@git.zone/tsbundle": "^2.9.1",
|
||||
"@git.zone/tsrun": "^2.0.1",
|
||||
"@git.zone/tsrust": "^1.3.0",
|
||||
"@git.zone/tstest": "^3.2.0",
|
||||
"@push.rocks/tapbundle": "^6.0.3",
|
||||
"@types/node": "^20.8.7"
|
||||
"@types/node": "^25.3.3"
|
||||
},
|
||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34",
|
||||
"repository": {
|
||||
@@ -46,6 +47,7 @@
|
||||
"overrides": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"@push.rocks/smartpath": "^6.0.0"
|
||||
"@push.rocks/smartpath": "^6.0.0",
|
||||
"@push.rocks/smartrust": "^1.3.1"
|
||||
}
|
||||
}
|
||||
|
||||
3468
pnpm-lock.yaml
generated
3468
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
349
readme.md
349
readme.md
@@ -1,6 +1,6 @@
|
||||
# @push.rocks/smartfs
|
||||
|
||||
Modern, pluggable filesystem module with fluent API, Web Streams support, and multiple storage backends.
|
||||
Modern, pluggable filesystem module with fluent API, Web Streams, Rust-powered durability, and multiple storage backends.
|
||||
|
||||
## Issue Reporting and Security
|
||||
|
||||
@@ -8,15 +8,15 @@ For reporting bugs, issues, or security vulnerabilities, please visit [community
|
||||
|
||||
## Features
|
||||
|
||||
- 🎯 **Fluent API** - Action-last chainable interface for elegant code
|
||||
- 🔌 **Pluggable Providers** - Support for multiple storage backends (Node.js fs, memory, S3, etc.)
|
||||
- 🌊 **Web Streams** - Modern streaming with Web Streams API
|
||||
- 💾 **Transactions** - Atomic multi-file operations with automatic rollback
|
||||
- 👀 **File Watching** - Event-based file system monitoring
|
||||
- 🔐 **Tree Hashing** - SHA-256 directory hashing for cache-busting
|
||||
- ⚡ **Async-Only** - Modern async/await patterns throughout
|
||||
- 📦 **Zero Dependencies** - Core functionality with minimal footprint
|
||||
- 🎨 **TypeScript** - Full type safety and IntelliSense support
|
||||
- 🎯 **Fluent API** — Action-last chainable interface for elegant, readable code
|
||||
- 🔌 **Pluggable Providers** — Swap backends (Node.js fs, in-memory, Rust) without changing a line of application code
|
||||
- 🦀 **Rust Provider** — XFS-safe `fsync` durability, cross-compiled binary via IPC for production-grade reliability
|
||||
- 🌊 **Web Streams** — True chunked streaming with the Web Streams API (including over IPC for the Rust provider)
|
||||
- 💾 **Transactions** — Atomic multi-file operations with automatic rollback on failure
|
||||
- 👀 **File Watching** — Event-based filesystem monitoring with debounce, filters, and recursive watching
|
||||
- 🔐 **Tree Hashing** — Deterministic SHA-256 directory hashing for cache-busting and change detection
|
||||
- ⚡ **Async-Only** — Modern `async`/`await` patterns throughout — no sync footguns
|
||||
- 🎨 **TypeScript-First** — Full type safety, IntelliSense, and exported interfaces
|
||||
|
||||
## Installation
|
||||
|
||||
@@ -31,14 +31,15 @@ pnpm add @push.rocks/smartfs
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||
|
||||
// Create a SmartFS instance with Node.js provider
|
||||
// Create a SmartFS instance with the Node.js provider
|
||||
const fs = new SmartFs(new SmartFsProviderNode());
|
||||
|
||||
// Write and read files with fluent API
|
||||
// Write a file
|
||||
await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.write('Hello, World!');
|
||||
|
||||
// Read it back
|
||||
const content = await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
@@ -48,88 +49,78 @@ console.log(content); // "Hello, World!"
|
||||
|
||||
## API Overview
|
||||
|
||||
### File Operations
|
||||
### 📄 File Operations
|
||||
|
||||
The fluent API uses **action-last pattern** - configure first, then execute:
|
||||
The fluent API uses an **action-last pattern** — configure first, then execute:
|
||||
|
||||
```typescript
|
||||
// Read file
|
||||
// Read
|
||||
const content = await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
|
||||
// Write file
|
||||
// Write
|
||||
await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.mode(0o644)
|
||||
.write('content');
|
||||
|
||||
// Atomic write (write to temp, then rename)
|
||||
// Atomic write (write to temp file, then rename — crash-safe)
|
||||
await fs.file('/path/to/file.txt')
|
||||
.atomic()
|
||||
.write('content');
|
||||
|
||||
// Append to file
|
||||
// Append
|
||||
await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.append('more content');
|
||||
|
||||
// Copy file
|
||||
// Copy with preserved timestamps
|
||||
await fs.file('/source.txt')
|
||||
.preserveTimestamps()
|
||||
.copy('/destination.txt');
|
||||
|
||||
// Move file
|
||||
await fs.file('/old.txt')
|
||||
.move('/new.txt');
|
||||
// Move / rename
|
||||
await fs.file('/old.txt').move('/new.txt');
|
||||
|
||||
// Delete file
|
||||
await fs.file('/path/to/file.txt')
|
||||
.delete();
|
||||
// Delete
|
||||
await fs.file('/path/to/file.txt').delete();
|
||||
|
||||
// Check existence
|
||||
// Existence check
|
||||
const exists = await fs.file('/path/to/file.txt').exists();
|
||||
|
||||
// Get stats
|
||||
// Stats (size, timestamps, permissions, etc.)
|
||||
const stats = await fs.file('/path/to/file.txt').stat();
|
||||
```
|
||||
|
||||
### Directory Operations
|
||||
### 📂 Directory Operations
|
||||
|
||||
```typescript
|
||||
// Create directory
|
||||
await fs.directory('/path/to/dir').create();
|
||||
// Create directory (recursive by default)
|
||||
await fs.directory('/path/to/nested/dir').create();
|
||||
|
||||
// Create nested directories
|
||||
await fs.directory('/path/to/nested/dir')
|
||||
.recursive()
|
||||
.create();
|
||||
|
||||
// List directory
|
||||
// List contents
|
||||
const entries = await fs.directory('/path/to/dir').list();
|
||||
|
||||
// List recursively with filter
|
||||
const tsFiles = await fs.directory('/path/to/dir')
|
||||
// List recursively with glob filter and stats
|
||||
const tsFiles = await fs.directory('/src')
|
||||
.recursive()
|
||||
.filter('*.ts')
|
||||
.includeStats()
|
||||
.list();
|
||||
|
||||
// Filter with RegExp
|
||||
const files = await fs.directory('/path/to/dir')
|
||||
.filter(/\.txt$/)
|
||||
const configs = await fs.directory('/project')
|
||||
.filter(/\.config\.(ts|js)$/)
|
||||
.list();
|
||||
|
||||
// Filter with function
|
||||
const largeFiles = await fs.directory('/path/to/dir')
|
||||
const largeFiles = await fs.directory('/data')
|
||||
.includeStats()
|
||||
.filter(entry => entry.stats && entry.stats.size > 1024)
|
||||
.list();
|
||||
|
||||
// Delete directory
|
||||
await fs.directory('/path/to/dir')
|
||||
.recursive()
|
||||
.delete();
|
||||
// Delete directory recursively
|
||||
await fs.directory('/path/to/dir').recursive().delete();
|
||||
|
||||
// Check existence
|
||||
const exists = await fs.directory('/path/to/dir').exists();
|
||||
@@ -140,10 +131,10 @@ const exists = await fs.directory('/path/to/dir').exists();
|
||||
Copy or move entire directory trees with fine-grained control:
|
||||
|
||||
```typescript
|
||||
// Basic copy - copies all files recursively
|
||||
// Basic copy
|
||||
await fs.directory('/source').copy('/destination');
|
||||
|
||||
// Basic move - moves directory to new location
|
||||
// Basic move
|
||||
await fs.directory('/old-location').move('/new-location');
|
||||
|
||||
// Copy with options
|
||||
@@ -153,10 +144,10 @@ await fs.directory('/source')
|
||||
.preserveTimestamps(true) // Keep original timestamps
|
||||
.copy('/destination');
|
||||
|
||||
// Copy all files (ignore filter setting)
|
||||
// Ignore filter for copy (copy everything regardless of list filter)
|
||||
await fs.directory('/source')
|
||||
.filter('*.ts')
|
||||
.applyFilter(false) // Ignore filter, copy everything
|
||||
.applyFilter(false)
|
||||
.copy('/destination');
|
||||
|
||||
// Handle target directory conflicts
|
||||
@@ -174,6 +165,7 @@ await fs.directory('/source')
|
||||
```
|
||||
|
||||
**Configuration Options:**
|
||||
|
||||
| Method | Default | Description |
|
||||
|--------|---------|-------------|
|
||||
| `filter(pattern)` | none | Filter files by glob, regex, or function |
|
||||
@@ -182,49 +174,14 @@ await fs.directory('/source')
|
||||
| `preserveTimestamps(bool)` | `false` | Preserve original file timestamps |
|
||||
| `onConflict(mode)` | `'merge'` | `'merge'`, `'error'`, or `'replace'` |
|
||||
|
||||
### 🔐 Tree Hashing (Cache-Busting)
|
||||
### 🌊 Streaming Operations
|
||||
|
||||
Compute a deterministic hash of all files in a directory - perfect for cache invalidation:
|
||||
|
||||
```typescript
|
||||
// Hash all files in a directory recursively
|
||||
const hash = await fs.directory('/assets')
|
||||
.recursive()
|
||||
.treeHash();
|
||||
// Returns: "a3f2b8c9d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1"
|
||||
|
||||
// Hash only specific file types
|
||||
const cssHash = await fs.directory('/styles')
|
||||
.filter(/\.css$/)
|
||||
.recursive()
|
||||
.treeHash();
|
||||
|
||||
// Use different algorithm
|
||||
const sha512Hash = await fs.directory('/data')
|
||||
.recursive()
|
||||
.treeHash({ algorithm: 'sha512' });
|
||||
```
|
||||
|
||||
**How it works:**
|
||||
- Files are sorted by path for deterministic ordering
|
||||
- Hashes relative paths + file contents (streaming, memory-efficient)
|
||||
- Does NOT include metadata (mtime/size) - pure content-based
|
||||
- Same content always produces same hash, regardless of timestamps
|
||||
|
||||
**Use cases:**
|
||||
- 🚀 Cache-busting static assets
|
||||
- 📦 Detecting when served files change
|
||||
- 🔄 Incremental build triggers
|
||||
- ✅ Content verification
|
||||
|
||||
### Streaming Operations
|
||||
|
||||
SmartFS uses **Web Streams API** for efficient handling of large files:
|
||||
SmartFS uses the **Web Streams API** for efficient, memory-friendly handling of large files. All providers — including the Rust provider over IPC — support true chunked streaming:
|
||||
|
||||
```typescript
|
||||
// Read stream
|
||||
const readStream = await fs.file('/large-file.bin')
|
||||
.chunkSize(64 * 1024)
|
||||
.chunkSize(64 * 1024) // 64 KB chunks
|
||||
.readStream();
|
||||
|
||||
const reader = readStream.getReader();
|
||||
@@ -232,7 +189,6 @@ while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
// Process chunk (Uint8Array)
|
||||
console.log('Chunk size:', value.length);
|
||||
}
|
||||
|
||||
// Write stream
|
||||
@@ -243,18 +199,18 @@ await writer.write(new Uint8Array([1, 2, 3]));
|
||||
await writer.write(new Uint8Array([4, 5, 6]));
|
||||
await writer.close();
|
||||
|
||||
// Pipe streams
|
||||
// Pipe one stream to another
|
||||
const input = await fs.file('/input.txt').readStream();
|
||||
const output = await fs.file('/output.txt').writeStream();
|
||||
await input.pipeTo(output);
|
||||
```
|
||||
|
||||
### Transactions
|
||||
### 💾 Transactions
|
||||
|
||||
Execute multiple file operations atomically with automatic rollback on failure:
|
||||
|
||||
```typescript
|
||||
// Simple transaction
|
||||
// Simple transaction — all-or-nothing
|
||||
await fs.transaction()
|
||||
.file('/file1.txt').write('content 1')
|
||||
.file('/file2.txt').write('content 2')
|
||||
@@ -276,48 +232,79 @@ try {
|
||||
}
|
||||
```
|
||||
|
||||
### File Watching
|
||||
### 👀 File Watching
|
||||
|
||||
Monitor filesystem changes with event-based watching:
|
||||
|
||||
```typescript
|
||||
// Watch a single file
|
||||
const watcher = await fs.watch('/path/to/file.txt')
|
||||
.onChange(event => {
|
||||
console.log('File changed:', event.path);
|
||||
})
|
||||
.onChange(event => console.log('Changed:', event.path))
|
||||
.start();
|
||||
|
||||
// Watch directory recursively
|
||||
const dirWatcher = await fs.watch('/path/to/dir')
|
||||
// Watch a directory recursively with filters and debounce
|
||||
const dirWatcher = await fs.watch('/src')
|
||||
.recursive()
|
||||
.filter('*.ts')
|
||||
.debounce(100)
|
||||
.filter(/\.ts$/)
|
||||
.debounce(100) // ms
|
||||
.onChange(event => console.log('Changed:', event.path))
|
||||
.onAdd(event => console.log('Added:', event.path))
|
||||
.onDelete(event => console.log('Deleted:', event.path))
|
||||
.start();
|
||||
|
||||
// Stop watching
|
||||
await dirWatcher.stop();
|
||||
|
||||
// Watch with custom filter
|
||||
const customWatcher = await fs.watch('/path/to/dir')
|
||||
// Watch with a function filter
|
||||
const customWatcher = await fs.watch('/src')
|
||||
.recursive()
|
||||
.filter(path => path.endsWith('.ts') && !path.includes('test'))
|
||||
.onAll(event => {
|
||||
console.log(`${event.type}: ${event.path}`);
|
||||
})
|
||||
.onAll(event => console.log(`${event.type}: ${event.path}`))
|
||||
.start();
|
||||
|
||||
// Stop watching
|
||||
await dirWatcher.stop();
|
||||
```
|
||||
|
||||
### 🔐 Tree Hashing (Cache-Busting)
|
||||
|
||||
Compute a deterministic hash of all files in a directory — ideal for cache invalidation, change detection, and build triggers:
|
||||
|
||||
```typescript
|
||||
// Hash all files in a directory recursively
|
||||
const hash = await fs.directory('/assets')
|
||||
.recursive()
|
||||
.treeHash();
|
||||
// → "a3f2b8c9d4e5f6a7b8c9d0e1f2a3b4c5d6e7f8a9b0c1d2e3f4a5b6c7d8e9f0a1"
|
||||
|
||||
// Hash only specific file types
|
||||
const cssHash = await fs.directory('/styles')
|
||||
.filter(/\.css$/)
|
||||
.recursive()
|
||||
.treeHash();
|
||||
|
||||
// Use a different algorithm
|
||||
const sha512Hash = await fs.directory('/data')
|
||||
.recursive()
|
||||
.treeHash({ algorithm: 'sha512' });
|
||||
```
|
||||
|
||||
**How it works:**
|
||||
- Files are sorted by path for deterministic ordering
|
||||
- Hashes relative path + file contents (streaming, memory-efficient)
|
||||
- Does **not** include metadata (mtime/size) — pure content-based
|
||||
- Same content always produces the same hash, regardless of timestamps
|
||||
|
||||
**Use cases:**
|
||||
- 🚀 Cache-busting static assets
|
||||
- 📦 Detecting when served files have changed
|
||||
- 🔄 Incremental build triggers
|
||||
- ✅ Content integrity verification
|
||||
|
||||
## Providers
|
||||
|
||||
SmartFS supports multiple storage backends through providers:
|
||||
SmartFS supports multiple storage backends through its provider architecture. Swap providers without changing any application code.
|
||||
|
||||
### Node.js Provider
|
||||
### 🟢 Node.js Provider
|
||||
|
||||
Uses Node.js `fs/promises` API for local filesystem operations:
|
||||
Uses Node.js `fs/promises` for local filesystem operations. The default choice for most applications:
|
||||
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||
@@ -325,57 +312,92 @@ import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||
const fs = new SmartFs(new SmartFsProviderNode());
|
||||
```
|
||||
|
||||
**Capabilities:**
|
||||
- ✅ File watching
|
||||
- ✅ Atomic writes
|
||||
- ✅ Transactions
|
||||
- ✅ Streaming
|
||||
- ✅ Symbolic links
|
||||
- ✅ File permissions
|
||||
- ✅ Tree hashing
|
||||
| Capability | Status |
|
||||
|---|---|
|
||||
| File watching | ✅ |
|
||||
| Atomic writes | ✅ |
|
||||
| Transactions | ✅ |
|
||||
| Streaming | ✅ |
|
||||
| Symbolic links | ✅ |
|
||||
| File permissions | ✅ |
|
||||
|
||||
### Memory Provider
|
||||
### 🦀 Rust Provider
|
||||
|
||||
In-memory virtual filesystem, perfect for testing:
|
||||
A high-durability provider powered by a cross-compiled Rust binary that communicates via JSON-over-IPC. The Rust provider adds **XFS-safe `fsync` guarantees** that the Node.js `fs` module cannot provide — after every metadata-changing operation (`write`, `rename`, `unlink`, `mkdir`), the parent directory is explicitly `fsync`'d to ensure durability on delayed-logging filesystems like XFS.
|
||||
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderRust } from '@push.rocks/smartfs';
|
||||
|
||||
const fs = new SmartFs(new SmartFsProviderRust());
|
||||
|
||||
// Use it exactly like any other provider
|
||||
await fs.file('/data/important.json')
|
||||
.atomic()
|
||||
.write(JSON.stringify(data));
|
||||
|
||||
// Don't forget to shut down when done
|
||||
const provider = fs.provider as SmartFsProviderRust;
|
||||
await provider.shutdown();
|
||||
```
|
||||
|
||||
| Capability | Status |
|
||||
|---|---|
|
||||
| File watching | ✅ (via `notify` crate) |
|
||||
| Atomic writes | ✅ (with fsync + parent fsync) |
|
||||
| Transactions | ✅ (with batch fsync) |
|
||||
| Streaming | ✅ (chunked IPC) |
|
||||
| Symbolic links | ✅ |
|
||||
| File permissions | ✅ |
|
||||
|
||||
**Key advantages over the Node.js provider:**
|
||||
- `fsync` on parent directories after all metadata changes (crash-safe on XFS)
|
||||
- Atomic writes with `fsync` → `rename` → `fsync parent` sequence
|
||||
- Batch `fsync` for transactions (collect affected directories, sync once at end)
|
||||
- Cross-device move with fallback (`EXDEV` handling)
|
||||
- Uses the [`notify`](https://crates.io/crates/notify) crate for reliable file watching
|
||||
|
||||
### 🧪 Memory Provider
|
||||
|
||||
In-memory virtual filesystem — perfect for testing:
|
||||
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderMemory } from '@push.rocks/smartfs';
|
||||
|
||||
const fs = new SmartFs(new SmartFsProviderMemory());
|
||||
|
||||
// All operations work in memory
|
||||
// All operations work in memory — fast, isolated, no cleanup needed
|
||||
await fs.file('/virtual/file.txt').write('data');
|
||||
const content = await fs.file('/virtual/file.txt').read();
|
||||
const content = await fs.file('/virtual/file.txt').encoding('utf8').read();
|
||||
|
||||
// Clear all data
|
||||
fs.provider.clear();
|
||||
// Clear all data between tests
|
||||
(fs.provider as SmartFsProviderMemory).clear();
|
||||
```
|
||||
|
||||
**Capabilities:**
|
||||
- ✅ File watching
|
||||
- ✅ Atomic writes
|
||||
- ✅ Transactions
|
||||
- ✅ Streaming
|
||||
- ❌ Symbolic links
|
||||
- ✅ File permissions
|
||||
- ✅ Tree hashing
|
||||
| Capability | Status |
|
||||
|---|---|
|
||||
| File watching | ✅ |
|
||||
| Atomic writes | ✅ |
|
||||
| Transactions | ✅ |
|
||||
| Streaming | ✅ |
|
||||
| Symbolic links | ❌ |
|
||||
| File permissions | ✅ |
|
||||
|
||||
### Custom Providers
|
||||
### 🔧 Custom Providers
|
||||
|
||||
Create your own provider by implementing `ISmartFsProvider`:
|
||||
Build your own provider by implementing the `ISmartFsProvider` interface:
|
||||
|
||||
```typescript
|
||||
import type { ISmartFsProvider } from '@push.rocks/smartfs';
|
||||
|
||||
class MyCustomProvider implements ISmartFsProvider {
|
||||
public readonly name = 'custom';
|
||||
class MyS3Provider implements ISmartFsProvider {
|
||||
public readonly name = 's3';
|
||||
public readonly capabilities = {
|
||||
supportsWatch: true,
|
||||
supportsWatch: false,
|
||||
supportsAtomic: true,
|
||||
supportsTransactions: true,
|
||||
supportsStreaming: true,
|
||||
supportsSymlinks: false,
|
||||
supportsPermissions: true,
|
||||
supportsPermissions: false,
|
||||
};
|
||||
|
||||
// Implement all required methods...
|
||||
@@ -384,7 +406,7 @@ class MyCustomProvider implements ISmartFsProvider {
|
||||
// ... etc
|
||||
}
|
||||
|
||||
const fs = new SmartFs(new MyCustomProvider());
|
||||
const fs = new SmartFs(new MyS3Provider());
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
@@ -395,9 +417,10 @@ const fs = new SmartFs(new MyCustomProvider());
|
||||
// UTF-8 (default for text)
|
||||
await fs.file('/file.txt').encoding('utf8').write('text');
|
||||
|
||||
// Binary
|
||||
// Binary (Buffer)
|
||||
const buffer = Buffer.from([0x48, 0x65, 0x6c, 0x6c, 0x6f]);
|
||||
await fs.file('/file.bin').write(buffer);
|
||||
const data = await fs.file('/file.bin').read(); // Returns Buffer
|
||||
|
||||
// Base64
|
||||
await fs.file('/file.txt').encoding('base64').write('SGVsbG8=');
|
||||
@@ -423,8 +446,7 @@ await fs.directory('/private')
|
||||
### Complex Filtering
|
||||
|
||||
```typescript
|
||||
// Multiple conditions
|
||||
const files = await fs.directory('/src')
|
||||
const recentLargeTs = await fs.directory('/src')
|
||||
.recursive()
|
||||
.includeStats()
|
||||
.filter(entry => {
|
||||
@@ -440,44 +462,40 @@ const files = await fs.directory('/src')
|
||||
### Transaction Operations
|
||||
|
||||
```typescript
|
||||
// Complex transaction
|
||||
const tx = fs.transaction();
|
||||
|
||||
// Write multiple files
|
||||
// Build up operations
|
||||
tx.file('/data/file1.json').write(JSON.stringify(data1));
|
||||
tx.file('/data/file2.json').write(JSON.stringify(data2));
|
||||
|
||||
// Copy backups
|
||||
tx.file('/data/file1.json').copy('/backup/file1.json');
|
||||
tx.file('/data/file2.json').copy('/backup/file2.json');
|
||||
tx.file('/data/old.json').delete();
|
||||
|
||||
// Delete old files
|
||||
tx.file('/data/old1.json').delete();
|
||||
tx.file('/data/old2.json').delete();
|
||||
|
||||
// Execute atomically
|
||||
// Execute atomically — all succeed or all revert
|
||||
await tx.commit();
|
||||
```
|
||||
|
||||
## Type Definitions
|
||||
|
||||
SmartFS is fully typed with TypeScript:
|
||||
SmartFS is fully typed. All interfaces and types are exported:
|
||||
|
||||
```typescript
|
||||
import type {
|
||||
ISmartFsProvider,
|
||||
IProviderCapabilities,
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IWatchEvent,
|
||||
ITransactionOperation,
|
||||
ITreeHashOptions,
|
||||
TEncoding,
|
||||
TFileMode,
|
||||
TEncoding, // 'utf8' | 'utf-8' | 'ascii' | 'base64' | 'hex' | 'binary' | 'buffer'
|
||||
TFileMode, // number
|
||||
TWatchEventType, // 'add' | 'change' | 'delete'
|
||||
} from '@push.rocks/smartfs';
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
SmartFS throws descriptive errors:
|
||||
SmartFS throws descriptive errors that mirror POSIX conventions:
|
||||
|
||||
```typescript
|
||||
try {
|
||||
@@ -491,22 +509,23 @@ try {
|
||||
try {
|
||||
await fs.transaction()
|
||||
.file('/file1.txt').write('data')
|
||||
.file('/file2.txt').write('data')
|
||||
.file('/readonly/file2.txt').write('data') // fails
|
||||
.commit();
|
||||
} catch (error) {
|
||||
// All operations are reverted
|
||||
// file1.txt is reverted to its original state
|
||||
console.error('Transaction failed:', error);
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
1. **Use streaming** for large files (> 1MB)
|
||||
2. **Batch operations** with transactions
|
||||
3. **Use memory provider** for testing
|
||||
4. **Enable atomic writes** for critical data
|
||||
5. **Debounce watchers** to reduce event spam
|
||||
6. **Use treeHash** instead of reading files for change detection
|
||||
1. **Use streaming** for large files (> 1MB) — avoids loading entire files into memory
|
||||
2. **Batch operations** with transactions for durability and performance
|
||||
3. **Use the memory provider** for testing — instant, isolated, no disk I/O
|
||||
4. **Enable atomic writes** for critical data — prevents partial writes on crash
|
||||
5. **Debounce watchers** to reduce event noise during rapid changes
|
||||
6. **Use `treeHash`** instead of reading individual files for change detection
|
||||
7. **Use the Rust provider** on XFS or when you need guaranteed durability
|
||||
|
||||
## License and Legal Information
|
||||
|
||||
|
||||
757
rust/Cargo.lock
generated
Normal file
757
rust/Cargo.lock
generated
Normal file
@@ -0,0 +1,757 @@
|
||||
# This file is automatically @generated by Cargo.
|
||||
# It is not intended for manual editing.
|
||||
version = 4
|
||||
|
||||
[[package]]
|
||||
name = "anstream"
|
||||
version = "0.6.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "43d5b281e737544384e969a5ccad3f1cdd24b48086a0fc1b2a5262a26b8f4f4a"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"anstyle-parse",
|
||||
"anstyle-query",
|
||||
"anstyle-wincon",
|
||||
"colorchoice",
|
||||
"is_terminal_polyfill",
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle"
|
||||
version = "1.0.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5192cca8006f1fd4f7237516f40fa183bb07f8fbdfedaa0036de5ea9b0b45e78"
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-parse"
|
||||
version = "0.2.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4e7644824f0aa2c7b9384579234ef10eb7efb6a0deb83f9630a49594dd9c15c2"
|
||||
dependencies = [
|
||||
"utf8parse",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-query"
|
||||
version = "1.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "40c48f72fd53cd289104fc64099abca73db4166ad86ea0b4341abe65af83dadc"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "anstyle-wincon"
|
||||
version = "3.0.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "291e6a250ff86cd4a820112fb8898808a366d8f9f58ce16d1f538353ad55747d"
|
||||
dependencies = [
|
||||
"anstyle",
|
||||
"once_cell_polyfill",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "base64"
|
||||
version = "0.22.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "1.3.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
|
||||
|
||||
[[package]]
|
||||
name = "bitflags"
|
||||
version = "2.11.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "843867be96c8daad0d758b57df9392b6d8d271134fce549de6ce169ff98a92af"
|
||||
|
||||
[[package]]
|
||||
name = "bytes"
|
||||
version = "1.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.60"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2797f34da339ce31042b27d23607e051786132987f595b02ba4f6a6dffb7030a"
|
||||
dependencies = [
|
||||
"clap_builder",
|
||||
"clap_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_builder"
|
||||
version = "4.5.60"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24a241312cea5059b13574bb9b3861cabf758b879c15190b37b6d6fd63ab6876"
|
||||
dependencies = [
|
||||
"anstream",
|
||||
"anstyle",
|
||||
"clap_lex",
|
||||
"strsim",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_derive"
|
||||
version = "4.5.55"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a92793da1a46a5f2a02a6f4c46c6496b28c43638adea8306fcb0caa1634f24e5"
|
||||
dependencies = [
|
||||
"heck",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap_lex"
|
||||
version = "1.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3a822ea5bc7590f9d40f1ba12c0dc3c2760f3482c6984db1573ad11031420831"
|
||||
|
||||
[[package]]
|
||||
name = "colorchoice"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
|
||||
|
||||
[[package]]
|
||||
name = "errno"
|
||||
version = "0.3.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "filetime"
|
||||
version = "0.2.27"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f98844151eee8917efc50bd9e8318cb963ae8b297431495d3f758616ea5c57db"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"libredox",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "fsevent-sys"
|
||||
version = "4.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "76ee7a02da4d231650c7cea31349b889be2f45ddb3ef3032d2ec8185f6313fd2"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "heck"
|
||||
version = "0.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
|
||||
|
||||
[[package]]
|
||||
name = "inotify"
|
||||
version = "0.10.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fdd168d97690d0b8c412d6b6c10360277f4d7ee495c5d0d5d5fe0854923255cc"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"inotify-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "inotify-sys"
|
||||
version = "0.1.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "instant"
|
||||
version = "0.1.13"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "is_terminal_polyfill"
|
||||
version = "1.70.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
|
||||
|
||||
[[package]]
|
||||
name = "itoa"
|
||||
version = "1.0.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
|
||||
|
||||
[[package]]
|
||||
name = "kqueue"
|
||||
version = "1.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "eac30106d7dce88daf4a3fcb4879ea939476d5074a9b7ddd0fb97fa4bed5596a"
|
||||
dependencies = [
|
||||
"kqueue-sys",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "kqueue-sys"
|
||||
version = "1.0.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed9625ffda8729b85e45cf04090035ac368927b8cebc34898e7c120f52e4838b"
|
||||
dependencies = [
|
||||
"bitflags 1.3.2",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.182"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6800badb6cb2082ffd7b6a67e6125bb39f18782f793520caee8cb8846be06112"
|
||||
|
||||
[[package]]
|
||||
name = "libredox"
|
||||
version = "0.1.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1744e39d1d6a9948f4f388969627434e31128196de472883b39f148769bfe30a"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"libc",
|
||||
"plain",
|
||||
"redox_syscall 0.7.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lock_api"
|
||||
version = "0.4.14"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "224399e74b87b5f3557511d98dff8b14089b3dadafcab6bb93eab67d3aace965"
|
||||
dependencies = [
|
||||
"scopeguard",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "log"
|
||||
version = "0.4.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
|
||||
|
||||
[[package]]
|
||||
name = "memchr"
|
||||
version = "2.8.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f8ca58f447f06ed17d5fc4043ce1b10dd205e060fb3ce5b979b8ed8e59ff3f79"
|
||||
|
||||
[[package]]
|
||||
name = "mio"
|
||||
version = "1.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a69bcab0ad47271a0234d9422b131806bf3968021e5dc9328caf2d4cd58557fc"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"log",
|
||||
"wasi",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "notify"
|
||||
version = "7.0.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c533b4c39709f9ba5005d8002048266593c1cfaf3c5f0739d5b8ab0c6c504009"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
"filetime",
|
||||
"fsevent-sys",
|
||||
"inotify",
|
||||
"kqueue",
|
||||
"libc",
|
||||
"log",
|
||||
"mio",
|
||||
"notify-types",
|
||||
"walkdir",
|
||||
"windows-sys 0.52.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "notify-types"
|
||||
version = "1.0.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "585d3cb5e12e01aed9e8a1f70d5c6b5e86fe2a6e48fc8cd0b3e0b8df6f6eb174"
|
||||
dependencies = [
|
||||
"instant",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "once_cell_polyfill"
|
||||
version = "1.70.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "384b8ab6d37215f3c5301a95a4accb5d64aa607f1fcb26a11b5303878451b4fe"
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot"
|
||||
version = "0.12.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93857453250e3077bd71ff98b6a65ea6621a19bb0f559a85248955ac12c45a1a"
|
||||
dependencies = [
|
||||
"lock_api",
|
||||
"parking_lot_core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "parking_lot_core"
|
||||
version = "0.9.12"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2621685985a2ebf1c516881c026032ac7deafcda1a2c9b7850dc81e3dfcb64c1"
|
||||
dependencies = [
|
||||
"cfg-if",
|
||||
"libc",
|
||||
"redox_syscall 0.5.18",
|
||||
"smallvec",
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "pin-project-lite"
|
||||
version = "0.2.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a89322df9ebe1c1578d689c92318e070967d1042b512afbe49518723f4e6d5cd"
|
||||
|
||||
[[package]]
|
||||
name = "plain"
|
||||
version = "0.2.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
|
||||
|
||||
[[package]]
|
||||
name = "proc-macro2"
|
||||
version = "1.0.106"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
|
||||
dependencies = [
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "quote"
|
||||
version = "1.0.45"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41f2619966050689382d2b44f664f4bc593e129785a36d6ee376ddf37259b924"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.5.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ed2bf2547551a7053d6fdfafda3f938979645c44812fbfcda098faae3f1a362d"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "redox_syscall"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6ce70a74e890531977d37e532c34d45e9055d2409ed08ddba14529471ed0be16"
|
||||
dependencies = [
|
||||
"bitflags 2.11.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-lite"
|
||||
version = "0.1.9"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "cab834c73d247e67f4fae452806d17d3c7501756d98c8808d7c9c7aa7d18f973"
|
||||
|
||||
[[package]]
|
||||
name = "same-file"
|
||||
version = "1.0.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "93fc1dc3aaa9bfed95e02e6eadabb4baf7e3078b0bd1b4d7b6b0b68378900502"
|
||||
dependencies = [
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scopeguard"
|
||||
version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||
|
||||
[[package]]
|
||||
name = "serde"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9a8e94ea7f378bd32cbbd37198a4a91436180c5bb472411e48b5ec2e2124ae9e"
|
||||
dependencies = [
|
||||
"serde_core",
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_core"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "41d385c7d4ca58e59fc732af25c3983b67ac852c1a25000afe1175de458b67ad"
|
||||
dependencies = [
|
||||
"serde_derive",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_derive"
|
||||
version = "1.0.228"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d540f220d3187173da220f885ab66608367b6574e925011a9353e4badda91d79"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_json"
|
||||
version = "1.0.149"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "83fc039473c5595ace860d8c4fafa220ff474b3fc6bfdb4293327f1a37e94d86"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"memchr",
|
||||
"serde",
|
||||
"serde_core",
|
||||
"zmij",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "signal-hook-registry"
|
||||
version = "1.4.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c4db69cba1110affc0e9f7bcd48bbf87b3f4fc7c61fc9155afd4c469eb3d6c1b"
|
||||
dependencies = [
|
||||
"errno",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smallvec"
|
||||
version = "1.15.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "67b1b7a3b5fe4f1376887184045fcf45c69e92af734b7aaddc05fb777b6fbd03"
|
||||
|
||||
[[package]]
|
||||
name = "smartfs-bin"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"clap",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smartfs-core",
|
||||
"smartfs-protocol",
|
||||
"tokio",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smartfs-core"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"base64",
|
||||
"filetime",
|
||||
"libc",
|
||||
"notify",
|
||||
"regex-lite",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"smartfs-protocol",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "smartfs-protocol"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"serde",
|
||||
"serde_json",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "socket2"
|
||||
version = "0.6.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"windows-sys 0.60.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "strsim"
|
||||
version = "0.11.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
|
||||
|
||||
[[package]]
|
||||
name = "syn"
|
||||
version = "2.0.117"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e665b8803e7b1d2a727f4023456bbbbe74da67099c585258af0ad9c5013b9b99"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"unicode-ident",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio"
|
||||
version = "1.50.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "27ad5e34374e03cfffefc301becb44e9dc3c17584f414349ebe29ed26661822d"
|
||||
dependencies = [
|
||||
"bytes",
|
||||
"libc",
|
||||
"mio",
|
||||
"parking_lot",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2",
|
||||
"tokio-macros",
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-macros"
|
||||
version = "2.6.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5c55a2eff8b69ce66c84f85e1da1c233edc36ceb85a2058d11b0d6a3c7e7569c"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "unicode-ident"
|
||||
version = "1.0.24"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "e6e4313cd5fcd3dad5cafa179702e2b244f760991f45397d14d4ebf38247da75"
|
||||
|
||||
[[package]]
|
||||
name = "utf8parse"
|
||||
version = "0.2.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821"
|
||||
|
||||
[[package]]
|
||||
name = "walkdir"
|
||||
version = "2.5.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "29790946404f91d9c5d06f9874efddea1dc06c5efe94541a7d6863108e3a5e4b"
|
||||
dependencies = [
|
||||
"same-file",
|
||||
"winapi-util",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "wasi"
|
||||
version = "0.11.1+wasi-snapshot-preview1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b"
|
||||
|
||||
[[package]]
|
||||
name = "winapi-util"
|
||||
version = "0.1.11"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22"
|
||||
dependencies = [
|
||||
"windows-sys 0.61.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-link"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5"
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.52.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
|
||||
dependencies = [
|
||||
"windows-targets 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.60.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f2f500e4d28234f72040990ec9d39e3a6b950f9f22d3dba18416c35882612bcb"
|
||||
dependencies = [
|
||||
"windows-targets 0.53.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-sys"
|
||||
version = "0.61.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973"
|
||||
dependencies = [
|
||||
"windows_aarch64_gnullvm 0.52.6",
|
||||
"windows_aarch64_msvc 0.52.6",
|
||||
"windows_i686_gnu 0.52.6",
|
||||
"windows_i686_gnullvm 0.52.6",
|
||||
"windows_i686_msvc 0.52.6",
|
||||
"windows_x86_64_gnu 0.52.6",
|
||||
"windows_x86_64_gnullvm 0.52.6",
|
||||
"windows_x86_64_msvc 0.52.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows-targets"
|
||||
version = "0.53.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3"
|
||||
dependencies = [
|
||||
"windows-link",
|
||||
"windows_aarch64_gnullvm 0.53.1",
|
||||
"windows_aarch64_msvc 0.53.1",
|
||||
"windows_i686_gnu 0.53.1",
|
||||
"windows_i686_gnullvm 0.53.1",
|
||||
"windows_i686_msvc 0.53.1",
|
||||
"windows_x86_64_gnu 0.53.1",
|
||||
"windows_x86_64_gnullvm 0.53.1",
|
||||
"windows_x86_64_msvc 0.53.1",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_gnullvm"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469"
|
||||
|
||||
[[package]]
|
||||
name = "windows_aarch64_msvc"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnu"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "960e6da069d81e09becb0ca57a65220ddff016ff2d6af6a223cf372a506593a3"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_gnullvm"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66"
|
||||
|
||||
[[package]]
|
||||
name = "windows_i686_msvc"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnu"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_gnullvm"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.52.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
|
||||
|
||||
[[package]]
|
||||
name = "windows_x86_64_msvc"
|
||||
version = "0.53.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d6bbff5f0aada427a1e5a6da5f1f98158182f26556f345ac9e04d36d0ebed650"
|
||||
|
||||
[[package]]
|
||||
name = "zmij"
|
||||
version = "1.0.21"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b8848ee67ecc8aedbaf3e4122217aff892639231befc6a1b58d29fff4c2cabaa"
|
||||
20
rust/Cargo.toml
Normal file
20
rust/Cargo.toml
Normal file
@@ -0,0 +1,20 @@
|
||||
# Cargo workspace for the SmartFS Rust backend.
[workspace]
resolver = "2"
members = [
    "crates/smartfs-protocol",
    "crates/smartfs-core",
    "crates/smartfs-bin",
]

# Shared package metadata, inherited by member crates via `*.workspace = true`.
[workspace.package]
version = "0.1.0"
edition = "2021"
license = "MIT"

# Centralized dependency versions; member crates opt in with
# `<dep>.workspace = true` so all crates stay on the same versions.
[workspace.dependencies]
tokio = { version = "1", features = ["full"] }
serde = { version = "1", features = ["derive"] }
serde_json = "1"
clap = { version = "4", features = ["derive"] }
notify = "7"
base64 = "0.22"
|
||||
18
rust/crates/smartfs-bin/Cargo.toml
Normal file
18
rust/crates/smartfs-bin/Cargo.toml
Normal file
@@ -0,0 +1,18 @@
|
||||
# Manifest for the smartfs-bin crate: the IPC binary that serves filesystem
# requests over stdin/stdout (see src/main.rs, --management mode).
[package]
name = "smartfs-bin"
# Version/edition/license are inherited from the workspace root Cargo.toml.
version.workspace = true
edition.workspace = true
license.workspace = true

[[bin]]
name = "smartfs-bin"
path = "src/main.rs"

[dependencies]
# Sibling workspace crates: wire protocol types and core filesystem ops.
smartfs-protocol = { path = "../smartfs-protocol" }
smartfs-core = { path = "../smartfs-core" }
# External dependencies pinned via [workspace.dependencies].
tokio.workspace = true
serde.workspace = true
serde_json.workspace = true
clap.workspace = true
base64.workspace = true
|
||||
419
rust/crates/smartfs-bin/src/main.rs
Normal file
419
rust/crates/smartfs-bin/src/main.rs
Normal file
@@ -0,0 +1,419 @@
|
||||
use base64::{Engine as _, engine::general_purpose::STANDARD};
|
||||
use clap::Parser;
|
||||
use smartfs_core::{FsOps, WatchManager};
|
||||
use smartfs_protocol::*;
|
||||
use std::collections::HashMap;
|
||||
use std::io::{self, BufRead, BufWriter, Write as IoWrite};
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
|
||||
/// Command-line arguments for the smartfs-bin binary.
///
/// The binary currently exposes a single flag; without it, `main` prints a
/// usage hint and exits non-zero.
#[derive(Parser)]
#[command(name = "smartfs-bin", about = "SmartFS Rust filesystem backend")]
struct Cli {
    /// Run in management/IPC mode (JSON over stdin/stdout)
    #[arg(long)]
    management: bool,
}
|
||||
|
||||
fn main() {
|
||||
let cli = Cli::parse();
|
||||
|
||||
if cli.management {
|
||||
run_management_mode();
|
||||
} else {
|
||||
eprintln!("smartfs-bin: use --management flag for IPC mode");
|
||||
std::process::exit(1);
|
||||
}
|
||||
}
|
||||
|
||||
/// State for open write streams
struct WriteStreamState {
    // Buffered handle to the file currently receiving chunks.
    writer: BufWriter<std::fs::File>,
    // Path the written data should end up at.
    final_path: PathBuf,
    // Optional scratch path; presumably used for write-then-rename atomic
    // writes (renamed to `final_path` on close) — confirm in the stream-close
    // handling, which is outside this view.
    temp_path: Option<PathBuf>,
    // Unix permission bits to apply, if the caller requested a mode
    // (see the `PermissionsExt` import) — TODO confirm where it is applied.
    mode: Option<u32>,
}
|
||||
|
||||
fn run_management_mode() {
|
||||
// Send ready event
|
||||
let ready = IpcEvent {
|
||||
event: "ready".to_string(),
|
||||
data: serde_json::json!({
|
||||
"version": env!("CARGO_PKG_VERSION"),
|
||||
"provider": "rust"
|
||||
}),
|
||||
};
|
||||
send_json(&ready);
|
||||
|
||||
let watch_manager = WatchManager::new();
|
||||
let mut write_streams: HashMap<String, WriteStreamState> = HashMap::new();
|
||||
let stdin = io::stdin();
|
||||
|
||||
for line in stdin.lock().lines() {
|
||||
let line = match line {
|
||||
Ok(l) => l,
|
||||
Err(_) => break,
|
||||
};
|
||||
|
||||
if line.trim().is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let request: IpcRequest = match serde_json::from_str(&line) {
|
||||
Ok(r) => r,
|
||||
Err(e) => {
|
||||
eprintln!("smartfs-bin: invalid JSON: {}", e);
|
||||
continue;
|
||||
}
|
||||
};
|
||||
|
||||
let response = dispatch_command(&request, &watch_manager, &mut write_streams);
|
||||
send_json(&response);
|
||||
}
|
||||
}
|
||||
|
||||
fn dispatch_command(
|
||||
req: &IpcRequest,
|
||||
watch_manager: &WatchManager,
|
||||
write_streams: &mut HashMap<String, WriteStreamState>,
|
||||
) -> IpcResponse {
|
||||
match req.method.as_str() {
|
||||
"readFile" => {
|
||||
match serde_json::from_value::<ReadFileParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::read_file(¶ms) {
|
||||
Ok(result) => IpcResponse::ok(req.id.clone(), result),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"writeFile" => {
|
||||
match serde_json::from_value::<WriteFileParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::write_file(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"appendFile" => {
|
||||
match serde_json::from_value::<AppendFileParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::append_file(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"deleteFile" => {
|
||||
match serde_json::from_value::<PathParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::delete_file(Path::new(¶ms.path)) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"copyFile" => {
|
||||
match serde_json::from_value::<CopyMoveParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::copy_file(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"moveFile" => {
|
||||
match serde_json::from_value::<CopyMoveParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::move_file(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"fileExists" => {
|
||||
match serde_json::from_value::<PathParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let exists = FsOps::file_exists(Path::new(¶ms.path));
|
||||
IpcResponse::ok(req.id.clone(), serde_json::json!(exists))
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"fileStat" => {
|
||||
match serde_json::from_value::<PathParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::file_stat(Path::new(¶ms.path)) {
|
||||
Ok(stats) => {
|
||||
IpcResponse::ok(req.id.clone(), serde_json::to_value(&stats).unwrap())
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"listDirectory" => {
|
||||
match serde_json::from_value::<ListDirectoryParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::list_directory(¶ms) {
|
||||
Ok(entries) => {
|
||||
IpcResponse::ok(req.id.clone(), serde_json::to_value(&entries).unwrap())
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"createDirectory" => {
|
||||
match serde_json::from_value::<CreateDirectoryParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::create_directory(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"deleteDirectory" => {
|
||||
match serde_json::from_value::<DeleteDirectoryParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::delete_directory(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"directoryExists" => {
|
||||
match serde_json::from_value::<PathParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let exists = FsOps::directory_exists(Path::new(¶ms.path));
|
||||
IpcResponse::ok(req.id.clone(), serde_json::json!(exists))
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"directoryStat" => {
|
||||
match serde_json::from_value::<PathParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::directory_stat(Path::new(¶ms.path)) {
|
||||
Ok(stats) => {
|
||||
IpcResponse::ok(req.id.clone(), serde_json::to_value(&stats).unwrap())
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"watch" => {
|
||||
match serde_json::from_value::<WatchParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
match watch_manager.add_watch(
|
||||
params.id,
|
||||
¶ms.path,
|
||||
params.recursive.unwrap_or(false),
|
||||
) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
}
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"unwatchAll" => {
|
||||
match watch_manager.remove_all() {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
}
|
||||
}
|
||||
|
||||
"batch" => {
|
||||
match serde_json::from_value::<BatchParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let results = FsOps::batch(¶ms);
|
||||
IpcResponse::ok(req.id.clone(), serde_json::to_value(&results).unwrap())
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"executeTransaction" => {
|
||||
match serde_json::from_value::<TransactionParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::execute_transaction(¶ms) {
|
||||
Ok(()) => IpcResponse::ok_void(req.id.clone()),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"normalizePath" => {
|
||||
match serde_json::from_value::<NormalizePathParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let result = FsOps::normalize_path(¶ms.path);
|
||||
IpcResponse::ok(req.id.clone(), serde_json::json!(result))
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"joinPath" => {
|
||||
match serde_json::from_value::<JoinPathParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let result = FsOps::join_path(¶ms.segments);
|
||||
IpcResponse::ok(req.id.clone(), serde_json::json!(result))
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"readFileStream" => {
|
||||
match serde_json::from_value::<ReadFileStreamParams>(req.params.clone()) {
|
||||
Ok(params) => match FsOps::read_file_stream(&req.id, ¶ms) {
|
||||
Ok(total) => IpcResponse::ok(req.id.clone(), serde_json::json!({ "totalBytes": total })),
|
||||
Err(e) => IpcResponse::err(req.id.clone(), e),
|
||||
},
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"writeStreamBegin" => {
|
||||
match serde_json::from_value::<WriteStreamBeginParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let final_path = PathBuf::from(¶ms.path);
|
||||
|
||||
// Ensure parent directory exists
|
||||
if let Some(parent) = final_path.parent() {
|
||||
if !parent.exists() {
|
||||
if let Err(e) = std::fs::create_dir_all(parent) {
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamBegin mkdir: {}", e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let (write_path, temp_path) = if params.atomic.unwrap_or(false) {
|
||||
let temp = final_path.with_extension(format!(
|
||||
"tmp.{}",
|
||||
std::time::SystemTime::now()
|
||||
.duration_since(std::time::SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_nanos()
|
||||
));
|
||||
(temp.clone(), Some(temp))
|
||||
} else {
|
||||
(final_path.clone(), None)
|
||||
};
|
||||
|
||||
match std::fs::File::create(&write_path) {
|
||||
Ok(file) => {
|
||||
let stream_id = format!("ws_{}", req.id);
|
||||
write_streams.insert(stream_id.clone(), WriteStreamState {
|
||||
writer: BufWriter::new(file),
|
||||
final_path,
|
||||
temp_path,
|
||||
mode: params.mode,
|
||||
});
|
||||
IpcResponse::ok(req.id.clone(), serde_json::json!({ "streamId": stream_id }))
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("writeStreamBegin create: {}", e)),
|
||||
}
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"writeStreamChunk" => {
|
||||
match serde_json::from_value::<WriteStreamChunkParams>(req.params.clone()) {
|
||||
Ok(params) => {
|
||||
let stream = match write_streams.get_mut(¶ms.stream_id) {
|
||||
Some(s) => s,
|
||||
None => return IpcResponse::err(req.id.clone(), format!("unknown streamId: {}", params.stream_id)),
|
||||
};
|
||||
|
||||
// Write data if non-empty
|
||||
if !params.data.is_empty() {
|
||||
match STANDARD.decode(¶ms.data) {
|
||||
Ok(bytes) => {
|
||||
if let Err(e) = stream.writer.write_all(&bytes) {
|
||||
write_streams.remove(¶ms.stream_id);
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamChunk write: {}", e));
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
write_streams.remove(¶ms.stream_id);
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamChunk decode: {}", e));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if params.last {
|
||||
// Finalize: flush, fsync, set mode, rename if atomic, fsync parent
|
||||
let state = write_streams.remove(¶ms.stream_id).unwrap();
|
||||
let mut writer = state.writer;
|
||||
|
||||
if let Err(e) = writer.flush() {
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamChunk flush: {}", e));
|
||||
}
|
||||
|
||||
// Get inner file for fsync
|
||||
let file = match writer.into_inner() {
|
||||
Ok(f) => f,
|
||||
Err(e) => {
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamChunk into_inner: {}", e.error()));
|
||||
}
|
||||
};
|
||||
if let Err(e) = file.sync_all() {
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamChunk fsync: {}", e));
|
||||
}
|
||||
drop(file);
|
||||
|
||||
// Set mode if requested
|
||||
if let Some(mode) = state.mode {
|
||||
let write_path = state.temp_path.as_ref().unwrap_or(&state.final_path);
|
||||
let _ = std::fs::set_permissions(write_path, std::fs::Permissions::from_mode(mode));
|
||||
}
|
||||
|
||||
// Rename if atomic
|
||||
if let Some(ref temp_path) = state.temp_path {
|
||||
if let Err(e) = std::fs::rename(temp_path, &state.final_path) {
|
||||
let _ = std::fs::remove_file(temp_path);
|
||||
return IpcResponse::err(req.id.clone(), format!("writeStreamChunk rename: {}", e));
|
||||
}
|
||||
}
|
||||
|
||||
// Fsync parent
|
||||
if let Some(parent) = state.final_path.parent() {
|
||||
let _ = std::fs::File::open(parent).and_then(|f| f.sync_all());
|
||||
}
|
||||
}
|
||||
|
||||
IpcResponse::ok_void(req.id.clone())
|
||||
}
|
||||
Err(e) => IpcResponse::err(req.id.clone(), format!("invalid params: {}", e)),
|
||||
}
|
||||
}
|
||||
|
||||
"ping" => IpcResponse::ok(req.id.clone(), serde_json::json!({ "pong": true })),
|
||||
|
||||
other => IpcResponse::err(req.id.clone(), format!("unknown method: {}", other)),
|
||||
}
|
||||
}
|
||||
|
||||
fn send_json<T: serde::Serialize>(value: &T) {
|
||||
if let Ok(json) = serde_json::to_string(value) {
|
||||
let stdout = io::stdout();
|
||||
let mut out = stdout.lock();
|
||||
let _ = writeln!(out, "{}", json);
|
||||
let _ = out.flush();
|
||||
}
|
||||
}
|
||||
15
rust/crates/smartfs-core/Cargo.toml
Normal file
15
rust/crates/smartfs-core/Cargo.toml
Normal file
@@ -0,0 +1,15 @@
|
||||
[package]
|
||||
name = "smartfs-core"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
smartfs-protocol = { path = "../smartfs-protocol" }
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
notify.workspace = true
|
||||
libc = "0.2"
|
||||
regex-lite = "0.1"
|
||||
filetime = "0.2"
|
||||
base64.workspace = true
|
||||
5
rust/crates/smartfs-core/src/lib.rs
Normal file
5
rust/crates/smartfs-core/src/lib.rs
Normal file
@@ -0,0 +1,5 @@
|
||||
mod ops;
|
||||
mod watch;
|
||||
|
||||
pub use ops::FsOps;
|
||||
pub use watch::WatchManager;
|
||||
649
rust/crates/smartfs-core/src/ops.rs
Normal file
649
rust/crates/smartfs-core/src/ops.rs
Normal file
@@ -0,0 +1,649 @@
|
||||
use base64::{Engine as _, engine::general_purpose::STANDARD};
|
||||
use smartfs_protocol::*;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::os::unix::fs::PermissionsExt;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::time::SystemTime;
|
||||
|
||||
/// Filesystem operations with XFS-safe fsync after metadata changes.
/// Stateless marker type: every operation is an associated function, so no
/// instance is ever constructed.
pub struct FsOps;
|
||||
|
||||
impl FsOps {
|
||||
// ── Safety primitive ────────────────────────────────────────────────
|
||||
|
||||
    /// Fsync a parent directory to ensure metadata durability on XFS.
    /// This is the key operation that Node.js cannot do.
    ///
    /// A path with no parent (e.g. `/`) is a silent no-op.
    fn fsync_parent(path: &Path) -> io::Result<()> {
        if let Some(parent) = path.parent() {
            let dir = fs::File::open(parent)?;
            dir.sync_all()?;
        }
        Ok(())
    }
|
||||
|
||||
/// Fsync a specific directory.
|
||||
fn fsync_dir(path: &Path) -> io::Result<()> {
|
||||
let dir = fs::File::open(path)?;
|
||||
dir.sync_all()?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// ── File operations ─────────────────────────────────────────────────
|
||||
|
||||
    /// Read the file at `params.path` and encode its contents according to
    /// `params.encoding`: `"base64"`, `"hex"`, `"buffer"` (base64 tagged with
    /// `isBuffer`), or anything else treated as text (default `"utf8"`).
    pub fn read_file(params: &ReadFileParams) -> Result<serde_json::Value, String> {
        let path = Path::new(&params.path);
        let bytes = fs::read(path).map_err(|e| format!("read_file: {}", e))?;

        let encoding = params.encoding.as_deref().unwrap_or("utf8");
        match encoding {
            "base64" => {
                let encoded = STANDARD.encode(&bytes);
                Ok(serde_json::json!({ "content": encoded }))
            }
            "hex" => {
                let hex: String = bytes.iter().map(|b| format!("{:02x}", b)).collect();
                Ok(serde_json::json!({ "content": hex }))
            }
            "buffer" => {
                // Same base64 payload as "base64", but tagged so the TS side
                // reconstructs a Buffer instead of a string.
                let encoded = STANDARD.encode(&bytes);
                Ok(serde_json::json!({ "content": encoded, "isBuffer": true }))
            }
            _ => {
                // utf8, utf-8, ascii
                // Lossy decode: invalid UTF-8 sequences are replaced with
                // U+FFFD rather than failing the whole read.
                let content = String::from_utf8_lossy(&bytes).into_owned();
                Ok(serde_json::json!({ "content": content }))
            }
        }
    }
|
||||
|
||||
    /// Write `params.content` to `params.path`, creating missing parent
    /// directories. Content is decoded from base64 when
    /// `params.encoding == Some("base64")`, otherwise the string's bytes are
    /// written as-is. With `atomic`, the data goes through a unique temp file
    /// plus rename; in both modes the file data and the parent directory are
    /// fsynced so the write is durable on XFS.
    pub fn write_file(params: &WriteFileParams) -> Result<(), String> {
        let path = Path::new(&params.path);
        let content: Vec<u8> = if params.encoding.as_deref() == Some("base64") {
            STANDARD.decode(&params.content).map_err(|e| format!("write_file base64 decode: {}", e))?
        } else {
            params.content.as_bytes().to_vec()
        };

        // Ensure parent directory exists
        if let Some(parent) = path.parent() {
            if !parent.exists() {
                fs::create_dir_all(parent).map_err(|e| format!("write_file mkdir: {}", e))?;
                // Best-effort: make the new directory entry itself durable.
                Self::fsync_parent(parent).ok();
            }
        }

        if params.atomic.unwrap_or(false) {
            // Atomic write: write to temp → fsync file → rename → fsync parent
            // Temp name is suffixed with a nanosecond timestamp to avoid
            // collisions between concurrent writers of the same path.
            let temp_path = path.with_extension(format!(
                "tmp.{}",
                SystemTime::now()
                    .duration_since(SystemTime::UNIX_EPOCH)
                    .unwrap()
                    .as_nanos()
            ));

            // Write to temp file
            fs::write(&temp_path, content).map_err(|e| format!("write_file temp: {}", e))?;

            // Fsync the temp file data
            let f = fs::File::open(&temp_path).map_err(|e| format!("write_file open temp: {}", e))?;
            f.sync_all().map_err(|e| format!("write_file fsync temp: {}", e))?;
            drop(f);

            // Set mode if requested
            if let Some(mode) = params.mode {
                fs::set_permissions(&temp_path, fs::Permissions::from_mode(mode))
                    .map_err(|e| format!("write_file chmod: {}", e))?;
            }

            // Rename (atomic on same filesystem)
            fs::rename(&temp_path, path).map_err(|e| {
                // Clean up temp on failure
                let _ = fs::remove_file(&temp_path);
                format!("write_file rename: {}", e)
            })?;

            // Fsync parent to ensure the rename is durable
            Self::fsync_parent(path).map_err(|e| format!("write_file fsync parent: {}", e))?;
        } else {
            fs::write(path, content).map_err(|e| format!("write_file: {}", e))?;

            // Fsync the file
            let f = fs::File::open(path).map_err(|e| format!("write_file open: {}", e))?;
            f.sync_all().map_err(|e| format!("write_file fsync: {}", e))?;
            drop(f);

            if let Some(mode) = params.mode {
                fs::set_permissions(path, fs::Permissions::from_mode(mode))
                    .map_err(|e| format!("write_file chmod: {}", e))?;
            }

            // Fsync parent for new file creation
            Self::fsync_parent(path).map_err(|e| format!("write_file fsync parent: {}", e))?;
        }

        Ok(())
    }
|
||||
|
||||
    /// Append `params.content` to `params.path`, creating the file if it does
    /// not exist. Content is base64-decoded when `encoding == Some("base64")`.
    /// The file data is fsynced; the parent directory fsync is best-effort
    /// (only matters when the append created the file).
    pub fn append_file(params: &AppendFileParams) -> Result<(), String> {
        use std::io::Write;
        let path = Path::new(&params.path);

        let content: Vec<u8> = if params.encoding.as_deref() == Some("base64") {
            STANDARD.decode(&params.content).map_err(|e| format!("append_file base64 decode: {}", e))?
        } else {
            params.content.as_bytes().to_vec()
        };

        let mut file = fs::OpenOptions::new()
            .create(true)
            .append(true)
            .open(path)
            .map_err(|e| format!("append_file: {}", e))?;

        file.write_all(&content)
            .map_err(|e| format!("append_file write: {}", e))?;
        file.sync_all()
            .map_err(|e| format!("append_file fsync: {}", e))?;

        // Fsync parent in case this created the file
        Self::fsync_parent(path).ok();

        Ok(())
    }
|
||||
|
||||
pub fn delete_file(path: &Path) -> Result<(), String> {
|
||||
fs::remove_file(path).map_err(|e| format!("delete_file: {}", e))?;
|
||||
Self::fsync_parent(path).map_err(|e| format!("delete_file fsync parent: {}", e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn copy_file(params: &CopyMoveParams) -> Result<(), String> {
|
||||
let from = Path::new(¶ms.from);
|
||||
let to = Path::new(¶ms.to);
|
||||
|
||||
if !params.overwrite.unwrap_or(true) && to.exists() {
|
||||
return Err("copy_file: destination already exists".to_string());
|
||||
}
|
||||
|
||||
fs::copy(from, to).map_err(|e| format!("copy_file: {}", e))?;
|
||||
|
||||
if params.preserve_timestamps.unwrap_or(false) {
|
||||
// Copy timestamps
|
||||
let metadata = fs::metadata(from).map_err(|e| format!("copy_file stat: {}", e))?;
|
||||
let atime = filetime::FileTime::from_last_access_time(&metadata);
|
||||
let mtime = filetime::FileTime::from_last_modification_time(&metadata);
|
||||
filetime::set_file_times(to, atime, mtime).ok();
|
||||
}
|
||||
|
||||
// Fsync parent after creating new file entry
|
||||
Self::fsync_parent(to).map_err(|e| format!("copy_file fsync parent: {}", e))?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
    /// Move `params.from` to `params.to`. Uses an atomic rename on the same
    /// filesystem; across filesystems (EXDEV) it falls back to copy + delete,
    /// which is NOT atomic — a crash mid-fallback can leave both files.
    pub fn move_file(params: &CopyMoveParams) -> Result<(), String> {
        let from = Path::new(&params.from);
        let to = Path::new(&params.to);

        if !params.overwrite.unwrap_or(true) && to.exists() {
            return Err("move_file: destination already exists".to_string());
        }

        match fs::rename(from, to) {
            Ok(()) => {
                // Fsync both parent directories (source and dest may differ)
                // Source-side fsync is best-effort; dest-side failure is fatal.
                Self::fsync_parent(from).ok();
                Self::fsync_parent(to).map_err(|e| format!("move_file fsync parent: {}", e))?;
            }
            Err(e) if e.raw_os_error() == Some(libc::EXDEV) => {
                // Cross-device: copy then delete
                Self::copy_file(params)?;
                Self::delete_file(from)?;
            }
            Err(e) => return Err(format!("move_file: {}", e)),
        }
        Ok(())
    }
|
||||
|
||||
pub fn file_exists(path: &Path) -> bool {
|
||||
path.exists() && path.is_file()
|
||||
}
|
||||
|
||||
    /// Stat the file at `path`. Delegates to `stat_path`, which uses
    /// `symlink_metadata` (stats the link itself, not its target).
    pub fn file_stat(path: &Path) -> Result<FileStats, String> {
        Self::stat_path(path)
    }
|
||||
|
||||
// ── Directory operations ────────────────────────────────────────────
|
||||
|
||||
    /// List the entries of `params.path`, optionally recursively and
    /// optionally filtered by `params.filter` (glob or `regex:`-prefixed
    /// pattern, matched against the entry NAME, not the full path).
    pub fn list_directory(params: &ListDirectoryParams) -> Result<Vec<DirectoryEntry>, String> {
        let path = Path::new(&params.path);
        let mut entries = Vec::new();

        if params.recursive.unwrap_or(false) {
            Self::list_directory_recursive(path, &mut entries, params)?;
        } else {
            let dir_entries = fs::read_dir(path)
                .map_err(|e| format!("list_directory: {}", e))?;

            for entry_result in dir_entries {
                let entry = entry_result.map_err(|e| format!("list_directory entry: {}", e))?;
                let dir_entry = Self::to_directory_entry(&entry, params)?;
                if let Some(filter) = &params.filter {
                    if !Self::matches_filter(&dir_entry.name, filter) {
                        continue;
                    }
                }
                entries.push(dir_entry);
            }
        }

        Ok(entries)
    }
|
||||
|
||||
    /// Depth-first recursive listing helper for `list_directory`.
    /// Subdirectories are always descended into, even when their own name
    /// fails the filter — the filter only controls which entries are emitted.
    fn list_directory_recursive(
        path: &Path,
        entries: &mut Vec<DirectoryEntry>,
        params: &ListDirectoryParams,
    ) -> Result<(), String> {
        let dir_entries = fs::read_dir(path)
            .map_err(|e| format!("list_directory_recursive: {}", e))?;

        for entry_result in dir_entries {
            let entry = entry_result.map_err(|e| format!("list_directory entry: {}", e))?;
            let dir_entry = Self::to_directory_entry(&entry, params)?;

            let matches = if let Some(filter) = &params.filter {
                Self::matches_filter(&dir_entry.name, filter)
            } else {
                true
            };

            if matches {
                entries.push(dir_entry.clone());
            }

            // is_directory comes from DirEntry::file_type(), which does not
            // follow symlinks, so symlinked dirs are not traversed (no cycles).
            if dir_entry.is_directory {
                Self::list_directory_recursive(&entry.path(), entries, params)?;
            }
        }

        Ok(())
    }
|
||||
|
||||
    /// Create the directory at `params.path` (recursively by default),
    /// optionally chmod it, then fsync the parent so the new entry is durable.
    pub fn create_directory(params: &CreateDirectoryParams) -> Result<(), String> {
        let path = Path::new(&params.path);

        if params.recursive.unwrap_or(true) {
            fs::create_dir_all(path).map_err(|e| format!("create_directory: {}", e))?;
        } else {
            fs::create_dir(path).map_err(|e| format!("create_directory: {}", e))?;
        }

        // Note: mode applies only to the leaf, not intermediate dirs created
        // by create_dir_all.
        if let Some(mode) = params.mode {
            fs::set_permissions(path, fs::Permissions::from_mode(mode))
                .map_err(|e| format!("create_directory chmod: {}", e))?;
        }

        // Fsync parent to ensure directory entry is durable
        Self::fsync_parent(path).map_err(|e| format!("create_directory fsync: {}", e))?;
        Ok(())
    }
|
||||
|
||||
    /// Delete the directory at `params.path` (recursively by default, i.e.
    /// including contents), then fsync the parent so the removal is durable.
    pub fn delete_directory(params: &DeleteDirectoryParams) -> Result<(), String> {
        let path = Path::new(&params.path);

        if params.recursive.unwrap_or(true) {
            fs::remove_dir_all(path).map_err(|e| format!("delete_directory: {}", e))?;
        } else {
            fs::remove_dir(path).map_err(|e| format!("delete_directory: {}", e))?;
        }

        Self::fsync_parent(path).map_err(|e| format!("delete_directory fsync: {}", e))?;
        Ok(())
    }
|
||||
|
||||
pub fn directory_exists(path: &Path) -> bool {
|
||||
path.exists() && path.is_dir()
|
||||
}
|
||||
|
||||
    /// Stat the directory at `path`. Delegates to `stat_path`, which uses
    /// `symlink_metadata` (stats the link itself, not its target).
    pub fn directory_stat(path: &Path) -> Result<FileStats, String> {
        Self::stat_path(path)
    }
|
||||
|
||||
// ── Batch operations ────────────────────────────────────────────────
|
||||
|
||||
    /// Execute multiple operations, collecting parent dirs for a single fsync pass at the end.
    ///
    /// Operations run independently: a failure is recorded in that op's
    /// `BatchResult` and execution continues with the next op. The final
    /// fsync pass is best-effort (errors ignored).
    pub fn batch(params: &BatchParams) -> Vec<BatchResult> {
        let mut results = Vec::with_capacity(params.operations.len());
        let mut dirs_to_sync: Vec<PathBuf> = Vec::new();

        for (index, op) in params.operations.iter().enumerate() {
            let result = Self::execute_batch_op(op, &mut dirs_to_sync);
            results.push(BatchResult {
                index,
                success: result.is_ok(),
                error: result.err(),
            });
        }

        // Batch fsync all affected parent directories
        // sort + dedup so each directory is synced at most once.
        dirs_to_sync.sort();
        dirs_to_sync.dedup();
        for dir in &dirs_to_sync {
            Self::fsync_dir(dir).ok();
        }

        results
    }
|
||||
|
||||
    /// Execute a single batch operation, recording each affected parent
    /// directory in `dirs_to_sync` so the caller can fsync them once at the
    /// end instead of per-op. Supported ops: write, append, delete, copy,
    /// move, mkdir, rmdir.
    fn execute_batch_op(op: &BatchOp, dirs_to_sync: &mut Vec<PathBuf>) -> Result<(), String> {
        let path = Path::new(&op.path);

        match op.op_type.as_str() {
            "write" => {
                // Missing content is treated as empty. NOTE(review): unlike
                // write_file, batch writes are plain text only (no base64).
                let content = op.content.as_deref().unwrap_or("");
                fs::write(path, content.as_bytes()).map_err(|e| e.to_string())?;
                if let Some(parent) = path.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            "append" => {
                use std::io::Write;
                let content = op.content.as_deref().unwrap_or("");
                let mut file = fs::OpenOptions::new()
                    .create(true)
                    .append(true)
                    .open(path)
                    .map_err(|e| e.to_string())?;
                file.write_all(content.as_bytes()).map_err(|e| e.to_string())?;
                if let Some(parent) = path.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            "delete" => {
                fs::remove_file(path).map_err(|e| e.to_string())?;
                if let Some(parent) = path.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            "copy" => {
                let to = Path::new(op.target_path.as_deref().ok_or("copy: missing targetPath")?);
                fs::copy(path, to).map_err(|e| e.to_string())?;
                if let Some(parent) = to.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            "move" => {
                // Plain rename: no EXDEV fallback here (unlike move_file).
                let to = Path::new(op.target_path.as_deref().ok_or("move: missing targetPath")?);
                fs::rename(path, to).map_err(|e| e.to_string())?;
                if let Some(parent) = path.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
                if let Some(parent) = to.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            "mkdir" => {
                if op.recursive.unwrap_or(true) {
                    fs::create_dir_all(path).map_err(|e| e.to_string())?;
                } else {
                    fs::create_dir(path).map_err(|e| e.to_string())?;
                }
                if let Some(parent) = path.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            "rmdir" => {
                if op.recursive.unwrap_or(true) {
                    fs::remove_dir_all(path).map_err(|e| e.to_string())?;
                } else {
                    fs::remove_dir(path).map_err(|e| e.to_string())?;
                }
                if let Some(parent) = path.parent() {
                    dirs_to_sync.push(parent.to_path_buf());
                }
            }
            other => {
                return Err(format!("unknown batch op type: {}", other));
            }
        }
        Ok(())
    }
|
||||
|
||||
// ── Transaction operations ──────────────────────────────────────────
|
||||
|
||||
    /// Execute a sequence of operations with all-or-nothing semantics:
    /// file contents at each op's `path` are backed up first, and on failure
    /// the completed ops are rolled back in reverse order.
    ///
    /// NOTE(review): rollback only restores/removes each op's `path`; the
    /// `target_path` of a completed copy/move is not cleaned up, and a moved
    /// source is restored from the backup rather than renamed back — verify
    /// this is acceptable for callers relying on strict atomicity.
    pub fn execute_transaction(params: &TransactionParams) -> Result<(), String> {
        // Phase 1: Prepare backups
        // Only regular files that currently exist are backed up; a None
        // backup means "this path did not exist before the transaction".
        let mut backups: Vec<(usize, Option<Vec<u8>>)> = Vec::new();
        for (i, op) in params.operations.iter().enumerate() {
            let path = Path::new(&op.path);
            let backup = if path.exists() && path.is_file() {
                Some(fs::read(path).map_err(|e| format!("transaction backup {}: {}", i, e))?)
            } else {
                None
            };
            backups.push((i, backup));
        }

        // Phase 2: Execute operations
        let mut completed = 0;
        let mut dirs_to_sync: Vec<PathBuf> = Vec::new();

        for (i, op) in params.operations.iter().enumerate() {
            let path = Path::new(&op.path);

            let result = match op.op_type.as_str() {
                "write" => {
                    let content = op.content.as_deref().unwrap_or("");
                    fs::write(path, content.as_bytes()).map_err(|e| e.to_string())
                }
                "append" => {
                    use std::io::Write;
                    let content = op.content.as_deref().unwrap_or("");
                    fs::OpenOptions::new()
                        .create(true)
                        .append(true)
                        .open(path)
                        .and_then(|mut f| f.write_all(content.as_bytes()))
                        .map_err(|e| e.to_string())
                }
                "delete" => fs::remove_file(path).map_err(|e| e.to_string()),
                "copy" => {
                    let to = op.target_path.as_deref().ok_or("copy: missing targetPath")?;
                    fs::copy(path, to).map(|_| ()).map_err(|e| e.to_string())
                }
                "move" => {
                    let to = op.target_path.as_deref().ok_or("move: missing targetPath")?;
                    fs::rename(path, to).map_err(|e| e.to_string())
                }
                other => Err(format!("unknown transaction op: {}", other)),
            };

            match result {
                Ok(()) => {
                    completed = i + 1;
                    if let Some(parent) = path.parent() {
                        dirs_to_sync.push(parent.to_path_buf());
                    }
                    if let Some(tp) = &op.target_path {
                        if let Some(parent) = Path::new(tp).parent() {
                            dirs_to_sync.push(parent.to_path_buf());
                        }
                    }
                }
                Err(e) => {
                    // Rollback completed operations in reverse order
                    // (restore pre-transaction bytes, or remove files the
                    // transaction created). Rollback errors are ignored.
                    for j in (0..completed).rev() {
                        let (_, ref backup) = backups[j];
                        let rollback_path = Path::new(&params.operations[j].path);
                        if let Some(data) = backup {
                            let _ = fs::write(rollback_path, data);
                        } else {
                            let _ = fs::remove_file(rollback_path);
                        }
                    }
                    return Err(format!("transaction failed at op {}: {}", i, e));
                }
            }
        }

        // Phase 3: Batch fsync all affected directories
        dirs_to_sync.sort();
        dirs_to_sync.dedup();
        for dir in &dirs_to_sync {
            Self::fsync_dir(dir).ok();
        }

        Ok(())
    }
|
||||
|
||||
// ── Path operations ─────────────────────────────────────────────────
|
||||
|
||||
    /// Normalize `path` to a canonical string. Existing paths are resolved
    /// with `canonicalize` (absolute, symlinks followed); non-existent paths
    /// get a purely lexical cleanup of `.` and `..` components.
    pub fn normalize_path(path: &str) -> String {
        let p = Path::new(path);
        // Use canonicalize if the path exists, otherwise just clean it
        match p.canonicalize() {
            Ok(canonical) => canonical.to_string_lossy().into_owned(),
            Err(_) => {
                // Manual normalization for non-existent paths
                // NOTE(review): a leading ".." with nothing to pop is simply
                // dropped (e.g. "../a" → "a") — confirm callers expect this.
                let mut components = Vec::new();
                for component in p.components() {
                    match component {
                        std::path::Component::ParentDir => { components.pop(); }
                        std::path::Component::CurDir => {}
                        _ => components.push(component),
                    }
                }
                let result: PathBuf = components.into_iter().collect();
                result.to_string_lossy().into_owned()
            }
        }
    }
|
||||
|
||||
pub fn join_path(segments: &[String]) -> String {
|
||||
let mut result = PathBuf::new();
|
||||
for seg in segments {
|
||||
result.push(seg);
|
||||
}
|
||||
result.to_string_lossy().into_owned()
|
||||
}
|
||||
|
||||
// ── Streaming operations ─────────────────────────────────────────────
|
||||
|
||||
    /// Read a file in chunks, writing IpcStreamChunk messages to stdout.
    /// Returns the total number of bytes read.
    ///
    /// Each chunk is base64-encoded and tagged with the request id so the TS
    /// bridge can route it; the caller sends the final totalBytes response
    /// after this returns.
    pub fn read_file_stream(
        request_id: &str,
        params: &ReadFileStreamParams,
    ) -> Result<u64, String> {
        use std::io::{Read, Write};
        let path = Path::new(&params.path);
        let chunk_size = params.chunk_size.unwrap_or(65536); // 64KB default

        let mut file = fs::File::open(path)
            .map_err(|e| format!("read_file_stream: {}", e))?;

        let mut total_bytes: u64 = 0;
        let mut buf = vec![0u8; chunk_size];

        loop {
            let n = file.read(&mut buf).map_err(|e| format!("read_file_stream read: {}", e))?;
            if n == 0 {
                break;
            }
            total_bytes += n as u64;

            let encoded = STANDARD.encode(&buf[..n]);
            let chunk = IpcStreamChunk {
                id: request_id.to_string(),
                stream: true,
                data: serde_json::json!(encoded),
            };

            // Serialization failure silently drops the chunk; the byte count
            // still advances, matching send_json's best-effort behavior.
            if let Ok(json) = serde_json::to_string(&chunk) {
                let stdout = io::stdout();
                let mut out = stdout.lock();
                let _ = writeln!(out, "{}", json);
                let _ = out.flush();
            }
        }

        Ok(total_bytes)
    }
|
||||
|
||||
// ── Helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
    /// Build a FileStats for `path` using `symlink_metadata`, so a symlink is
    /// described as itself (is_symbolic_link=true) rather than its target.
    pub fn stat_path(path: &Path) -> Result<FileStats, String> {
        let metadata = fs::symlink_metadata(path).map_err(|e| format!("stat: {}", e))?;
        let file_type = metadata.file_type();

        Ok(FileStats {
            size: metadata.len(),
            // created() is unsupported on some filesystems; None falls back
            // to the sentinel timestamp inside system_time_to_iso.
            birthtime: system_time_to_iso(metadata.created().ok()),
            mtime: system_time_to_iso(metadata.modified().ok()),
            atime: system_time_to_iso(metadata.accessed().ok()),
            is_file: file_type.is_file(),
            is_directory: file_type.is_dir(),
            is_symbolic_link: file_type.is_symlink(),
            mode: metadata.permissions().mode(),
        })
    }
|
||||
|
||||
    /// Convert a fs::DirEntry into the protocol's DirectoryEntry, attaching
    /// full stats only when `params.include_stats` is set (stat failures
    /// degrade to stats=None rather than failing the listing).
    fn to_directory_entry(
        entry: &fs::DirEntry,
        params: &ListDirectoryParams,
    ) -> Result<DirectoryEntry, String> {
        let file_type = entry.file_type().map_err(|e| format!("dir entry type: {}", e))?;
        let path = entry.path();

        let stats = if params.include_stats.unwrap_or(false) {
            Self::stat_path(&path).ok()
        } else {
            None
        };

        Ok(DirectoryEntry {
            name: entry.file_name().to_string_lossy().into_owned(),
            path: path.to_string_lossy().into_owned(),
            is_file: file_type.is_file(),
            is_directory: file_type.is_dir(),
            is_symbolic_link: file_type.is_symlink(),
            stats,
        })
    }
|
||||
|
||||
fn matches_filter(name: &str, filter: &str) -> bool {
|
||||
if let Some(regex_pattern) = filter.strip_prefix("regex:") {
|
||||
// Raw regex pattern from TypeScript RegExp
|
||||
if let Ok(regex) = regex_lite::Regex::new(regex_pattern) {
|
||||
return regex.is_match(name);
|
||||
}
|
||||
return name.contains(regex_pattern);
|
||||
}
|
||||
// Simple glob matching: * matches any sequence
|
||||
let pattern = filter.replace('.', "\\.").replace('*', ".*");
|
||||
if let Ok(regex) = regex_lite::Regex::new(&format!("^{}$", pattern)) {
|
||||
regex.is_match(name)
|
||||
} else {
|
||||
name.contains(filter)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert an optional `SystemTime` into the wire timestamp format
/// `"<unix-seconds>.<millis>Z"` used throughout the IPC protocol.
///
/// `None` — and any time before the Unix epoch — collapses to `"0.000Z"`.
/// This is deliberately a lightweight stand-in for full ISO formatting,
/// avoiding a chrono dependency.
fn system_time_to_iso(time: Option<SystemTime>) -> String {
    let duration = time
        .and_then(|t| t.duration_since(SystemTime::UNIX_EPOCH).ok())
        .unwrap_or_default();
    format!("{}.{:03}Z", duration.as_secs(), duration.subsec_millis())
}
|
||||
|
||||
109
rust/crates/smartfs-core/src/watch.rs
Normal file
109
rust/crates/smartfs-core/src/watch.rs
Normal file
@@ -0,0 +1,109 @@
|
||||
use crate::FsOps;
|
||||
use notify::{Config, EventKind, RecommendedWatcher, RecursiveMode, Watcher};
|
||||
use smartfs_protocol::{IpcEvent, WatchEvent};
|
||||
use std::collections::HashMap;
|
||||
use std::path::Path;
|
||||
use std::sync::{Arc, Mutex};
|
||||
use std::time::SystemTime;
|
||||
|
||||
/// Manages file watchers, emitting events as IPC events to stdout.
///
/// Each active watch is stored keyed by its caller-assigned id; holding the
/// `RecommendedWatcher` in the map is what keeps the underlying OS watch
/// alive. The map is wrapped in `Arc<Mutex<..>>` so the manager can be
/// shared/used from multiple call sites.
pub struct WatchManager {
    // id -> live watcher; dropping an entry stops that watch.
    watchers: Arc<Mutex<HashMap<String, RecommendedWatcher>>>,
}
|
||||
|
||||
impl WatchManager {
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
watchers: Arc::new(Mutex::new(HashMap::new())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn add_watch(
|
||||
&self,
|
||||
id: String,
|
||||
path: &str,
|
||||
recursive: bool,
|
||||
) -> Result<(), String> {
|
||||
let watch_id = id.clone();
|
||||
let mode = if recursive {
|
||||
RecursiveMode::Recursive
|
||||
} else {
|
||||
RecursiveMode::NonRecursive
|
||||
};
|
||||
|
||||
let (tx, rx) = std::sync::mpsc::channel::<notify::Result<notify::Event>>();
|
||||
|
||||
let mut watcher = RecommendedWatcher::new(tx, Config::default())
|
||||
.map_err(|e| format!("watch create: {}", e))?;
|
||||
|
||||
watcher
|
||||
.watch(Path::new(path), mode)
|
||||
.map_err(|e| format!("watch path: {}", e))?;
|
||||
|
||||
// Spawn a thread to read events and write IPC events to stdout
|
||||
let watch_id_clone = watch_id.clone();
|
||||
std::thread::spawn(move || {
|
||||
for event in rx {
|
||||
match event {
|
||||
Ok(ev) => {
|
||||
let event_type = match ev.kind {
|
||||
EventKind::Create(_) => "add",
|
||||
EventKind::Modify(_) => "change",
|
||||
EventKind::Remove(_) => "delete",
|
||||
_ => continue,
|
||||
};
|
||||
|
||||
for ev_path in &ev.paths {
|
||||
let stats = if event_type != "delete" {
|
||||
FsOps::stat_path(ev_path).ok()
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let watch_event = WatchEvent {
|
||||
event_type: event_type.to_string(),
|
||||
path: ev_path.to_string_lossy().into_owned(),
|
||||
timestamp: {
|
||||
let d = SystemTime::now()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap_or_default();
|
||||
format!("{}.{:03}Z", d.as_secs(), d.subsec_millis())
|
||||
},
|
||||
stats,
|
||||
};
|
||||
|
||||
let ipc_event = IpcEvent {
|
||||
event: format!("watch:{}", watch_id_clone),
|
||||
data: serde_json::to_value(&watch_event).unwrap_or_default(),
|
||||
};
|
||||
|
||||
if let Ok(json) = serde_json::to_string(&ipc_event) {
|
||||
// Write to stdout (IPC channel)
|
||||
println!("{}", json);
|
||||
}
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
eprintln!("watch error: {}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Store the watcher to keep it alive
|
||||
self.watchers
|
||||
.lock()
|
||||
.map_err(|e| format!("lock: {}", e))?
|
||||
.insert(id, watcher);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn remove_all(&self) -> Result<(), String> {
|
||||
self.watchers
|
||||
.lock()
|
||||
.map_err(|e| format!("lock: {}", e))?
|
||||
.clear();
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
9
rust/crates/smartfs-protocol/Cargo.toml
Normal file
9
rust/crates/smartfs-protocol/Cargo.toml
Normal file
@@ -0,0 +1,9 @@
|
||||
[package]
|
||||
name = "smartfs-protocol"
|
||||
version.workspace = true
|
||||
edition.workspace = true
|
||||
license.workspace = true
|
||||
|
||||
[dependencies]
|
||||
serde.workspace = true
|
||||
serde_json.workspace = true
|
||||
243
rust/crates/smartfs-protocol/src/lib.rs
Normal file
243
rust/crates/smartfs-protocol/src/lib.rs
Normal file
@@ -0,0 +1,243 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// ── IPC envelope types ──────────────────────────────────────────────────────
|
||||
|
||||
/// Request from TypeScript (via stdin)
///
/// One JSON object per IPC request; `id` is echoed back in the matching
/// `IpcResponse` so the TypeScript side can correlate replies.
#[derive(Debug, Deserialize)]
pub struct IpcRequest {
    /// Caller-chosen correlation id.
    pub id: String,
    /// Command name (e.g. `readFile`, `watch`); selects how `params` decodes.
    pub method: String,
    /// Method-specific parameters, decoded per-method into the typed
    /// `*Params` structs below.
    pub params: serde_json::Value,
}
|
||||
|
||||
/// Response to TypeScript (via stdout)
///
/// Exactly one of `result`/`error` is populated depending on `success`;
/// absent fields are omitted from the serialized JSON entirely.
#[derive(Debug, Serialize)]
pub struct IpcResponse {
    /// Correlation id copied from the originating `IpcRequest`.
    pub id: String,
    /// True for `result`-bearing (or void) responses, false for errors.
    pub success: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub result: Option<serde_json::Value>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
}
|
||||
|
||||
impl IpcResponse {
|
||||
pub fn ok(id: String, result: serde_json::Value) -> Self {
|
||||
Self { id, success: true, result: Some(result), error: None }
|
||||
}
|
||||
|
||||
pub fn ok_void(id: String) -> Self {
|
||||
Self { id, success: true, result: None, error: None }
|
||||
}
|
||||
|
||||
pub fn err(id: String, error: String) -> Self {
|
||||
Self { id, success: false, result: None, error: Some(error) }
|
||||
}
|
||||
}
|
||||
|
||||
/// Stream chunk (Rust → TS, before final response)
///
/// Emitted zero or more times for streaming commands (e.g. `readFileStream`)
/// while the request identified by `id` is still in flight; the terminal
/// `IpcResponse` follows the last chunk.
#[derive(Debug, Serialize)]
pub struct IpcStreamChunk {
    /// Correlation id of the in-flight request this chunk belongs to.
    pub id: String,
    /// Always true; distinguishes chunks from final responses on the wire.
    pub stream: bool,
    /// Chunk payload (base64-encoded file data for read streams).
    pub data: serde_json::Value,
}
|
||||
|
||||
/// Unsolicited event (Rust → TS)
///
/// Not tied to any request id; used for push-style notifications such as
/// watch events, where `event` is `"watch:<id>"` and `data` is a
/// serialized `WatchEvent`.
#[derive(Debug, Serialize)]
pub struct IpcEvent {
    /// Event channel name.
    pub event: String,
    /// Event payload, shape depends on the channel.
    pub data: serde_json::Value,
}
|
||||
|
||||
// ── Filesystem domain types ─────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
// Stat result for a file or directory. Field renames produce the camelCase
// keys the TypeScript side expects. Timestamps use the protocol's
// "<unix-seconds>.<millis>Z" string format (see `system_time_to_iso` in
// smartfs-core), not full ISO 8601.
pub struct FileStats {
    /// Size in bytes.
    pub size: u64,
    pub birthtime: String,
    pub mtime: String,
    pub atime: String,
    #[serde(rename = "isFile")]
    pub is_file: bool,
    #[serde(rename = "isDirectory")]
    pub is_directory: bool,
    #[serde(rename = "isSymbolicLink")]
    pub is_symbolic_link: bool,
    /// Unix permission/mode bits.
    pub mode: u32,
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
// One entry of a directory listing. `stats` is only populated when the
// listing was requested with `includeStats`, and is omitted from the JSON
// otherwise.
pub struct DirectoryEntry {
    /// Base name of the entry.
    pub name: String,
    /// Full path of the entry.
    pub path: String,
    #[serde(rename = "isFile")]
    pub is_file: bool,
    #[serde(rename = "isDirectory")]
    pub is_directory: bool,
    #[serde(rename = "isSymbolicLink")]
    pub is_symbolic_link: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stats: Option<FileStats>,
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone)]
// Payload of a "watch:<id>" IpcEvent, emitted by the watch manager.
pub struct WatchEvent {
    /// Serialized as "type": one of "add", "change" or "delete".
    #[serde(rename = "type")]
    pub event_type: String,
    /// Path the event applies to.
    pub path: String,
    /// Event time in the protocol's "<secs>.<millis>Z" format.
    pub timestamp: String,
    /// Fresh stats for the path; absent for "delete" events.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stats: Option<FileStats>,
}
|
||||
|
||||
// ── Command parameter types ─────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
// Params for `readFile`.
pub struct ReadFileParams {
    pub path: String,
    /// Text encoding name, or a marker like "buffer"/"base64" for binary
    /// transfer — interpreted by the op implementation.
    pub encoding: Option<String>,
}

#[derive(Debug, Deserialize)]
// Params for `writeFile`.
pub struct WriteFileParams {
    pub path: String,
    /// Payload; base64-encoded when `encoding` is "base64".
    pub content: String,
    /// Request an atomic (durable) write when true.
    pub atomic: Option<bool>,
    /// Unix permission bits for the written file.
    pub mode: Option<u32>,
    pub encoding: Option<String>,
}

#[derive(Debug, Deserialize)]
// Params for `appendFile`.
pub struct AppendFileParams {
    pub path: String,
    pub content: String,
    pub encoding: Option<String>,
}

#[derive(Debug, Deserialize)]
// Generic single-path params (exists/stat/delete style commands).
pub struct PathParams {
    pub path: String,
}

#[derive(Debug, Deserialize)]
// Params shared by `copyFile` and `moveFile`.
pub struct CopyMoveParams {
    pub from: String,
    pub to: String,
    /// Allow replacing an existing destination.
    pub overwrite: Option<bool>,
    #[serde(rename = "preserveTimestamps")]
    pub preserve_timestamps: Option<bool>,
}

#[derive(Debug, Deserialize)]
// Params for `listDirectory`.
pub struct ListDirectoryParams {
    pub path: String,
    pub recursive: Option<bool>,
    /// Attach per-entry `FileStats` when true.
    #[serde(rename = "includeStats")]
    pub include_stats: Option<bool>,
    /// Name filter: a glob, or "regex:<pattern>" (see `matches_filter`).
    pub filter: Option<String>,
}

#[derive(Debug, Deserialize)]
// Params for `createDirectory`.
pub struct CreateDirectoryParams {
    pub path: String,
    /// Create missing parents (mkdir -p semantics) when true.
    pub recursive: Option<bool>,
    pub mode: Option<u32>,
}

#[derive(Debug, Deserialize)]
// Params for `deleteDirectory`.
pub struct DeleteDirectoryParams {
    pub path: String,
    /// Remove contents recursively when true.
    pub recursive: Option<bool>,
}

#[derive(Debug, Deserialize)]
// Params for `watch`; `id` names the "watch:<id>" event channel.
pub struct WatchParams {
    pub path: String,
    pub id: String,
    pub recursive: Option<bool>,
}
|
||||
|
||||
// ── Batch operations ────────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
// One operation inside a `batch` request. The op-specific fields are a
// superset across op kinds; which ones are read depends on `op_type`.
pub struct BatchOp {
    /// Serialized as "type": selects the operation kind.
    #[serde(rename = "type")]
    pub op_type: String,
    pub path: String,
    #[serde(rename = "targetPath")]
    pub target_path: Option<String>,
    pub content: Option<String>,
    pub encoding: Option<String>,
    pub atomic: Option<bool>,
    pub mode: Option<u32>,
    pub overwrite: Option<bool>,
    pub recursive: Option<bool>,
}

#[derive(Debug, Serialize)]
// Per-operation outcome of a batch, reported positionally via `index`.
pub struct BatchResult {
    /// Index of the operation in the submitted `operations` list.
    pub index: usize,
    pub success: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub error: Option<String>,
}

#[derive(Debug, Deserialize)]
// Params for `batch`: operations executed independently, each reporting
// its own `BatchResult`.
pub struct BatchParams {
    pub operations: Vec<BatchOp>,
}
|
||||
|
||||
// ── Transaction operations ──────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
// One operation inside an `executeTransaction` request.
pub struct TransactionOp {
    /// Serialized as "type": selects the operation kind.
    #[serde(rename = "type")]
    pub op_type: String,
    pub path: String,
    #[serde(rename = "targetPath")]
    pub target_path: Option<String>,
    pub content: Option<String>,
    pub encoding: Option<String>,
}

#[derive(Debug, Deserialize)]
// Params for `executeTransaction`. Unlike `batch`, the whole request
// succeeds or fails as a unit (no per-op result type exists).
pub struct TransactionParams {
    pub operations: Vec<TransactionOp>,
}
|
||||
|
||||
// ── Path operations ─────────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
// Params for `normalizePath`.
pub struct NormalizePathParams {
    pub path: String,
}

#[derive(Debug, Deserialize)]
// Params for `joinPath`: segments are joined into a single path string.
pub struct JoinPathParams {
    pub segments: Vec<String>,
}
|
||||
|
||||
// ── Streaming operations ────────────────────────────────────────────────────
|
||||
|
||||
#[derive(Debug, Deserialize)]
// Params for `readFileStream`: data comes back as IpcStreamChunk messages.
pub struct ReadFileStreamParams {
    pub path: String,
    /// Bytes per chunk; implementation default applies when absent.
    #[serde(rename = "chunkSize")]
    pub chunk_size: Option<usize>,
}

#[derive(Debug, Deserialize)]
// Params for `writeStreamBegin`; the response carries the `streamId` used
// by subsequent `writeStreamChunk` calls.
pub struct WriteStreamBeginParams {
    pub path: String,
    pub atomic: Option<bool>,
    pub mode: Option<u32>,
}

#[derive(Debug, Deserialize)]
// Params for `writeStreamChunk`: one base64 chunk of an open write stream.
pub struct WriteStreamChunkParams {
    #[serde(rename = "streamId")]
    pub stream_id: String,
    /// Base64-encoded chunk payload.
    pub data: String,
    /// True on the final chunk; the stream is finalized afterwards.
    pub last: bool,
}
|
||||
270
test/test.rust.provider.node+bun.ts
Normal file
270
test/test.rust.provider.node+bun.ts
Normal file
@@ -0,0 +1,270 @@
|
||||
/**
|
||||
* Tests for Rust provider
|
||||
*/
|
||||
|
||||
import * as path from 'node:path';
|
||||
import * as fs from 'node:fs/promises';
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { SmartFs, SmartFsProviderRust } from '../ts/index.js';
|
||||
|
||||
// ── Fixtures ────────────────────────────────────────────────────────────────

// Create temp directory for tests (under .nogit/ so it is never committed)
const tempDir = path.join(process.cwd(), '.nogit', 'test-temp-rust');

// Create test instance. One shared provider/SmartFs pair is reused by all
// tests; the Rust binary is spawned lazily on the first operation and shut
// down explicitly by the final test.
const rustProvider = new SmartFsProviderRust();
const smartFs = new SmartFs(rustProvider);

tap.preTask('setup temp directory', async () => {
  await fs.rm(tempDir, { recursive: true, force: true });
  await fs.mkdir(tempDir, { recursive: true });
});

tap.test('should create SmartFS instance with Rust provider', async () => {
  expect(smartFs).toBeInstanceOf(SmartFs);
  expect(smartFs.getProviderName()).toEqual('rust');
});

tap.test('should write and read a file', async () => {
  const filePath = path.join(tempDir, 'test.txt');
  await smartFs.file(filePath).write('Hello, World!');
  const content = await smartFs.file(filePath).encoding('utf8').read();
  expect(content).toEqual('Hello, World!');
});

tap.test('should write atomically', async () => {
  const filePath = path.join(tempDir, 'atomic.txt');
  await smartFs.file(filePath).atomic().write('Atomic write test');
  const content = await smartFs.file(filePath).encoding('utf8').read();
  expect(content).toEqual('Atomic write test');
});

tap.test('should check if file exists', async () => {
  const filePath = path.join(tempDir, 'exists-test.txt');
  await smartFs.file(filePath).write('exists');

  const exists = await smartFs.file(filePath).exists();
  expect(exists).toEqual(true);

  const notExists = await smartFs.file(path.join(tempDir, 'nonexistent.txt')).exists();
  expect(notExists).toEqual(false);
});

tap.test('should get file stats', async () => {
  const filePath = path.join(tempDir, 'stats-test.txt');
  await smartFs.file(filePath).write('stats test');
  const stats = await smartFs.file(filePath).stat();

  expect(stats).toHaveProperty('size');
  expect(stats).toHaveProperty('mtime');
  expect(stats).toHaveProperty('birthtime');
  expect(stats.isFile).toEqual(true);
  expect(stats.isDirectory).toEqual(false);
});

tap.test('should delete a file', async () => {
  const filePath = path.join(tempDir, 'delete-test.txt');
  await smartFs.file(filePath).write('to delete');
  await smartFs.file(filePath).delete();

  const exists = await smartFs.file(filePath).exists();
  expect(exists).toEqual(false);
});

tap.test('should copy a file', async () => {
  const srcPath = path.join(tempDir, 'copy-src.txt');
  const destPath = path.join(tempDir, 'copy-dest.txt');
  await smartFs.file(srcPath).write('copy me');
  await smartFs.file(srcPath).copy(destPath);

  const content = await smartFs.file(destPath).encoding('utf8').read();
  expect(content).toEqual('copy me');
});

tap.test('should move a file', async () => {
  const srcPath = path.join(tempDir, 'move-src.txt');
  const destPath = path.join(tempDir, 'move-dest.txt');
  await smartFs.file(srcPath).write('move me');
  await smartFs.file(srcPath).move(destPath);

  const exists = await smartFs.file(srcPath).exists();
  expect(exists).toEqual(false);

  const content = await smartFs.file(destPath).encoding('utf8').read();
  expect(content).toEqual('move me');
});

tap.test('should create and list a directory', async () => {
  const dirPath = path.join(tempDir, 'list-test');
  await smartFs.directory(dirPath).create();

  await smartFs.file(path.join(dirPath, 'a.txt')).write('a');
  await smartFs.file(path.join(dirPath, 'b.txt')).write('b');

  const entries = await smartFs.directory(dirPath).list();
  expect(entries.length).toEqual(2);
  const names = entries.map(e => e.name).sort();
  expect(names).toEqual(['a.txt', 'b.txt']);
});

tap.test('should check directory exists', async () => {
  const dirPath = path.join(tempDir, 'exists-dir');
  await smartFs.directory(dirPath).create();

  const exists = await smartFs.directory(dirPath).exists();
  expect(exists).toEqual(true);

  const notExists = await smartFs.directory(path.join(tempDir, 'nonexistent-dir')).exists();
  expect(notExists).toEqual(false);
});

tap.test('should delete directory recursively', async () => {
  const dirPath = path.join(tempDir, 'delete-dir');
  await smartFs.directory(dirPath).create();
  await smartFs.file(path.join(dirPath, 'file.txt')).write('data');

  await smartFs.directory(dirPath).delete();

  const exists = await smartFs.directory(dirPath).exists();
  expect(exists).toEqual(false);
});

tap.test('should list directory recursively', async () => {
  const dirPath = path.join(tempDir, 'recursive-test');
  await smartFs.directory(dirPath).create();
  await smartFs.directory(path.join(dirPath, 'sub')).create();
  await smartFs.file(path.join(dirPath, 'root.txt')).write('root');
  await smartFs.file(path.join(dirPath, 'sub', 'child.txt')).write('child');

  const entries = await smartFs.directory(dirPath).recursive().list();
  const names = entries.map(e => e.name).sort();
  expect(names).toContain('root.txt');
  expect(names).toContain('child.txt');
  expect(names).toContain('sub');
});

// ── Append file ──────────────────────────────────────────────────────────────

tap.test('should append to a file', async () => {
  const filePath = path.join(tempDir, 'append-test.txt');
  await smartFs.file(filePath).write('Hello');
  await smartFs.file(filePath).append(' World!');
  const content = await smartFs.file(filePath).encoding('utf8').read();
  expect(content).toEqual('Hello World!');
});

// ── Binary Buffer round-trip ─────────────────────────────────────────────────
// Exercises the base64 transport path on the provider directly (bypassing
// the fluent builder) to verify bytes survive the IPC round-trip untouched.

tap.test('should write and read binary data (Buffer)', async () => {
  const filePath = path.join(tempDir, 'binary-test.bin');
  const binaryData = Buffer.from([0x00, 0x01, 0x02, 0xFF, 0xFE, 0xFD, 0x80, 0x7F]);

  await rustProvider.writeFile(filePath, binaryData);
  const result = await rustProvider.readFile(filePath, { encoding: 'buffer' });

  expect(Buffer.isBuffer(result)).toEqual(true);
  expect(Buffer.compare(result as Buffer, binaryData)).toEqual(0);
});

// ── Streaming ────────────────────────────────────────────────────────────────

tap.test('should read a file via stream', async () => {
  const filePath = path.join(tempDir, 'stream-read.txt');
  const testData = 'Stream test data with enough content to verify streaming works correctly';
  await smartFs.file(filePath).write(testData);

  const readStream = await smartFs.file(filePath).readStream();
  const chunks: Uint8Array[] = [];
  const reader = readStream.getReader();
  let done = false;
  while (!done) {
    const result = await reader.read();
    done = result.done;
    if (result.value) chunks.push(result.value);
  }
  const content = Buffer.concat(chunks.map(c => Buffer.from(c))).toString('utf8');
  expect(content).toEqual(testData);
});

tap.test('should write a file via stream', async () => {
  const filePath = path.join(tempDir, 'stream-write.txt');
  const testData = 'Writing via stream';

  const writeStream = await smartFs.file(filePath).writeStream();
  const writer = writeStream.getWriter();
  await writer.write(new Uint8Array(Buffer.from(testData)));
  await writer.close();

  const content = await smartFs.file(filePath).encoding('utf8').read();
  expect(content).toEqual(testData);
});

// ── Watch ────────────────────────────────────────────────────────────────────
// NOTE: timing-based — fixed sleeps give the native watcher time to start
// and the event time to cross the IPC boundary; may be flaky on slow CI.

tap.test('should watch for file changes', async () => {
  const dirPath = path.join(tempDir, 'watch-test');
  await smartFs.directory(dirPath).create();
  const filePath = path.join(dirPath, 'watched.txt');

  const received: any[] = [];
  const watcher = await smartFs
    .watch(dirPath)
    .onAll((event) => {
      received.push(event);
    })
    .start();

  // Give watcher time to start
  await new Promise((resolve) => setTimeout(resolve, 300));

  await smartFs.file(filePath).write('changed');

  // Wait for event to propagate
  await new Promise((resolve) => setTimeout(resolve, 500));

  await watcher.stop();

  expect(received.length).toBeGreaterThan(0);
  expect(received[0]).toHaveProperty('type');
  expect(received[0]).toHaveProperty('path');
});

// ── Transactions ─────────────────────────────────────────────────────────────

tap.test('should execute a transaction', async () => {
  const file1 = path.join(tempDir, 'tx-file1.txt');
  const file2 = path.join(tempDir, 'tx-file2.txt');

  await smartFs.transaction()
    .file(file1).write('tx content 1')
    .file(file2).write('tx content 2')
    .commit();

  const c1 = await smartFs.file(file1).encoding('utf8').read();
  const c2 = await smartFs.file(file2).encoding('utf8').read();
  expect(c1).toEqual('tx content 1');
  expect(c2).toEqual('tx content 2');
});

// ── Directory filter ─────────────────────────────────────────────────────────

tap.test('should filter directory listings with regex', async () => {
  const dirPath = path.join(tempDir, 'filter-test');
  await smartFs.directory(dirPath).create();
  await smartFs.file(path.join(dirPath, 'file1.ts')).write('ts');
  await smartFs.file(path.join(dirPath, 'file2.js')).write('js');
  await smartFs.file(path.join(dirPath, 'file3.ts')).write('ts');

  const entries = await smartFs.directory(dirPath).filter(/\.ts$/).list();
  expect(entries.length).toEqual(2);
  const allTs = entries.every(e => e.name.endsWith('.ts'));
  expect(allTs).toEqual(true);
});

// ── Shutdown ─────────────────────────────────────────────────────────────────
// Runs last: kills the Rust binary and removes the fixture directory.

tap.test('should shutdown the Rust provider', async () => {
  await rustProvider.shutdown();
  await fs.rm(tempDir, { recursive: true, force: true });
});

export default tap.start();
|
||||
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@push.rocks/smartfs',
|
||||
version: '1.3.3',
|
||||
version: '1.4.0',
|
||||
description: 'a cross platform extendable fs module'
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
* Directory builder for fluent directory operations
|
||||
*/
|
||||
|
||||
import * as crypto from 'crypto';
|
||||
import * as crypto from 'node:crypto';
|
||||
import type { ISmartFsProvider } from '../interfaces/mod.provider.js';
|
||||
import type {
|
||||
TFileMode,
|
||||
|
||||
@@ -13,6 +13,7 @@ export { SmartFsWatcher, SmartFsActiveWatcher } from './classes/smartfs.watcher.
|
||||
// Providers
|
||||
export { SmartFsProviderNode } from './providers/smartfs.provider.node.js';
|
||||
export { SmartFsProviderMemory } from './providers/smartfs.provider.memory.js';
|
||||
export { SmartFsProviderRust } from './providers/smartfs.provider.rust.js';
|
||||
|
||||
// Interfaces and Types
|
||||
export type {
|
||||
|
||||
@@ -3,10 +3,10 @@
|
||||
* Uses Node.js fs/promises and fs.watch APIs
|
||||
*/
|
||||
|
||||
import * as fs from 'fs/promises';
|
||||
import * as fsSync from 'fs';
|
||||
import * as pathModule from 'path';
|
||||
import { Readable, Writable } from 'stream';
|
||||
import * as fs from 'node:fs/promises';
|
||||
import * as fsSync from 'node:fs';
|
||||
import * as pathModule from 'node:path';
|
||||
import { Readable, Writable } from 'node:stream';
|
||||
|
||||
import type {
|
||||
ISmartFsProvider,
|
||||
|
||||
544
ts/providers/smartfs.provider.rust.ts
Normal file
544
ts/providers/smartfs.provider.rust.ts
Normal file
@@ -0,0 +1,544 @@
|
||||
/**
|
||||
* Rust filesystem provider for SmartFS
|
||||
* Uses a Rust binary via smartrust IPC for XFS-safe filesystem operations.
|
||||
* All metadata-changing operations (rename, unlink, create) fsync the parent
|
||||
* directory, guaranteeing durability on XFS and other delayed-logging filesystems.
|
||||
*/
|
||||
|
||||
import * as plugins from '../smartfs.plugins.js';
|
||||
import { fileURLToPath } from 'node:url';
|
||||
|
||||
import type {
|
||||
ISmartFsProvider,
|
||||
IProviderCapabilities,
|
||||
TWatchCallback,
|
||||
IWatcherHandle,
|
||||
} from '../interfaces/mod.provider.js';
|
||||
|
||||
import type {
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IReadOptions,
|
||||
IWriteOptions,
|
||||
IStreamOptions,
|
||||
ICopyOptions,
|
||||
IListOptions,
|
||||
IWatchOptions,
|
||||
ITransactionOperation,
|
||||
} from '../interfaces/mod.types.js';
|
||||
|
||||
// ── IPC command type map ────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Wire shape of a stat result as serialized by the Rust side (camelCase
 * keys; timestamps are protocol-format strings, not Date objects).
 */
interface IFileStatsJson {
  size: number;
  birthtime: string;
  mtime: string;
  atime: string;
  isFile: boolean;
  isDirectory: boolean;
  isSymbolicLink: boolean;
  /** Unix permission/mode bits. */
  mode: number;
}

/**
 * Wire shape of one directory-listing entry; `stats` is present only when
 * the listing was requested with `includeStats`.
 */
interface IDirectoryEntryJson {
  name: string;
  path: string;
  isFile: boolean;
  isDirectory: boolean;
  isSymbolicLink: boolean;
  stats?: IFileStatsJson;
}
|
||||
|
||||
/**
 * One operation of a `batch` IPC request. Fields are a superset across op
 * kinds; which are read depends on `type`.
 */
interface IBatchOp {
  type: string;
  path: string;
  targetPath?: string;
  content?: string;
  encoding?: string;
  atomic?: boolean;
  mode?: number;
  overwrite?: boolean;
  recursive?: boolean;
}

/** Per-operation outcome of a batch, matched by position via `index`. */
interface IBatchResult {
  index: number;
  success: boolean;
  error?: string;
}

/** One operation of an `executeTransaction` IPC request. */
interface ITransactionOpJson {
  type: string;
  path: string;
  targetPath?: string;
  content?: string;
  encoding?: string;
}
|
||||
|
||||
/**
 * Typed command map for the smartrust bridge: each key is an IPC method
 * name; `params`/`result` type that method's request and response payloads.
 * Must stay in sync with the Rust `smartfs-protocol` crate.
 */
type TSmartFsCommands = {
  readFile: { params: { path: string; encoding?: string }; result: { content: string; isBuffer?: boolean } };
  writeFile: { params: { path: string; content: string; atomic?: boolean; mode?: number; encoding?: string }; result: void };
  appendFile: { params: { path: string; content: string; encoding?: string }; result: void };
  deleteFile: { params: { path: string }; result: void };
  copyFile: { params: { from: string; to: string; overwrite?: boolean; preserveTimestamps?: boolean }; result: void };
  moveFile: { params: { from: string; to: string; overwrite?: boolean; preserveTimestamps?: boolean }; result: void };
  fileExists: { params: { path: string }; result: boolean };
  fileStat: { params: { path: string }; result: IFileStatsJson };
  listDirectory: { params: { path: string; recursive?: boolean; includeStats?: boolean; filter?: string }; result: IDirectoryEntryJson[] };
  createDirectory: { params: { path: string; recursive?: boolean; mode?: number }; result: void };
  deleteDirectory: { params: { path: string; recursive?: boolean }; result: void };
  directoryExists: { params: { path: string }; result: boolean };
  directoryStat: { params: { path: string }; result: IFileStatsJson };
  watch: { params: { path: string; id: string; recursive?: boolean }; result: void };
  unwatchAll: { params: Record<string, never>; result: void };
  batch: { params: { operations: IBatchOp[] }; result: IBatchResult[] };
  executeTransaction: { params: { operations: ITransactionOpJson[] }; result: void };
  normalizePath: { params: { path: string }; result: string };
  joinPath: { params: { segments: string[] }; result: string };
  // Streaming reads deliver base64 `chunk` messages before the final result.
  readFileStream: { params: { path: string; chunkSize?: number }; result: { totalBytes: number }; chunk: string };
  writeStreamBegin: { params: { path: string; atomic?: boolean; mode?: number }; result: { streamId: string } };
  writeStreamChunk: { params: { streamId: string; data: string; last: boolean }; result: void };
  ping: { params: Record<string, never>; result: { pong: boolean } };
};
|
||||
|
||||
// ── Provider class ──────────────────────────────────────────────────────────
|
||||
|
||||
export class SmartFsProviderRust implements ISmartFsProvider {
  public readonly name = 'rust';

  // Advertised feature set; consumed by SmartFs to gate fluent operations.
  public readonly capabilities: IProviderCapabilities = {
    supportsWatch: true,
    supportsAtomic: true,
    supportsTransactions: true,
    supportsStreaming: true,
    supportsSymlinks: true,
    supportsPermissions: true,
  };

  // IPC bridge to the spawned smartfs-bin process.
  private bridge: plugins.smartrust.RustBridge<TSmartFsCommands>;
  // True once the binary has been spawned successfully (see ensureRunning).
  private initialized = false;
  // Watch bookkeeping: callback per watch id, and a counter for fresh ids —
  // presumably consumed by the watch methods further down; not visible here.
  private watchCallbacks = new Map<string, TWatchCallback>();
  private watchCounter = 0;
||||
|
||||
  /**
   * Configure (but do not yet spawn) the bridge to the Rust binary.
   *
   * @param options.binaryPath - explicit path to smartfs-bin, overriding
   *   the built-in search locations.
   */
  constructor(options?: { binaryPath?: string }) {
    // Build search paths for the binary
    const localPaths: string[] = [];
    const currentDir = plugins.path.dirname(fileURLToPath(import.meta.url));
    // Two levels up from this compiled module = the package root.
    const packageDir = plugins.path.resolve(currentDir, '../../');

    // Check dist_rust/ first (production), then rust/target/ (development)
    const suffix = SmartFsProviderRust.getPlatformSuffix();
    if (suffix) {
      localPaths.push(plugins.path.join(packageDir, 'dist_rust', `smartfs-bin_${suffix}`));
    }
    localPaths.push(plugins.path.join(packageDir, 'dist_rust', 'smartfs-bin'));
    localPaths.push(plugins.path.join(packageDir, 'rust', 'target', 'release', 'smartfs-bin'));
    localPaths.push(plugins.path.join(packageDir, 'rust', 'target', 'debug', 'smartfs-bin'));

    this.bridge = new plugins.smartrust.RustBridge<TSmartFsCommands>({
      binaryName: 'smartfs-bin',
      cliArgs: ['--management'],
      requestTimeoutMs: 30_000,
      readyTimeoutMs: 10_000,
      localPaths,
      searchSystemPath: false,
      // An explicit binaryPath wins over the search list.
      ...(options?.binaryPath ? { binaryPath: options.binaryPath } : {}),
    });

    // Listen for watch events from Rust
    // NOTE(review): this handler is empty — watch events are received but
    // never forwarded to watchCallbacks here; confirm where (or whether)
    // forwarding is actually wired up.
    this.bridge.on('management:watch', (data: any) => {
      // Event name is "watch:<id>", data contains the watch event
      // The smartrust bridge strips the "management:" prefix
    });
  }
|
||||
|
||||
private static getPlatformSuffix(): string | null {
|
||||
const archMap: Record<string, string> = { x64: 'amd64', arm64: 'arm64' };
|
||||
const os = process.platform;
|
||||
const arch = archMap[process.arch];
|
||||
if (!arch) return null;
|
||||
return `${os}_${arch}`;
|
||||
}
|
||||
|
||||
private async ensureRunning(): Promise<void> {
|
||||
if (!this.initialized) {
|
||||
const started = await this.bridge.spawn();
|
||||
if (!started) {
|
||||
throw new Error('SmartFsProviderRust: failed to start smartfs-bin');
|
||||
}
|
||||
this.initialized = true;
|
||||
|
||||
// Set up watch event forwarding
|
||||
// The bridge emits events as 'management:<eventName>'
|
||||
// Watch events come as 'management:watch:<id>'
|
||||
const originalEmit = this.bridge.emit.bind(this.bridge);
|
||||
this.bridge.on = ((event: string, handler: (...args: any[]) => void) => {
|
||||
// Intercept watch events
|
||||
return (this.bridge as any).__proto__.on.call(this.bridge, event, handler);
|
||||
}) as any;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Shut down the Rust binary.
|
||||
*/
|
||||
public async shutdown(): Promise<void> {
|
||||
if (this.initialized) {
|
||||
this.bridge.kill();
|
||||
this.initialized = false;
|
||||
}
|
||||
}
|
||||
|
||||
// ── File Operations ───────────────────────────────────────────────────
|
||||
|
||||
public async readFile(path: string, options?: IReadOptions): Promise<Buffer | string> {
|
||||
await this.ensureRunning();
|
||||
const encoding = options?.encoding || 'utf8';
|
||||
const result = await this.bridge.sendCommand('readFile', { path, encoding });
|
||||
|
||||
if (result.isBuffer) {
|
||||
// Decode base64 back to Buffer
|
||||
return Buffer.from(result.content, 'base64');
|
||||
}
|
||||
|
||||
if (encoding === 'buffer') {
|
||||
return Buffer.from(result.content, 'base64');
|
||||
}
|
||||
|
||||
return result.content;
|
||||
}
|
||||
|
||||
public async writeFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
const contentStr = Buffer.isBuffer(content) ? content.toString('base64') : content;
|
||||
const encoding = Buffer.isBuffer(content) ? 'base64' : (options?.encoding || undefined);
|
||||
|
||||
await this.bridge.sendCommand('writeFile', {
|
||||
path,
|
||||
content: contentStr,
|
||||
atomic: options?.atomic,
|
||||
mode: options?.mode,
|
||||
encoding,
|
||||
});
|
||||
}
|
||||
|
||||
public async appendFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
const contentStr = Buffer.isBuffer(content) ? content.toString('base64') : content;
|
||||
const encoding = Buffer.isBuffer(content) ? 'base64' : (options?.encoding || undefined);
|
||||
|
||||
await this.bridge.sendCommand('appendFile', {
|
||||
path,
|
||||
content: contentStr,
|
||||
encoding,
|
||||
});
|
||||
}
|
||||
|
||||
public async deleteFile(path: string): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
await this.bridge.sendCommand('deleteFile', { path });
|
||||
}
|
||||
|
||||
public async copyFile(from: string, to: string, options?: ICopyOptions): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
await this.bridge.sendCommand('copyFile', {
|
||||
from,
|
||||
to,
|
||||
overwrite: options?.overwrite,
|
||||
preserveTimestamps: options?.preserveTimestamps,
|
||||
});
|
||||
}
|
||||
|
||||
public async moveFile(from: string, to: string, options?: ICopyOptions): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
await this.bridge.sendCommand('moveFile', {
|
||||
from,
|
||||
to,
|
||||
overwrite: options?.overwrite,
|
||||
preserveTimestamps: options?.preserveTimestamps,
|
||||
});
|
||||
}
|
||||
|
||||
public async fileExists(path: string): Promise<boolean> {
|
||||
await this.ensureRunning();
|
||||
return this.bridge.sendCommand('fileExists', { path });
|
||||
}
|
||||
|
||||
public async fileStat(path: string): Promise<IFileStats> {
|
||||
await this.ensureRunning();
|
||||
const stats = await this.bridge.sendCommand('fileStat', { path });
|
||||
return this.convertStats(stats);
|
||||
}
|
||||
|
||||
public async createReadStream(path: string, options?: IStreamOptions): Promise<ReadableStream<Uint8Array>> {
|
||||
await this.ensureRunning();
|
||||
const chunkSize = options?.chunkSize || options?.highWaterMark || 65536;
|
||||
|
||||
const streaming = this.bridge.sendCommandStreaming('readFileStream', {
|
||||
path,
|
||||
chunkSize,
|
||||
});
|
||||
|
||||
const iterator = streaming[Symbol.asyncIterator]();
|
||||
|
||||
return new ReadableStream({
|
||||
async pull(controller) {
|
||||
const { value, done } = await iterator.next();
|
||||
if (done) {
|
||||
controller.close();
|
||||
return;
|
||||
}
|
||||
// value is a base64-encoded string chunk
|
||||
const buffer = Buffer.from(value as string, 'base64');
|
||||
controller.enqueue(new Uint8Array(buffer));
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
public async createWriteStream(path: string, options?: IStreamOptions): Promise<WritableStream<Uint8Array>> {
|
||||
await this.ensureRunning();
|
||||
const { streamId } = await this.bridge.sendCommand('writeStreamBegin', {
|
||||
path,
|
||||
atomic: undefined,
|
||||
});
|
||||
|
||||
const bridge = this.bridge;
|
||||
return new WritableStream({
|
||||
async write(chunk) {
|
||||
const base64 = Buffer.from(chunk).toString('base64');
|
||||
await bridge.sendCommand('writeStreamChunk', {
|
||||
streamId,
|
||||
data: base64,
|
||||
last: false,
|
||||
});
|
||||
},
|
||||
async close() {
|
||||
await bridge.sendCommand('writeStreamChunk', {
|
||||
streamId,
|
||||
data: '',
|
||||
last: true,
|
||||
});
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// ── Directory Operations ──────────────────────────────────────────────
|
||||
|
||||
public async listDirectory(path: string, options?: IListOptions): Promise<IDirectoryEntry[]> {
|
||||
await this.ensureRunning();
|
||||
|
||||
// Convert function filter to string pattern for IPC
|
||||
let filter: string | undefined;
|
||||
if (options?.filter) {
|
||||
if (typeof options.filter === 'string') {
|
||||
filter = options.filter;
|
||||
} else if (options.filter instanceof RegExp) {
|
||||
// Prefix with "regex:" so Rust treats it as a raw regex pattern
|
||||
filter = `regex:${options.filter.source}`;
|
||||
}
|
||||
// Function filters can't be serialized — filter client-side after receiving
|
||||
}
|
||||
|
||||
const entries = await this.bridge.sendCommand('listDirectory', {
|
||||
path,
|
||||
recursive: options?.recursive,
|
||||
includeStats: options?.includeStats,
|
||||
filter,
|
||||
});
|
||||
|
||||
let result: IDirectoryEntry[] = entries.map((e: any) => ({
|
||||
name: e.name,
|
||||
path: e.path,
|
||||
isFile: e.isFile,
|
||||
isDirectory: e.isDirectory,
|
||||
isSymbolicLink: e.isSymbolicLink,
|
||||
stats: e.stats ? this.convertStats(e.stats) : undefined,
|
||||
}));
|
||||
|
||||
// Apply function filter client-side if needed
|
||||
if (typeof options?.filter === 'function') {
|
||||
result = result.filter(options.filter);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
public async createDirectory(path: string, options?: { recursive?: boolean; mode?: number }): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
await this.bridge.sendCommand('createDirectory', {
|
||||
path,
|
||||
recursive: options?.recursive ?? true,
|
||||
mode: options?.mode,
|
||||
});
|
||||
}
|
||||
|
||||
public async deleteDirectory(path: string, options?: { recursive?: boolean }): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
await this.bridge.sendCommand('deleteDirectory', {
|
||||
path,
|
||||
recursive: options?.recursive ?? true,
|
||||
});
|
||||
}
|
||||
|
||||
public async directoryExists(path: string): Promise<boolean> {
|
||||
await this.ensureRunning();
|
||||
return this.bridge.sendCommand('directoryExists', { path });
|
||||
}
|
||||
|
||||
public async directoryStat(path: string): Promise<IFileStats> {
|
||||
await this.ensureRunning();
|
||||
const stats = await this.bridge.sendCommand('directoryStat', { path });
|
||||
return this.convertStats(stats);
|
||||
}
|
||||
|
||||
// ── Watch Operations ──────────────────────────────────────────────────
|
||||
|
||||
public async watch(path: string, callback: TWatchCallback, options?: IWatchOptions): Promise<IWatcherHandle> {
|
||||
await this.ensureRunning();
|
||||
|
||||
const watchId = `w_${++this.watchCounter}`;
|
||||
this.watchCallbacks.set(watchId, callback);
|
||||
|
||||
// Listen for events from this watch
|
||||
const eventName = `management:watch:${watchId}`;
|
||||
this.bridge.on(eventName, async (data: any) => {
|
||||
const cb = this.watchCallbacks.get(watchId);
|
||||
if (!cb) return;
|
||||
|
||||
// Apply filter
|
||||
if (options?.filter) {
|
||||
if (typeof options.filter === 'function') {
|
||||
if (!options.filter(data.path)) return;
|
||||
} else if (options.filter instanceof RegExp) {
|
||||
if (!options.filter.test(data.path)) return;
|
||||
} else if (typeof options.filter === 'string') {
|
||||
const pattern = options.filter.replace(/\*/g, '.*');
|
||||
if (!new RegExp(`^${pattern}$`).test(data.path)) return;
|
||||
}
|
||||
}
|
||||
|
||||
await cb({
|
||||
type: data.type,
|
||||
path: data.path,
|
||||
timestamp: new Date(data.timestamp),
|
||||
stats: data.stats ? this.convertStats(data.stats) : undefined,
|
||||
});
|
||||
});
|
||||
|
||||
await this.bridge.sendCommand('watch', {
|
||||
path,
|
||||
id: watchId,
|
||||
recursive: options?.recursive,
|
||||
});
|
||||
|
||||
return {
|
||||
stop: async () => {
|
||||
this.watchCallbacks.delete(watchId);
|
||||
this.bridge.removeAllListeners(eventName);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// ── Transaction Operations ────────────────────────────────────────────
|
||||
|
||||
public async prepareTransaction(operations: ITransactionOperation[]): Promise<ITransactionOperation[]> {
|
||||
// Prepare backups client-side by reading current state
|
||||
const prepared: ITransactionOperation[] = [];
|
||||
|
||||
for (const op of operations) {
|
||||
const preparedOp = { ...op };
|
||||
try {
|
||||
const exists = await this.fileExists(op.path);
|
||||
if (exists) {
|
||||
const content = await this.readFile(op.path, { encoding: 'buffer' });
|
||||
const stats = await this.fileStat(op.path);
|
||||
preparedOp.backup = {
|
||||
existed: true,
|
||||
content: Buffer.isBuffer(content) ? content : Buffer.from(content),
|
||||
stats,
|
||||
};
|
||||
} else {
|
||||
preparedOp.backup = { existed: false };
|
||||
}
|
||||
} catch {
|
||||
preparedOp.backup = { existed: false };
|
||||
}
|
||||
prepared.push(preparedOp);
|
||||
}
|
||||
|
||||
return prepared;
|
||||
}
|
||||
|
||||
public async executeTransaction(operations: ITransactionOperation[]): Promise<void> {
|
||||
await this.ensureRunning();
|
||||
|
||||
const opsJson: ITransactionOpJson[] = operations.map((op) => ({
|
||||
type: op.type,
|
||||
path: op.path,
|
||||
targetPath: op.targetPath,
|
||||
content: op.content ? (Buffer.isBuffer(op.content) ? op.content.toString('utf8') : op.content) : undefined,
|
||||
encoding: op.encoding,
|
||||
}));
|
||||
|
||||
await this.bridge.sendCommand('executeTransaction', { operations: opsJson });
|
||||
}
|
||||
|
||||
public async rollbackTransaction(operations: ITransactionOperation[]): Promise<void> {
|
||||
// Rollback in reverse order using backups
|
||||
for (let i = operations.length - 1; i >= 0; i--) {
|
||||
const op = operations[i];
|
||||
if (!op.backup) continue;
|
||||
|
||||
try {
|
||||
if (op.backup.existed && op.backup.content) {
|
||||
await this.writeFile(op.path, op.backup.content);
|
||||
} else if (!op.backup.existed) {
|
||||
try {
|
||||
await this.deleteFile(op.path);
|
||||
} catch {
|
||||
// Ignore
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore rollback errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ── Path Operations ───────────────────────────────────────────────────
|
||||
|
||||
/**
 * Normalize a path string (collapse '.', '..' and duplicate separators).
 *
 * Path operations are synchronous in the provider interface, so this uses
 * the local Node implementation rather than round-tripping over IPC.
 */
public normalizePath(path: string): string {
  // Path operations are synchronous in the interface, so use local implementation
  return plugins.path.normalize(path);
}
|
||||
|
||||
/**
 * Join path segments using the local (Node) path implementation —
 * synchronous interface method, no IPC round-trip.
 */
public joinPath(...segments: string[]): string {
  return plugins.path.join(...segments);
}
|
||||
|
||||
// ── Helpers ───────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Convert the JSON stats shape sent by the Rust binary into IFileStats,
 * parsing its timestamp strings into Date objects.
 */
private convertStats(stats: IFileStatsJson): IFileStats {
  const { size, isFile, isDirectory, isSymbolicLink, mode } = stats;
  return {
    size,
    birthtime: this.parseTimestamp(stats.birthtime),
    mtime: this.parseTimestamp(stats.mtime),
    atime: this.parseTimestamp(stats.atime),
    isFile,
    isDirectory,
    isSymbolicLink,
    mode,
  };
}
|
||||
|
||||
/**
 * Parse a timestamp string from the Rust side into a Date.
 *
 * Rust sends "<unix_secs>.<millis>Z"; anything else falls back to the
 * native Date parser. (Fix: the previous `endsWith('Z') && includes('.')`
 * check also matched full ISO-8601 strings like "2024-01-01T00:00:00.123Z",
 * which were then silently mis-parsed via parseInt. A strict numeric match
 * disambiguates the two formats.)
 */
private parseTimestamp(ts: string): Date {
  const match = /^(\d+)\.(\d+)Z$/.exec(ts);
  if (match) {
    const secs = parseInt(match[1], 10);
    const millis = parseInt(match[2], 10);
    return new Date(secs * 1000 + millis);
  }
  return new Date(ts);
}
|
||||
}
|
||||
@@ -1,9 +1,10 @@
|
||||
// native scope
|
||||
import * as path from 'path';
|
||||
import * as path from 'node:path';
|
||||
|
||||
export { path };
|
||||
|
||||
// @push.rocks scope
|
||||
import * as smartpath from '@push.rocks/smartpath';
|
||||
import * as smartrust from '@push.rocks/smartrust';
|
||||
|
||||
export { smartpath };
|
||||
export { smartpath, smartrust };
|
||||
|
||||
Reference in New Issue
Block a user