initial
This commit is contained in:
66
.gitea/workflows/default_nottags.yaml
Normal file
66
.gitea/workflows/default_nottags.yaml
Normal file
@@ -0,0 +1,66 @@
|
||||
name: Default (not tags)
|
||||
|
||||
on:
|
||||
push:
|
||||
tags-ignore:
|
||||
- '**'
|
||||
|
||||
env:
|
||||
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
|
||||
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
|
||||
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||
|
||||
jobs:
|
||||
security:
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Install pnpm and npmci
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @ship.zone/npmci
|
||||
|
||||
- name: Run npm prepare
|
||||
run: npmci npm prepare
|
||||
|
||||
- name: Audit production dependencies
|
||||
run: |
|
||||
npmci command npm config set registry https://registry.npmjs.org
|
||||
npmci command pnpm audit --audit-level=high --prod
|
||||
continue-on-error: true
|
||||
|
||||
- name: Audit development dependencies
|
||||
run: |
|
||||
npmci command npm config set registry https://registry.npmjs.org
|
||||
npmci command pnpm audit --audit-level=high --dev
|
||||
continue-on-error: true
|
||||
|
||||
test:
|
||||
if: ${{ always() }}
|
||||
needs: security
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Test stable
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
npmci npm test
|
||||
|
||||
- name: Test build
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
npmci npm build
|
||||
124
.gitea/workflows/default_tags.yaml
Normal file
124
.gitea/workflows/default_tags.yaml
Normal file
@@ -0,0 +1,124 @@
|
||||
name: Default (tags)
|
||||
|
||||
on:
|
||||
push:
|
||||
tags:
|
||||
- '*'
|
||||
|
||||
env:
|
||||
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
|
||||
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
|
||||
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
|
||||
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
|
||||
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
|
||||
NPMCI_URL_CLOUDLY: ${{secrets.NPMCI_URL_CLOUDLY}}
|
||||
|
||||
jobs:
|
||||
security:
|
||||
runs-on: ubuntu-latest
|
||||
continue-on-error: true
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @ship.zone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Audit production dependencies
|
||||
run: |
|
||||
npmci command npm config set registry https://registry.npmjs.org
|
||||
npmci command pnpm audit --audit-level=high --prod
|
||||
continue-on-error: true
|
||||
|
||||
- name: Audit development dependencies
|
||||
run: |
|
||||
npmci command npm config set registry https://registry.npmjs.org
|
||||
npmci command pnpm audit --audit-level=high --dev
|
||||
continue-on-error: true
|
||||
|
||||
test:
|
||||
if: ${{ always() }}
|
||||
needs: security
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @ship.zone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Test stable
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
npmci npm test
|
||||
|
||||
- name: Test build
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
npmci npm build
|
||||
|
||||
release:
|
||||
needs: test
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @ship.zone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Release
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm publish
|
||||
|
||||
metadata:
|
||||
needs: test
|
||||
if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags/')
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: ${{ env.IMAGE }}
|
||||
continue-on-error: true
|
||||
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
pnpm install -g pnpm
|
||||
pnpm install -g @ship.zone/npmci
|
||||
npmci npm prepare
|
||||
|
||||
- name: Code quality
|
||||
run: |
|
||||
npmci command npm install -g typescript
|
||||
npmci npm install
|
||||
|
||||
- name: Trigger
|
||||
run: npmci trigger
|
||||
|
||||
- name: Build docs and upload artifacts
|
||||
run: |
|
||||
npmci node install stable
|
||||
npmci npm install
|
||||
pnpm install -g @git.zone/tsdoc
|
||||
npmci command tsdoc
|
||||
continue-on-error: true
|
||||
23
.gitignore
vendored
Normal file
23
.gitignore
vendored
Normal file
@@ -0,0 +1,23 @@
|
||||
.nogit/
|
||||
|
||||
# artifacts
|
||||
coverage/
|
||||
public/
|
||||
|
||||
# installs
|
||||
node_modules/
|
||||
|
||||
# caches
|
||||
.yarn/
|
||||
.cache/
|
||||
.rpt2_cache
|
||||
|
||||
# builds
|
||||
dist/
|
||||
dist_*/
|
||||
|
||||
# AI
|
||||
.claude/
|
||||
.serena/
|
||||
|
||||
#------# custom
|
||||
11
.vscode/launch.json
vendored
Normal file
11
.vscode/launch.json
vendored
Normal file
@@ -0,0 +1,11 @@
|
||||
{
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"command": "npm test",
|
||||
"name": "Run npm test",
|
||||
"request": "launch",
|
||||
"type": "node-terminal"
|
||||
}
|
||||
]
|
||||
}
|
||||
26
.vscode/settings.json
vendored
Normal file
26
.vscode/settings.json
vendored
Normal file
@@ -0,0 +1,26 @@
|
||||
{
|
||||
"json.schemas": [
|
||||
{
|
||||
"fileMatch": ["/npmextra.json"],
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"npmci": {
|
||||
"type": "object",
|
||||
"description": "settings for npmci"
|
||||
},
|
||||
"gitzone": {
|
||||
"type": "object",
|
||||
"description": "settings for gitzone",
|
||||
"properties": {
|
||||
"projectType": {
|
||||
"type": "string",
|
||||
"enum": ["website", "element", "service", "npm", "wcc"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
18
npmextra.json
Normal file
18
npmextra.json
Normal file
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"gitzone": {
|
||||
"projectType": "npm",
|
||||
"module": {
|
||||
"githost": "code.foss.global",
|
||||
"gitscope": "push.rocks",
|
||||
"gitrepo": "smartfs",
|
||||
"description": "a cross platform extendable fs module",
|
||||
"npmPackagename": "@push.rocks/smartfs",
|
||||
"license": "MIT",
|
||||
"projectDomain": "push.rocks"
|
||||
}
|
||||
},
|
||||
"npmci": {
|
||||
"npmGlobalTools": [],
|
||||
"npmAccessLevel": "public"
|
||||
}
|
||||
}
|
||||
51
package.json
Normal file
51
package.json
Normal file
@@ -0,0 +1,51 @@
|
||||
{
|
||||
"name": "@push.rocks/smartfs",
|
||||
"version": "1.0.1",
|
||||
"private": false,
|
||||
"description": "a cross platform extendable fs module",
|
||||
"main": "dist_ts/index.js",
|
||||
"typings": "dist_ts/index.d.ts",
|
||||
"type": "module",
|
||||
"author": "Task Venture Capital GmbH",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "(tstest test/ --verbose --logfile --timeout 120)",
|
||||
"build": "(tsbuild --web --allowimplicitany)",
|
||||
"buildDocs": "(tsdoc)"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@git.zone/tsbuild": "^3.1.0",
|
||||
"@git.zone/tsbundle": "^2.0.5",
|
||||
"@git.zone/tsrun": "^2.0.0",
|
||||
"@git.zone/tstest": "^3.1.1",
|
||||
"@push.rocks/tapbundle": "^6.0.3",
|
||||
"@types/node": "^20.8.7"
|
||||
},
|
||||
"packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://code.foss.global/push.rocks/smartfs.git"
|
||||
},
|
||||
"bugs": {
|
||||
"url": "https://code.foss.global/push.rocks/smartfs/issues"
|
||||
},
|
||||
"homepage": "https://code.foss.global/push.rocks/smartfs#readme",
|
||||
"files": [
|
||||
"ts/**/*",
|
||||
"ts_web/**/*",
|
||||
"dist/**/*",
|
||||
"dist_*/**/*",
|
||||
"dist_ts/**/*",
|
||||
"dist_ts_web/**/*",
|
||||
"assets/**/*",
|
||||
"cli.js",
|
||||
"npmextra.json",
|
||||
"readme.md"
|
||||
],
|
||||
"pnpm": {
|
||||
"overrides": {}
|
||||
},
|
||||
"dependencies": {
|
||||
"@push.rocks/smartpath": "^6.0.0"
|
||||
}
|
||||
}
|
||||
9741
pnpm-lock.yaml
generated
Normal file
9741
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
393
readme.hints.md
Normal file
393
readme.hints.md
Normal file
@@ -0,0 +1,393 @@
|
||||
# SmartFS Architecture Hints
|
||||
|
||||
## Overview
|
||||
|
||||
SmartFS is a modern, pluggable filesystem module built with TypeScript. It provides a fluent API for filesystem operations with support for multiple storage backends, transactions, streaming, and file watching.
|
||||
|
||||
## Core Design Principles
|
||||
|
||||
### 1. Fluent API with Action-Last Pattern
|
||||
|
||||
The API uses a **builder pattern** where configuration methods return `this` for chaining, and action methods return `Promise` for execution:
|
||||
|
||||
```typescript
|
||||
await fs.file('/path')
|
||||
.encoding('utf8') // configuration
|
||||
.atomic() // configuration
|
||||
.write('content'); // action (returns Promise)
|
||||
```
|
||||
|
||||
**Reasoning:**
|
||||
- Configuration is explicit and discoverable
|
||||
- Action methods clearly indicate execution points
|
||||
- Natural reading order: "configure what you want, then do it"
|
||||
- Type-safe chaining prevents invalid operations
|
||||
|
||||
### 2. Provider Architecture
|
||||
|
||||
All filesystem operations go through a provider interface (`ISmartFsProvider`). This allows:
|
||||
|
||||
- **Pluggable backends**: Node.js fs, memory, S3, etc.
|
||||
- **Testing**: Use memory provider for fast, isolated tests
|
||||
- **Abstraction**: Hide platform-specific details
|
||||
- **Extensibility**: Easy to add new storage backends
|
||||
|
||||
**Provider Responsibilities:**
|
||||
- Implement all filesystem operations
|
||||
- Handle path normalization
|
||||
- Provide capability flags
|
||||
- Implement transactions (or fall back to sequential)
|
||||
|
||||
### 3. Async-Only Design
|
||||
|
||||
No synchronous operations are exposed. All methods return Promises.
|
||||
|
||||
**Reasoning:**
|
||||
- Modern async/await patterns
|
||||
- Better performance (non-blocking)
|
||||
- Consistent API surface
|
||||
- Simplifies implementation
|
||||
|
||||
### 4. Web Streams API
|
||||
|
||||
Uses Web Streams (`ReadableStream`, `WritableStream`) instead of Node.js streams.
|
||||
|
||||
**Reasoning:**
|
||||
- Standard API across platforms (Node.js, browser, Deno)
|
||||
- Better composability with `.pipeTo()`
|
||||
- Backpressure handling built-in
|
||||
- Future-proof (web standard)
|
||||
|
||||
### 5. Transaction System
|
||||
|
||||
Transactions provide atomic multi-file operations with automatic rollback.
|
||||
|
||||
**Implementation:**
|
||||
1. `prepareTransaction()` - Create backups of existing files
|
||||
2. `executeTransaction()` - Execute all operations
|
||||
3. `rollbackTransaction()` - Restore from backups if any operation fails
|
||||
|
||||
**Trade-offs:**
|
||||
- Not truly atomic at OS level (no fsync barriers)
|
||||
- Best-effort rollback (can fail if disk full, etc.)
|
||||
- Sufficient for most use cases
|
||||
|
||||
### 6. File Watching
|
||||
|
||||
Event-based file watching with debouncing and filtering.
|
||||
|
||||
**Features:**
|
||||
- Recursive watching
|
||||
- Pattern filtering (glob, RegExp, function)
|
||||
- Debouncing to reduce event spam
|
||||
- Multiple event handlers per watcher
|
||||
|
||||
**Implementation Notes:**
|
||||
- Node provider uses `fs.watch`
|
||||
- Memory provider triggers events synchronously
|
||||
- Events include stats when available
|
||||
|
||||
## Directory Structure
|
||||
|
||||
```
|
||||
ts/
|
||||
├── classes/
|
||||
│ ├── smartfs.ts # Main entry point
|
||||
│ ├── smartfs.file.ts # File builder
|
||||
│ ├── smartfs.directory.ts # Directory builder
|
||||
│ ├── smartfs.transaction.ts # Transaction builder
|
||||
│ └── smartfs.watcher.ts # Watcher builder
|
||||
├── interfaces/
|
||||
│ ├── mod.provider.ts # Provider interface
|
||||
│ └── mod.types.ts # Type definitions
|
||||
├── providers/
|
||||
│ ├── smartfs.provider.node.ts # Node.js implementation
|
||||
│ └── smartfs.provider.memory.ts # Memory implementation
|
||||
└── index.ts # Public exports
|
||||
```
|
||||
|
||||
## Key Architectural Decisions
|
||||
|
||||
### Why Action-Last?
|
||||
|
||||
**Considered alternatives:**
|
||||
- Action-first: `fs.file('/path').read().asText()` - Less intuitive
|
||||
- Mixed: `fs.read('/path', { encoding: 'utf8' })` - Less fluent
|
||||
|
||||
**Chosen:** Action-last pattern
|
||||
- Clear execution point
|
||||
- Natural configuration flow
|
||||
- Better IDE autocomplete
|
||||
|
||||
### Why Separate Builders?
|
||||
|
||||
Each builder (`SmartFsFile`, `SmartFsDirectory`, etc.) is a separate class.
|
||||
|
||||
**Benefits:**
|
||||
- Type safety (can't call `.list()` on a file)
|
||||
- Clear separation of concerns
|
||||
- Better code organization
|
||||
- Easier to extend
|
||||
|
||||
### Transaction Implementation
|
||||
|
||||
**Design choice:** Prepare → Execute → Rollback pattern
|
||||
|
||||
**Alternative considered:** Copy-on-write filesystem
|
||||
- Would require provider-specific implementations
|
||||
- More complex
|
||||
- Not worth the complexity for most use cases
|
||||
|
||||
**Chosen approach:**
|
||||
1. Read existing files before operations
|
||||
2. Store backup data in operation objects
|
||||
3. Rollback by restoring from backups
|
||||
|
||||
**Limitations:**
|
||||
- Not truly atomic (no cross-file locks)
|
||||
- Rollback can fail (rare)
|
||||
- Memory overhead for large files
|
||||
|
||||
**Future improvements:**
|
||||
- Providers could override with native transactions
|
||||
- Add transaction isolation levels
|
||||
- Support for distributed transactions
|
||||
|
||||
### Path Handling
|
||||
|
||||
All paths are normalized by the provider.
|
||||
|
||||
**Reasoning:**
|
||||
- Providers know their path conventions
|
||||
- Allows Windows vs. Unix path handling
|
||||
- S3 provider can handle virtual paths
|
||||
- Memory provider uses consistent format
|
||||
|
||||
### Error Handling
|
||||
|
||||
Errors bubble up from providers.
|
||||
|
||||
**Design:**
|
||||
- No custom error wrapping
|
||||
- Provider errors are descriptive
|
||||
- Use standard Node.js error codes (ENOENT, etc.)
|
||||
|
||||
**Reasoning:**
|
||||
- Simpler implementation
|
||||
- Familiar error messages
|
||||
- Less abstraction overhead
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Streaming
|
||||
|
||||
Use streams for files > 1MB to avoid loading entire file into memory.
|
||||
|
||||
**Chunk size defaults:**
|
||||
- Read: 64KB (configurable via `.chunkSize()`)
|
||||
- Write: 16KB (Node.js default)
|
||||
|
||||
### Memory Provider
|
||||
|
||||
All data stored in Map<string, IMemoryEntry>.
|
||||
|
||||
**Trade-offs:**
|
||||
- Fast (no I/O)
|
||||
- Limited by available memory
|
||||
- No persistence
|
||||
- Perfect for testing
|
||||
|
||||
### Node Provider
|
||||
|
||||
Direct use of Node.js `fs/promises` API.
|
||||
|
||||
**Optimizations:**
|
||||
- Atomic writes use temp files + rename (atomic at OS level)
|
||||
- Move operations try rename first, fallback to copy+delete
|
||||
- Stat caching not implemented (can add if needed)
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Test Organization
|
||||
|
||||
- `test/test.memory.provider.ts` - Memory provider tests
|
||||
- `test/test.node.provider.ts` - Node.js provider tests
|
||||
- `test/test.ts` - Main test entry
|
||||
|
||||
### Testing Approach
|
||||
|
||||
1. **Memory provider tests** - Fast, no I/O, comprehensive
|
||||
2. **Node provider tests** - Real filesystem, integration
|
||||
3. **Both test suites** share similar test cases
|
||||
|
||||
**Reasoning:**
|
||||
- Memory tests are fast and isolated
|
||||
- Node tests verify real filesystem behavior
|
||||
- Easy to add new provider tests
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
### Potential Features
|
||||
|
||||
1. **S3 Provider** - Cloud storage backend
|
||||
2. **FTP Provider** - Remote filesystem access
|
||||
3. **Virtual Provider** - Union of multiple providers
|
||||
4. **Caching Layer** - In-memory cache for frequently accessed files
|
||||
5. **Compression** - Transparent compression/decompression
|
||||
6. **Encryption** - Transparent encryption at rest
|
||||
7. **Versioning** - Automatic file versioning
|
||||
|
||||
### API Extensions
|
||||
|
||||
1. **Shortcut methods** - `fs.read('/path')` as alternative to `fs.file('/path').read()`
|
||||
2. **Batch operations** - `fs.batch().file(...).file(...).execute()`
|
||||
3. **Query API** - SQL-like queries for file listings
|
||||
4. **Hooks** - Before/after operation hooks
|
||||
|
||||
## Dependencies
|
||||
|
||||
### Core Dependencies
|
||||
|
||||
- `@push.rocks/smartpath` - Path utilities
|
||||
- `@types/node` - TypeScript types for Node.js
|
||||
|
||||
### Why Minimal Dependencies?
|
||||
|
||||
**Philosophy:**
|
||||
- Keep the core light
|
||||
- Avoid dependency hell
|
||||
- Easier to maintain
|
||||
- Faster installation
|
||||
|
||||
**Trade-off:**
|
||||
- More code to maintain
|
||||
- Can't leverage external libraries
|
||||
|
||||
**Decision:**
|
||||
- Worth it for a foundational library
|
||||
- Providers can have their own dependencies
|
||||
|
||||
## Compatibility
|
||||
|
||||
### Node.js Versions
|
||||
|
||||
Requires Node.js 18+ for:
|
||||
- Native Web Streams API
|
||||
- `fs.rm()` (replaces deprecated `fs.rmdir()`)
|
||||
- `fs/promises` API
|
||||
|
||||
### Browser Compatibility
|
||||
|
||||
Core architecture supports browser, but:
|
||||
- No browser provider implemented yet
|
||||
- Would need IndexedDB or similar backend
|
||||
- Stream handling already compatible
|
||||
|
||||
### TypeScript
|
||||
|
||||
Uses ES2022 target, NodeNext modules.
|
||||
|
||||
**Reasoning:**
|
||||
- Modern JavaScript features
|
||||
- ESM-first approach
|
||||
- Better tree-shaking
|
||||
- Future-proof
|
||||
|
||||
## Common Patterns
|
||||
|
||||
### Provider Implementation Checklist
|
||||
|
||||
When implementing a new provider:
|
||||
|
||||
1. ✅ Implement `ISmartFsProvider` interface
|
||||
2. ✅ Set capability flags correctly
|
||||
3. ✅ Normalize paths in constructor/methods
|
||||
4. ✅ Handle errors with descriptive messages
|
||||
5. ✅ Implement transaction support (or fallback)
|
||||
6. ✅ Add comprehensive tests
|
||||
7. ✅ Document provider-specific limitations
|
||||
|
||||
### Builder Pattern Example
|
||||
|
||||
```typescript
|
||||
class SmartFsFile {
|
||||
private options = {};
|
||||
|
||||
// Configuration (returns this)
|
||||
encoding(enc: string): this {
|
||||
this.options.encoding = enc;
|
||||
return this;
|
||||
}
|
||||
|
||||
// Action (returns Promise)
|
||||
async read(): Promise<string | Buffer> {
|
||||
return this.provider.readFile(this.path, this.options);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## Known Limitations
|
||||
|
||||
### Transaction Atomicity
|
||||
|
||||
- Not truly atomic across files
|
||||
- Rollback can fail in edge cases
|
||||
- No distributed transaction support
|
||||
|
||||
**Mitigation:**
|
||||
- Document limitations clearly
|
||||
- Best-effort rollback is sufficient for most cases
|
||||
- Providers can override with native transactions
|
||||
|
||||
### File Watching
|
||||
|
||||
- Node.js `fs.watch` has platform-specific behavior
|
||||
- May miss rapid changes
|
||||
- No guarantee of event order
|
||||
|
||||
**Mitigation:**
|
||||
- Debouncing helps with rapid changes
|
||||
- Document platform differences
|
||||
- Memory provider has predictable behavior (testing)
|
||||
|
||||
### Path Handling
|
||||
|
||||
- No cross-provider path compatibility
|
||||
- Provider-specific path formats
|
||||
|
||||
**Mitigation:**
|
||||
- Document path format per provider
|
||||
- Use provider's `normalizePath()` method
|
||||
- Consider adding path conversion utilities
|
||||
|
||||
## Maintenance Notes
|
||||
|
||||
### When to Update
|
||||
|
||||
- **Breaking changes:** Avoid unless absolutely necessary
|
||||
- **New features:** Add as fluent methods or new builders
|
||||
- **Bug fixes:** Prioritize data integrity
|
||||
- **Performance:** Profile before optimizing
|
||||
|
||||
### Code Style
|
||||
|
||||
- Use TypeScript strict mode
|
||||
- Prefer composition over inheritance
|
||||
- Keep classes focused (SRP)
|
||||
- Document public APIs with JSDoc
|
||||
- Use meaningful variable names
|
||||
|
||||
### Testing Requirements
|
||||
|
||||
All changes must:
|
||||
- Pass existing tests
|
||||
- Add new tests for new features
|
||||
- Maintain >90% code coverage
|
||||
- Test both memory and Node providers
|
||||
|
||||
## Resources
|
||||
|
||||
- [Web Streams API](https://developer.mozilla.org/en-US/docs/Web/API/Streams_API)
|
||||
- [Node.js fs/promises](https://nodejs.org/api/fs.html#promises-api)
|
||||
- [Builder Pattern](https://refactoring.guru/design-patterns/builder)
|
||||
- [Provider Pattern](https://en.wikipedia.org/wiki/Provider_model)
|
||||
445
readme.md
Normal file
445
readme.md
Normal file
@@ -0,0 +1,445 @@
|
||||
# @push.rocks/smartfs
|
||||
|
||||
Modern, pluggable filesystem module with fluent API, Web Streams support, and multiple storage backends.
|
||||
|
||||
## Features
|
||||
|
||||
- **🎯 Fluent API** - Action-last chainable interface for elegant code
|
||||
- **🔌 Pluggable Providers** - Support for multiple storage backends (Node.js fs, memory, S3, etc.)
|
||||
- **🌊 Web Streams** - Modern streaming with Web Streams API
|
||||
- **💾 Transactions** - Atomic multi-file operations with automatic rollback
|
||||
- **👀 File Watching** - Event-based file system monitoring
|
||||
- **⚡ Async-Only** - Modern async/await patterns throughout
|
||||
- **📦 Zero Dependencies** - Core functionality with minimal dependencies
|
||||
- **🎨 TypeScript** - Full type safety and IntelliSense support
|
||||
|
||||
## Installation
|
||||
|
||||
```bash
|
||||
pnpm install @push.rocks/smartfs
|
||||
```
|
||||
|
||||
## Quick Start
|
||||
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||
|
||||
// Create a SmartFS instance with Node.js provider
|
||||
const fs = new SmartFs(new SmartFsProviderNode());
|
||||
|
||||
// Write and read files with fluent API
|
||||
await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.write('Hello, World!');
|
||||
|
||||
const content = await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
|
||||
console.log(content); // "Hello, World!"
|
||||
```
|
||||
|
||||
## API Overview
|
||||
|
||||
### File Operations
|
||||
|
||||
The fluent API uses **action-last pattern** - configure first, then execute:
|
||||
|
||||
```typescript
|
||||
// Read file
|
||||
const content = await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.read();
|
||||
|
||||
// Write file
|
||||
await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.mode(0o644)
|
||||
.write('content');
|
||||
|
||||
// Atomic write (write to temp, then rename)
|
||||
await fs.file('/path/to/file.txt')
|
||||
.atomic()
|
||||
.write('content');
|
||||
|
||||
// Append to file
|
||||
await fs.file('/path/to/file.txt')
|
||||
.encoding('utf8')
|
||||
.append('more content');
|
||||
|
||||
// Copy file
|
||||
await fs.file('/source.txt')
|
||||
.preserveTimestamps()
|
||||
.copy('/destination.txt');
|
||||
|
||||
// Move file
|
||||
await fs.file('/old.txt')
|
||||
.move('/new.txt');
|
||||
|
||||
// Delete file
|
||||
await fs.file('/path/to/file.txt')
|
||||
.delete();
|
||||
|
||||
// Check existence
|
||||
const exists = await fs.file('/path/to/file.txt').exists();
|
||||
|
||||
// Get stats
|
||||
const stats = await fs.file('/path/to/file.txt').stat();
|
||||
```
|
||||
|
||||
### Directory Operations
|
||||
|
||||
```typescript
|
||||
// Create directory
|
||||
await fs.directory('/path/to/dir').create();
|
||||
|
||||
// Create nested directories
|
||||
await fs.directory('/path/to/nested/dir')
|
||||
.recursive()
|
||||
.create();
|
||||
|
||||
// List directory
|
||||
const entries = await fs.directory('/path/to/dir').list();
|
||||
|
||||
// List recursively with filter
|
||||
const tsFiles = await fs.directory('/path/to/dir')
|
||||
.recursive()
|
||||
.filter('*.ts')
|
||||
.includeStats()
|
||||
.list();
|
||||
|
||||
// Filter with RegExp
|
||||
const files = await fs.directory('/path/to/dir')
|
||||
.filter(/\.txt$/)
|
||||
.list();
|
||||
|
||||
// Filter with function
|
||||
const largeFiles = await fs.directory('/path/to/dir')
|
||||
.includeStats()
|
||||
.filter(entry => entry.stats && entry.stats.size > 1024)
|
||||
.list();
|
||||
|
||||
// Delete directory
|
||||
await fs.directory('/path/to/dir')
|
||||
.recursive()
|
||||
.delete();
|
||||
|
||||
// Check existence
|
||||
const exists = await fs.directory('/path/to/dir').exists();
|
||||
```
|
||||
|
||||
### Streaming Operations
|
||||
|
||||
SmartFS uses **Web Streams API** for efficient handling of large files:
|
||||
|
||||
```typescript
|
||||
// Read stream
|
||||
const readStream = await fs.file('/large-file.bin')
|
||||
.chunkSize(64 * 1024)
|
||||
.readStream();
|
||||
|
||||
const reader = readStream.getReader();
|
||||
while (true) {
|
||||
const { done, value } = await reader.read();
|
||||
if (done) break;
|
||||
// Process chunk (Uint8Array)
|
||||
console.log('Chunk size:', value.length);
|
||||
}
|
||||
|
||||
// Write stream
|
||||
const writeStream = await fs.file('/output.bin').writeStream();
|
||||
const writer = writeStream.getWriter();
|
||||
|
||||
await writer.write(new Uint8Array([1, 2, 3]));
|
||||
await writer.write(new Uint8Array([4, 5, 6]));
|
||||
await writer.close();
|
||||
|
||||
// Pipe streams
|
||||
const input = await fs.file('/input.txt').readStream();
|
||||
const output = await fs.file('/output.txt').writeStream();
|
||||
await input.pipeTo(output);
|
||||
```
|
||||
|
||||
### Transactions
|
||||
|
||||
Execute multiple file operations atomically with automatic rollback on failure:
|
||||
|
||||
```typescript
|
||||
// Simple transaction
|
||||
await fs.transaction()
|
||||
.file('/file1.txt').write('content 1')
|
||||
.file('/file2.txt').write('content 2')
|
||||
.file('/file3.txt').delete()
|
||||
.commit();
|
||||
|
||||
// Transaction with error handling
|
||||
const tx = fs.transaction()
|
||||
.file('/important.txt').write('critical data')
|
||||
.file('/backup.txt').copy('/backup-old.txt')
|
||||
.file('/temp.txt').delete();
|
||||
|
||||
try {
|
||||
await tx.commit();
|
||||
console.log('Transaction completed successfully');
|
||||
} catch (error) {
|
||||
console.error('Transaction failed and was rolled back:', error);
|
||||
// All operations are automatically reverted
|
||||
}
|
||||
```
|
||||
|
||||
### File Watching
|
||||
|
||||
Monitor filesystem changes with event-based watching:
|
||||
|
||||
```typescript
|
||||
// Watch a single file
|
||||
const watcher = await fs.watch('/path/to/file.txt')
|
||||
.onChange(event => {
|
||||
console.log('File changed:', event.path);
|
||||
})
|
||||
.start();
|
||||
|
||||
// Watch directory recursively
|
||||
const dirWatcher = await fs.watch('/path/to/dir')
|
||||
.recursive()
|
||||
.filter('*.ts')
|
||||
.debounce(100)
|
||||
.onChange(event => console.log('Changed:', event.path))
|
||||
.onAdd(event => console.log('Added:', event.path))
|
||||
.onDelete(event => console.log('Deleted:', event.path))
|
||||
.start();
|
||||
|
||||
// Stop watching
|
||||
await dirWatcher.stop();
|
||||
|
||||
// Watch with custom filter
|
||||
const customWatcher = await fs.watch('/path/to/dir')
|
||||
.recursive()
|
||||
.filter(path => path.endsWith('.ts') && !path.includes('test'))
|
||||
.onAll(event => {
|
||||
console.log(`${event.type}: ${event.path}`);
|
||||
})
|
||||
.start();
|
||||
```
|
||||
|
||||
## Providers
|
||||
|
||||
SmartFS supports multiple storage backends through providers:
|
||||
|
||||
### Node.js Provider
|
||||
|
||||
Uses Node.js `fs/promises` API for local filesystem operations:
|
||||
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
|
||||
|
||||
const fs = new SmartFs(new SmartFsProviderNode());
|
||||
```
|
||||
|
||||
**Capabilities:**
|
||||
- ✅ File watching
|
||||
- ✅ Atomic writes
|
||||
- ✅ Transactions
|
||||
- ✅ Streaming
|
||||
- ✅ Symbolic links
|
||||
- ✅ File permissions
|
||||
|
||||
### Memory Provider
|
||||
|
||||
In-memory virtual filesystem, perfect for testing:
|
||||
|
||||
```typescript
|
||||
import { SmartFs, SmartFsProviderMemory } from '@push.rocks/smartfs';
|
||||
|
||||
const fs = new SmartFs(new SmartFsProviderMemory());
|
||||
|
||||
// All operations work in memory
|
||||
await fs.file('/virtual/file.txt').write('data');
|
||||
const content = await fs.file('/virtual/file.txt').read();
|
||||
|
||||
// Clear all data
|
||||
fs.provider.clear();
|
||||
```
|
||||
|
||||
**Capabilities:**
|
||||
- ✅ File watching
|
||||
- ✅ Atomic writes
|
||||
- ✅ Transactions
|
||||
- ✅ Streaming
|
||||
- ❌ Symbolic links
|
||||
- ✅ File permissions
|
||||
|
||||
### Custom Providers
|
||||
|
||||
Create your own provider by implementing `ISmartFsProvider`:
|
||||
|
||||
```typescript
|
||||
import type { ISmartFsProvider } from '@push.rocks/smartfs';
|
||||
|
||||
class MyCustomProvider implements ISmartFsProvider {
|
||||
public readonly name = 'custom';
|
||||
public readonly capabilities = {
|
||||
supportsWatch: true,
|
||||
supportsAtomic: true,
|
||||
supportsTransactions: true,
|
||||
supportsStreaming: true,
|
||||
supportsSymlinks: false,
|
||||
supportsPermissions: true,
|
||||
};
|
||||
|
||||
// Implement all required methods...
|
||||
async readFile(path: string, options?) { /* ... */ }
|
||||
async writeFile(path: string, content, options?) { /* ... */ }
|
||||
// ... etc
|
||||
}
|
||||
|
||||
const fs = new SmartFs(new MyCustomProvider());
|
||||
```
|
||||
|
||||
## Advanced Usage
|
||||
|
||||
### Encoding Options
|
||||
|
||||
```typescript
|
||||
// UTF-8 (default for text)
|
||||
await fs.file('/file.txt').encoding('utf8').write('text');
|
||||
|
||||
// Binary
|
||||
const buffer = Buffer.from([0x48, 0x65, 0x6c, 0x6c, 0x6f]);
|
||||
await fs.file('/file.bin').write(buffer);
|
||||
|
||||
// Base64
|
||||
await fs.file('/file.txt').encoding('base64').write('SGVsbG8=');
|
||||
|
||||
// Hex
|
||||
await fs.file('/file.txt').encoding('hex').write('48656c6c6f');
|
||||
```
|
||||
|
||||
### File Permissions
|
||||
|
||||
```typescript
|
||||
// Set file mode
|
||||
await fs.file('/script.sh')
|
||||
.mode(0o755)
|
||||
.write('#!/bin/bash\necho "Hello"');
|
||||
|
||||
// Set directory mode
|
||||
await fs.directory('/private')
|
||||
.mode(0o700)
|
||||
.create();
|
||||
```
|
||||
|
||||
### Complex Filtering
|
||||
|
||||
```typescript
|
||||
// Multiple conditions
|
||||
const files = await fs.directory('/src')
|
||||
.recursive()
|
||||
.includeStats()
|
||||
.filter(entry => {
|
||||
if (!entry.stats) return false;
|
||||
return entry.isFile &&
|
||||
entry.name.endsWith('.ts') &&
|
||||
entry.stats.size > 1024 &&
|
||||
entry.stats.mtime > new Date('2024-01-01');
|
||||
})
|
||||
.list();
|
||||
```
|
||||
|
||||
### Transaction Operations
|
||||
|
||||
```typescript
|
||||
// Complex transaction
|
||||
const tx = fs.transaction();
|
||||
|
||||
// Write multiple files
|
||||
tx.file('/data/file1.json').write(JSON.stringify(data1));
|
||||
tx.file('/data/file2.json').write(JSON.stringify(data2));
|
||||
|
||||
// Copy backups
|
||||
tx.file('/data/file1.json').copy('/backup/file1.json');
|
||||
tx.file('/data/file2.json').copy('/backup/file2.json');
|
||||
|
||||
// Delete old files
|
||||
tx.file('/data/old1.json').delete();
|
||||
tx.file('/data/old2.json').delete();
|
||||
|
||||
// Execute atomically
|
||||
await tx.commit();
|
||||
```
|
||||
|
||||
## Type Definitions
|
||||
|
||||
SmartFS is fully typed with TypeScript:
|
||||
|
||||
```typescript
|
||||
import type {
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IWatchEvent,
|
||||
ITransactionOperation,
|
||||
TEncoding,
|
||||
TFileMode,
|
||||
} from '@push.rocks/smartfs';
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
pnpm test
|
||||
|
||||
# Run specific test
|
||||
pnpm tstest test/test.memory.provider.ts --verbose
|
||||
|
||||
# Run with log output
|
||||
pnpm tstest test/test.node.provider.ts --logfile .nogit/testlogs/test.log
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
SmartFS throws descriptive errors:
|
||||
|
||||
```typescript
|
||||
try {
|
||||
await fs.file('/nonexistent.txt').read();
|
||||
} catch (error) {
|
||||
console.error(error.message);
|
||||
// "ENOENT: no such file or directory, open '/nonexistent.txt'"
|
||||
}
|
||||
|
||||
// Transactions automatically rollback on error
|
||||
try {
|
||||
await fs.transaction()
|
||||
.file('/file1.txt').write('data')
|
||||
.file('/file2.txt').write('data')
|
||||
.commit();
|
||||
} catch (error) {
|
||||
// All operations are reverted
|
||||
console.error('Transaction failed:', error);
|
||||
}
|
||||
```
|
||||
|
||||
## Performance Tips
|
||||
|
||||
1. **Use streaming** for large files (> 1MB)
|
||||
2. **Batch operations** with transactions
|
||||
3. **Use memory provider** for testing
|
||||
4. **Enable atomic writes** for critical data
|
||||
5. **Debounce watchers** to reduce event spam
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions welcome! Please ensure:
|
||||
- All tests pass
|
||||
- Code follows existing style
|
||||
- TypeScript types are complete
|
||||
- Documentation is updated
|
||||
|
||||
## License
|
||||
|
||||
MIT © [Lossless GmbH](https://lossless.gmbh)
|
||||
|
||||
---
|
||||
|
||||
For more information, visit [code.foss.global](https://code.foss.global/push.rocks/smartfs)
|
||||
235
test/test.memory.provider.ts
Normal file
235
test/test.memory.provider.ts
Normal file
@@ -0,0 +1,235 @@
|
||||
/**
|
||||
* Tests for Memory provider
|
||||
*/
|
||||
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { SmartFs, SmartFsProviderMemory } from '../ts/index.js';
|
||||
|
||||
// Create test instance
|
||||
const memoryProvider = new SmartFsProviderMemory();
|
||||
const fs = new SmartFs(memoryProvider);
|
||||
|
||||
tap.test('should create SmartFS instance with Memory provider', async () => {
|
||||
expect(fs).toBeInstanceOf(SmartFs);
|
||||
expect(fs.getProviderName()).toEqual('memory');
|
||||
});
|
||||
|
||||
tap.test('should write and read a file', async () => {
|
||||
await fs.file('/test.txt').write('Hello, World!');
|
||||
const content = await fs.file('/test.txt').encoding('utf8').read();
|
||||
expect(content).toEqual('Hello, World!');
|
||||
});
|
||||
|
||||
tap.test('should write and read a file with encoding', async () => {
|
||||
await fs.file('/test2.txt').encoding('utf8').write('Test content');
|
||||
const content = await fs.file('/test2.txt').encoding('utf8').read();
|
||||
expect(content).toEqual('Test content');
|
||||
});
|
||||
|
||||
tap.test('should check if file exists', async () => {
|
||||
const exists = await fs.file('/test.txt').exists();
|
||||
expect(exists).toEqual(true);
|
||||
|
||||
const notExists = await fs.file('/nonexistent.txt').exists();
|
||||
expect(notExists).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should get file stats', async () => {
|
||||
await fs.file('/stats-test.txt').write('stats test');
|
||||
const stats = await fs.file('/stats-test.txt').stat();
|
||||
|
||||
expect(stats).toHaveProperty('size');
|
||||
expect(stats).toHaveProperty('mtime');
|
||||
expect(stats).toHaveProperty('birthtime');
|
||||
expect(stats.isFile).toEqual(true);
|
||||
expect(stats.isDirectory).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should append to a file', async () => {
|
||||
await fs.file('/append-test.txt').write('Hello');
|
||||
await fs.file('/append-test.txt').append(' World!');
|
||||
const content = await fs.file('/append-test.txt').encoding('utf8').read();
|
||||
expect(content).toEqual('Hello World!');
|
||||
});
|
||||
|
||||
tap.test('should delete a file', async () => {
|
||||
await fs.file('/delete-test.txt').write('to be deleted');
|
||||
await fs.file('/delete-test.txt').delete();
|
||||
const exists = await fs.file('/delete-test.txt').exists();
|
||||
expect(exists).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should copy a file', async () => {
|
||||
await fs.file('/copy-source.txt').write('copy me');
|
||||
await fs.file('/copy-source.txt').copy('/copy-dest.txt');
|
||||
|
||||
const sourceContent = await fs.file('/copy-source.txt').encoding('utf8').read();
|
||||
const destContent = await fs.file('/copy-dest.txt').encoding('utf8').read();
|
||||
|
||||
expect(sourceContent).toEqual('copy me');
|
||||
expect(destContent).toEqual('copy me');
|
||||
});
|
||||
|
||||
tap.test('should move a file', async () => {
|
||||
await fs.file('/move-source.txt').write('move me');
|
||||
await fs.file('/move-source.txt').move('/move-dest.txt');
|
||||
|
||||
const sourceExists = await fs.file('/move-source.txt').exists();
|
||||
const destContent = await fs.file('/move-dest.txt').encoding('utf8').read();
|
||||
|
||||
expect(sourceExists).toEqual(false);
|
||||
expect(destContent).toEqual('move me');
|
||||
});
|
||||
|
||||
tap.test('should create a directory', async () => {
|
||||
await fs.directory('/test-dir').create();
|
||||
const exists = await fs.directory('/test-dir').exists();
|
||||
expect(exists).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should create nested directories recursively', async () => {
|
||||
await fs.directory('/nested/deep/path').recursive().create();
|
||||
const exists = await fs.directory('/nested/deep/path').exists();
|
||||
expect(exists).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should list directory contents', async () => {
|
||||
await fs.directory('/list-test').create();
|
||||
await fs.file('/list-test/file1.txt').write('file1');
|
||||
await fs.file('/list-test/file2.txt').write('file2');
|
||||
await fs.directory('/list-test/subdir').create();
|
||||
|
||||
const entries = await fs.directory('/list-test').list();
|
||||
|
||||
expect(entries).toHaveLength(3);
|
||||
const names = entries.map((e) => e.name).sort();
|
||||
expect(names).toEqual(['file1.txt', 'file2.txt', 'subdir']);
|
||||
});
|
||||
|
||||
tap.test('should list directory contents recursively', async () => {
|
||||
await fs.directory('/recursive-test').create();
|
||||
await fs.file('/recursive-test/file1.txt').write('file1');
|
||||
await fs.directory('/recursive-test/subdir').create();
|
||||
await fs.file('/recursive-test/subdir/file2.txt').write('file2');
|
||||
|
||||
const entries = await fs.directory('/recursive-test').recursive().list();
|
||||
|
||||
expect(entries.length).toBeGreaterThanOrEqual(3);
|
||||
});
|
||||
|
||||
tap.test('should filter directory listings', async () => {
|
||||
await fs.directory('/filter-test').create();
|
||||
await fs.file('/filter-test/file1.ts').write('ts file');
|
||||
await fs.file('/filter-test/file2.js').write('js file');
|
||||
await fs.file('/filter-test/file3.ts').write('ts file');
|
||||
|
||||
const entries = await fs.directory('/filter-test').filter('*.ts').list();
|
||||
|
||||
expect(entries).toHaveLength(2);
|
||||
expect(entries.every((e) => e.name.endsWith('.ts'))).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should delete a directory recursively', async () => {
|
||||
await fs.directory('/delete-dir-test').create();
|
||||
await fs.file('/delete-dir-test/file.txt').write('file');
|
||||
await fs.directory('/delete-dir-test/subdir').create();
|
||||
|
||||
await fs.directory('/delete-dir-test').recursive().delete();
|
||||
|
||||
const exists = await fs.directory('/delete-dir-test').exists();
|
||||
expect(exists).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should handle file streams', async () => {
|
||||
const testData = 'Stream test data with some content';
|
||||
await fs.file('/stream-test.txt').write(testData);
|
||||
|
||||
const readStream = await fs.file('/stream-test.txt').readStream();
|
||||
const chunks: Uint8Array[] = [];
|
||||
|
||||
const reader = readStream.getReader();
|
||||
let done = false;
|
||||
|
||||
while (!done) {
|
||||
const result = await reader.read();
|
||||
done = result.done;
|
||||
if (result.value) {
|
||||
chunks.push(result.value);
|
||||
}
|
||||
}
|
||||
|
||||
const buffer = Buffer.concat(chunks.map((c) => Buffer.from(c)));
|
||||
const content = buffer.toString('utf8');
|
||||
|
||||
expect(content).toEqual(testData);
|
||||
});
|
||||
|
||||
tap.test('should write file streams', async () => {
|
||||
const testData = 'Writing via stream';
|
||||
const buffer = Buffer.from(testData);
|
||||
|
||||
const writeStream = await fs.file('/write-stream-test.txt').writeStream();
|
||||
const writer = writeStream.getWriter();
|
||||
|
||||
await writer.write(new Uint8Array(buffer));
|
||||
await writer.close();
|
||||
|
||||
const content = await fs.file('/write-stream-test.txt').encoding('utf8').read();
|
||||
expect(content).toEqual(testData);
|
||||
});
|
||||
|
||||
tap.test('should execute transactions', async () => {
|
||||
await fs
|
||||
.transaction()
|
||||
.file('/tx-file1.txt')
|
||||
.write('transaction file 1')
|
||||
.file('/tx-file2.txt')
|
||||
.write('transaction file 2')
|
||||
.file('/tx-file3.txt')
|
||||
.write('transaction file 3')
|
||||
.commit();
|
||||
|
||||
const content1 = await fs.file('/tx-file1.txt').encoding('utf8').read();
|
||||
const content2 = await fs.file('/tx-file2.txt').encoding('utf8').read();
|
||||
const content3 = await fs.file('/tx-file3.txt').encoding('utf8').read();
|
||||
|
||||
expect(content1).toEqual('transaction file 1');
|
||||
expect(content2).toEqual('transaction file 2');
|
||||
expect(content3).toEqual('transaction file 3');
|
||||
});
|
||||
|
||||
tap.test('should rollback transactions on error', async () => {
|
||||
// This test verifies that transaction operations are prepared with backups
|
||||
// Since memory provider doesn't naturally fail, we just verify the mechanism exists
|
||||
await fs.file('/tx-rollback-test.txt').write('original');
|
||||
|
||||
const tx = fs.transaction();
|
||||
tx.file('/tx-rollback-test.txt').write('modified');
|
||||
|
||||
// Verify operations are tracked
|
||||
const operations = tx.getOperations();
|
||||
expect(operations.length).toEqual(1);
|
||||
expect(operations[0].type).toEqual('write');
|
||||
|
||||
// Commit normally (rollback test would require mocking provider failure)
|
||||
await tx.commit();
|
||||
expect(tx.isCommitted()).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should handle file watching', async () => {
|
||||
await fs.file('/watch-test.txt').write('initial');
|
||||
|
||||
// Create watcher (verifies the API works)
|
||||
const watcher = await fs
|
||||
.watch('/watch-test.txt')
|
||||
.onChange((event) => {
|
||||
// Event handler registered
|
||||
})
|
||||
.start();
|
||||
|
||||
// Verify watcher can be stopped
|
||||
await watcher.stop();
|
||||
expect(true).toEqual(true); // Test passes if we get here
|
||||
});
|
||||
|
||||
export default tap.start();
|
||||
264
test/test.node.provider.ts
Normal file
264
test/test.node.provider.ts
Normal file
@@ -0,0 +1,264 @@
|
||||
/**
|
||||
* Tests for Node.js provider
|
||||
*/
|
||||
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs/promises';
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
import { SmartFs, SmartFsProviderNode } from '../ts/index.js';
|
||||
|
||||
// Create temp directory for tests
|
||||
const tempDir = path.join(process.cwd(), '.nogit', 'test-temp');
|
||||
|
||||
// Create test instance
|
||||
const nodeProvider = new SmartFsProviderNode();
|
||||
const smartFs = new SmartFs(nodeProvider);
|
||||
|
||||
tap.preTask('setup temp directory', async () => {
|
||||
await fs.mkdir(tempDir, { recursive: true });
|
||||
});
|
||||
|
||||
tap.test('should create SmartFS instance with Node provider', async () => {
|
||||
expect(smartFs).toBeInstanceOf(SmartFs);
|
||||
expect(smartFs.getProviderName()).toEqual('node');
|
||||
});
|
||||
|
||||
tap.test('should write and read a file', async () => {
|
||||
const filePath = path.join(tempDir, 'test.txt');
|
||||
await smartFs.file(filePath).write('Hello, World!');
|
||||
const content = await smartFs.file(filePath).encoding('utf8').read();
|
||||
expect(content).toEqual('Hello, World!');
|
||||
});
|
||||
|
||||
tap.test('should write atomically', async () => {
|
||||
const filePath = path.join(tempDir, 'atomic.txt');
|
||||
await smartFs.file(filePath).atomic().write('Atomic write test');
|
||||
const content = await smartFs.file(filePath).encoding('utf8').read();
|
||||
expect(content).toEqual('Atomic write test');
|
||||
});
|
||||
|
||||
tap.test('should check if file exists', async () => {
|
||||
const filePath = path.join(tempDir, 'exists-test.txt');
|
||||
await smartFs.file(filePath).write('exists');
|
||||
|
||||
const exists = await smartFs.file(filePath).exists();
|
||||
expect(exists).toEqual(true);
|
||||
|
||||
const notExists = await smartFs.file(path.join(tempDir, 'nonexistent.txt')).exists();
|
||||
expect(notExists).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should get file stats', async () => {
|
||||
const filePath = path.join(tempDir, 'stats-test.txt');
|
||||
await smartFs.file(filePath).write('stats test');
|
||||
const stats = await smartFs.file(filePath).stat();
|
||||
|
||||
expect(stats).toHaveProperty('size');
|
||||
expect(stats).toHaveProperty('mtime');
|
||||
expect(stats).toHaveProperty('birthtime');
|
||||
expect(stats.isFile).toEqual(true);
|
||||
expect(stats.isDirectory).toEqual(false);
|
||||
expect(stats.size).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('should append to a file', async () => {
|
||||
const filePath = path.join(tempDir, 'append-test.txt');
|
||||
await smartFs.file(filePath).write('Hello');
|
||||
await smartFs.file(filePath).append(' World!');
|
||||
const content = await smartFs.file(filePath).encoding('utf8').read();
|
||||
expect(content).toEqual('Hello World!');
|
||||
});
|
||||
|
||||
tap.test('should delete a file', async () => {
|
||||
const filePath = path.join(tempDir, 'delete-test.txt');
|
||||
await smartFs.file(filePath).write('to be deleted');
|
||||
await smartFs.file(filePath).delete();
|
||||
const exists = await smartFs.file(filePath).exists();
|
||||
expect(exists).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should copy a file', async () => {
|
||||
const sourcePath = path.join(tempDir, 'copy-source.txt');
|
||||
const destPath = path.join(tempDir, 'copy-dest.txt');
|
||||
|
||||
await smartFs.file(sourcePath).write('copy me');
|
||||
await smartFs.file(sourcePath).copy(destPath);
|
||||
|
||||
const sourceContent = await smartFs.file(sourcePath).encoding('utf8').read();
|
||||
const destContent = await smartFs.file(destPath).encoding('utf8').read();
|
||||
|
||||
expect(sourceContent).toEqual('copy me');
|
||||
expect(destContent).toEqual('copy me');
|
||||
});
|
||||
|
||||
tap.test('should move a file', async () => {
|
||||
const sourcePath = path.join(tempDir, 'move-source.txt');
|
||||
const destPath = path.join(tempDir, 'move-dest.txt');
|
||||
|
||||
await smartFs.file(sourcePath).write('move me');
|
||||
await smartFs.file(sourcePath).move(destPath);
|
||||
|
||||
const sourceExists = await smartFs.file(sourcePath).exists();
|
||||
const destContent = await smartFs.file(destPath).encoding('utf8').read();
|
||||
|
||||
expect(sourceExists).toEqual(false);
|
||||
expect(destContent).toEqual('move me');
|
||||
});
|
||||
|
||||
tap.test('should create a directory', async () => {
|
||||
const dirPath = path.join(tempDir, 'test-dir');
|
||||
await smartFs.directory(dirPath).create();
|
||||
const exists = await smartFs.directory(dirPath).exists();
|
||||
expect(exists).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should create nested directories recursively', async () => {
|
||||
const dirPath = path.join(tempDir, 'nested', 'deep', 'path');
|
||||
await smartFs.directory(dirPath).recursive().create();
|
||||
const exists = await smartFs.directory(dirPath).exists();
|
||||
expect(exists).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should list directory contents', async () => {
|
||||
const dirPath = path.join(tempDir, 'list-test');
|
||||
await smartFs.directory(dirPath).create();
|
||||
await smartFs.file(path.join(dirPath, 'file1.txt')).write('file1');
|
||||
await smartFs.file(path.join(dirPath, 'file2.txt')).write('file2');
|
||||
await smartFs.directory(path.join(dirPath, 'subdir')).create();
|
||||
|
||||
const entries = await smartFs.directory(dirPath).list();
|
||||
|
||||
expect(entries).toHaveLength(3);
|
||||
const names = entries.map((e) => e.name).sort();
|
||||
expect(names).toEqual(['file1.txt', 'file2.txt', 'subdir']);
|
||||
});
|
||||
|
||||
tap.test('should list directory contents recursively', async () => {
|
||||
const dirPath = path.join(tempDir, 'recursive-test');
|
||||
await smartFs.directory(dirPath).create();
|
||||
await smartFs.file(path.join(dirPath, 'file1.txt')).write('file1');
|
||||
await smartFs.directory(path.join(dirPath, 'subdir')).create();
|
||||
await smartFs.file(path.join(dirPath, 'subdir', 'file2.txt')).write('file2');
|
||||
|
||||
const entries = await smartFs.directory(dirPath).recursive().list();
|
||||
|
||||
expect(entries.length).toBeGreaterThanOrEqual(3);
|
||||
});
|
||||
|
||||
tap.test('should filter directory listings with RegExp', async () => {
|
||||
const dirPath = path.join(tempDir, 'filter-test');
|
||||
await smartFs.directory(dirPath).create();
|
||||
await smartFs.file(path.join(dirPath, 'file1.ts')).write('ts file');
|
||||
await smartFs.file(path.join(dirPath, 'file2.js')).write('js file');
|
||||
await smartFs.file(path.join(dirPath, 'file3.ts')).write('ts file');
|
||||
|
||||
const entries = await smartFs.directory(dirPath).filter(/\.ts$/).list();
|
||||
|
||||
expect(entries).toHaveLength(2);
|
||||
expect(entries.every((e) => e.name.endsWith('.ts'))).toEqual(true);
|
||||
});
|
||||
|
||||
tap.test('should delete a directory recursively', async () => {
|
||||
const dirPath = path.join(tempDir, 'delete-dir-test');
|
||||
await smartFs.directory(dirPath).create();
|
||||
await smartFs.file(path.join(dirPath, 'file.txt')).write('file');
|
||||
await smartFs.directory(path.join(dirPath, 'subdir')).create();
|
||||
|
||||
await smartFs.directory(dirPath).recursive().delete();
|
||||
|
||||
const exists = await smartFs.directory(dirPath).exists();
|
||||
expect(exists).toEqual(false);
|
||||
});
|
||||
|
||||
tap.test('should handle file streams', async () => {
|
||||
const filePath = path.join(tempDir, 'stream-test.txt');
|
||||
const testData = 'Stream test data with some content';
|
||||
await smartFs.file(filePath).write(testData);
|
||||
|
||||
const readStream = await smartFs.file(filePath).readStream();
|
||||
const chunks: Uint8Array[] = [];
|
||||
|
||||
const reader = readStream.getReader();
|
||||
let done = false;
|
||||
|
||||
while (!done) {
|
||||
const result = await reader.read();
|
||||
done = result.done;
|
||||
if (result.value) {
|
||||
chunks.push(result.value);
|
||||
}
|
||||
}
|
||||
|
||||
const buffer = Buffer.concat(chunks.map((c) => Buffer.from(c)));
|
||||
const content = buffer.toString('utf8');
|
||||
|
||||
expect(content).toEqual(testData);
|
||||
});
|
||||
|
||||
tap.test('should write file streams', async () => {
|
||||
const filePath = path.join(tempDir, 'write-stream-test.txt');
|
||||
const testData = 'Writing via stream';
|
||||
const buffer = Buffer.from(testData);
|
||||
|
||||
const writeStream = await smartFs.file(filePath).writeStream();
|
||||
const writer = writeStream.getWriter();
|
||||
|
||||
await writer.write(new Uint8Array(buffer));
|
||||
await writer.close();
|
||||
|
||||
const content = await smartFs.file(filePath).encoding('utf8').read();
|
||||
expect(content).toEqual(testData);
|
||||
});
|
||||
|
||||
tap.test('should execute transactions', async () => {
|
||||
const tx = smartFs.transaction();
|
||||
|
||||
const file1Path = path.join(tempDir, 'tx-file1.txt');
|
||||
const file2Path = path.join(tempDir, 'tx-file2.txt');
|
||||
const file3Path = path.join(tempDir, 'tx-file3.txt');
|
||||
|
||||
await tx
|
||||
.file(file1Path)
|
||||
.write('transaction file 1')
|
||||
.file(file2Path)
|
||||
.write('transaction file 2')
|
||||
.file(file3Path)
|
||||
.write('transaction file 3')
|
||||
.commit();
|
||||
|
||||
const content1 = await smartFs.file(file1Path).encoding('utf8').read();
|
||||
const content2 = await smartFs.file(file2Path).encoding('utf8').read();
|
||||
const content3 = await smartFs.file(file3Path).encoding('utf8').read();
|
||||
|
||||
expect(content1).toEqual('transaction file 1');
|
||||
expect(content2).toEqual('transaction file 2');
|
||||
expect(content3).toEqual('transaction file 3');
|
||||
});
|
||||
|
||||
tap.test('should handle file watching', async () => {
|
||||
const filePath = path.join(tempDir, 'watch-test.txt');
|
||||
await smartFs.file(filePath).write('initial');
|
||||
|
||||
return new Promise<void>(async (resolve) => {
|
||||
const watcher = await smartFs
|
||||
.watch(filePath)
|
||||
.onChange(async (event) => {
|
||||
expect(event.type).toEqual('change');
|
||||
await watcher.stop();
|
||||
resolve();
|
||||
})
|
||||
.start();
|
||||
|
||||
// Wait a bit for watcher to be ready
|
||||
setTimeout(async () => {
|
||||
await smartFs.file(filePath).write('changed');
|
||||
}, 100);
|
||||
});
|
||||
});
|
||||
|
||||
tap.test('cleanup temp directory', async () => {
|
||||
await fs.rm(tempDir, { recursive: true, force: true });
|
||||
expect(true).toEqual(true);
|
||||
});
|
||||
|
||||
export default tap.start();
|
||||
7
test/test.ts
Normal file
7
test/test.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
/**
|
||||
* Main test entry point
|
||||
* Imports all test files
|
||||
*/
|
||||
|
||||
import './test.memory.provider.js';
|
||||
import './test.node.provider.js';
|
||||
139
ts/classes/smartfs.directory.ts
Normal file
139
ts/classes/smartfs.directory.ts
Normal file
@@ -0,0 +1,139 @@
|
||||
/**
|
||||
* Directory builder for fluent directory operations
|
||||
*/
|
||||
|
||||
import type { ISmartFsProvider } from '../interfaces/mod.provider.js';
|
||||
import type {
|
||||
TFileMode,
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IListOptions,
|
||||
} from '../interfaces/mod.types.js';
|
||||
|
||||
/**
|
||||
* Directory builder class for fluent directory operations
|
||||
* Configuration methods return `this` for chaining
|
||||
* Action methods return Promises for execution
|
||||
*/
|
||||
export class SmartFsDirectory {
|
||||
private provider: ISmartFsProvider;
|
||||
private path: string;
|
||||
|
||||
// Configuration options
|
||||
private options: {
|
||||
recursive?: boolean;
|
||||
mode?: TFileMode;
|
||||
filter?: string | RegExp | ((entry: IDirectoryEntry) => boolean);
|
||||
includeStats?: boolean;
|
||||
} = {};
|
||||
|
||||
constructor(provider: ISmartFsProvider, path: string) {
|
||||
this.provider = provider;
|
||||
this.path = this.provider.normalizePath(path);
|
||||
}
|
||||
|
||||
// --- Configuration Methods (return this for chaining) ---
|
||||
|
||||
/**
|
||||
* Enable recursive operations (for list, create, delete)
|
||||
*/
|
||||
public recursive(): this {
|
||||
this.options.recursive = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set directory permissions/mode
|
||||
* @param mode - Directory mode (e.g., 0o755)
|
||||
*/
|
||||
public mode(mode: TFileMode): this {
|
||||
this.options.mode = mode;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter directory entries
|
||||
* @param filter - String pattern, RegExp, or filter function
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // String pattern (glob-like)
|
||||
* .filter('*.ts')
|
||||
*
|
||||
* // RegExp
|
||||
* .filter(/\.ts$/)
|
||||
*
|
||||
* // Function
|
||||
* .filter(entry => entry.isFile && entry.name.endsWith('.ts'))
|
||||
* ```
|
||||
*/
|
||||
public filter(filter: string | RegExp | ((entry: IDirectoryEntry) => boolean)): this {
|
||||
this.options.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Include file statistics in directory listings
|
||||
*/
|
||||
public includeStats(): this {
|
||||
this.options.includeStats = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
// --- Action Methods (return Promises) ---
|
||||
|
||||
/**
|
||||
* List directory contents
|
||||
* @returns Array of directory entries
|
||||
*/
|
||||
public async list(): Promise<IDirectoryEntry[]> {
|
||||
const listOptions: IListOptions = {
|
||||
recursive: this.options.recursive,
|
||||
filter: this.options.filter,
|
||||
includeStats: this.options.includeStats,
|
||||
};
|
||||
return this.provider.listDirectory(this.path, listOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create the directory
|
||||
*/
|
||||
public async create(): Promise<void> {
|
||||
return this.provider.createDirectory(this.path, {
|
||||
recursive: this.options.recursive,
|
||||
mode: this.options.mode,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the directory
|
||||
*/
|
||||
public async delete(): Promise<void> {
|
||||
return this.provider.deleteDirectory(this.path, {
|
||||
recursive: this.options.recursive,
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the directory exists
|
||||
* @returns True if directory exists
|
||||
*/
|
||||
public async exists(): Promise<boolean> {
|
||||
return this.provider.directoryExists(this.path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get directory statistics
|
||||
* @returns Directory stats
|
||||
*/
|
||||
public async stat(): Promise<IFileStats> {
|
||||
return this.provider.directoryStat(this.path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the directory path
|
||||
*/
|
||||
public getPath(): string {
|
||||
return this.path;
|
||||
}
|
||||
}
|
||||
209
ts/classes/smartfs.file.ts
Normal file
209
ts/classes/smartfs.file.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
/**
|
||||
* File builder for fluent file operations
|
||||
*/
|
||||
|
||||
import type { ISmartFsProvider } from '../interfaces/mod.provider.js';
|
||||
import type {
|
||||
TEncoding,
|
||||
TFileMode,
|
||||
IFileStats,
|
||||
IReadOptions,
|
||||
IWriteOptions,
|
||||
IStreamOptions,
|
||||
ICopyOptions,
|
||||
} from '../interfaces/mod.types.js';
|
||||
|
||||
/**
|
||||
* File builder class for fluent file operations
|
||||
* Configuration methods return `this` for chaining
|
||||
* Action methods return Promises for execution
|
||||
*/
|
||||
export class SmartFsFile {
|
||||
private provider: ISmartFsProvider;
|
||||
private path: string;
|
||||
|
||||
// Configuration options
|
||||
private options: {
|
||||
encoding?: TEncoding;
|
||||
mode?: TFileMode;
|
||||
atomic?: boolean;
|
||||
chunkSize?: number;
|
||||
preserveTimestamps?: boolean;
|
||||
overwrite?: boolean;
|
||||
} = {};
|
||||
|
||||
constructor(provider: ISmartFsProvider, path: string) {
|
||||
this.provider = provider;
|
||||
this.path = this.provider.normalizePath(path);
|
||||
}
|
||||
|
||||
// --- Configuration Methods (return this for chaining) ---
|
||||
|
||||
/**
|
||||
* Set encoding for read/write operations
|
||||
* @param encoding - File encoding
|
||||
*/
|
||||
public encoding(encoding: TEncoding): this {
|
||||
this.options.encoding = encoding;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set file permissions/mode
|
||||
* @param mode - File mode (e.g., 0o644)
|
||||
*/
|
||||
public mode(mode: TFileMode): this {
|
||||
this.options.mode = mode;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Enable atomic write operations
|
||||
* Writes to a temporary file first, then renames
|
||||
*/
|
||||
public atomic(): this {
|
||||
this.options.atomic = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set chunk size for streaming operations
|
||||
* @param size - Chunk size in bytes
|
||||
*/
|
||||
public chunkSize(size: number): this {
|
||||
this.options.chunkSize = size;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Preserve timestamps when copying/moving
|
||||
*/
|
||||
public preserveTimestamps(): this {
|
||||
this.options.preserveTimestamps = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Allow overwriting existing files
|
||||
*/
|
||||
public overwrite(): this {
|
||||
this.options.overwrite = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
// --- Action Methods (return Promises) ---
|
||||
|
||||
/**
|
||||
* Read the file
|
||||
* @returns File content as Buffer or string (if encoding is set)
|
||||
*/
|
||||
public async read(): Promise<Buffer | string> {
|
||||
const readOptions: IReadOptions = {
|
||||
encoding: this.options.encoding,
|
||||
};
|
||||
return this.provider.readFile(this.path, readOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Write content to the file
|
||||
* @param content - Content to write
|
||||
*/
|
||||
public async write(content: string | Buffer): Promise<void> {
|
||||
const writeOptions: IWriteOptions = {
|
||||
encoding: this.options.encoding,
|
||||
mode: this.options.mode,
|
||||
atomic: this.options.atomic,
|
||||
};
|
||||
return this.provider.writeFile(this.path, content, writeOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Append content to the file
|
||||
* @param content - Content to append
|
||||
*/
|
||||
public async append(content: string | Buffer): Promise<void> {
|
||||
const writeOptions: IWriteOptions = {
|
||||
encoding: this.options.encoding,
|
||||
mode: this.options.mode,
|
||||
};
|
||||
return this.provider.appendFile(this.path, content, writeOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a readable stream for the file
|
||||
* @returns ReadableStream of Uint8Array
|
||||
*/
|
||||
public async readStream(): Promise<ReadableStream<Uint8Array>> {
|
||||
const streamOptions: IStreamOptions = {
|
||||
chunkSize: this.options.chunkSize,
|
||||
};
|
||||
return this.provider.createReadStream(this.path, streamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a writable stream for the file
|
||||
* @returns WritableStream of Uint8Array
|
||||
*/
|
||||
public async writeStream(): Promise<WritableStream<Uint8Array>> {
|
||||
const streamOptions: IStreamOptions = {
|
||||
chunkSize: this.options.chunkSize,
|
||||
};
|
||||
return this.provider.createWriteStream(this.path, streamOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the file to a new location
|
||||
* @param targetPath - Destination path
|
||||
*/
|
||||
public async copy(targetPath: string): Promise<void> {
|
||||
const normalizedTarget = this.provider.normalizePath(targetPath);
|
||||
const copyOptions: ICopyOptions = {
|
||||
preserveTimestamps: this.options.preserveTimestamps,
|
||||
overwrite: this.options.overwrite,
|
||||
};
|
||||
return this.provider.copyFile(this.path, normalizedTarget, copyOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Move the file to a new location
|
||||
* @param targetPath - Destination path
|
||||
*/
|
||||
public async move(targetPath: string): Promise<void> {
|
||||
const normalizedTarget = this.provider.normalizePath(targetPath);
|
||||
const copyOptions: ICopyOptions = {
|
||||
preserveTimestamps: this.options.preserveTimestamps,
|
||||
overwrite: this.options.overwrite,
|
||||
};
|
||||
return this.provider.moveFile(this.path, normalizedTarget, copyOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the file
|
||||
*/
|
||||
public async delete(): Promise<void> {
|
||||
return this.provider.deleteFile(this.path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the file exists
|
||||
* @returns True if file exists
|
||||
*/
|
||||
public async exists(): Promise<boolean> {
|
||||
return this.provider.fileExists(this.path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file statistics
|
||||
* @returns File stats
|
||||
*/
|
||||
public async stat(): Promise<IFileStats> {
|
||||
return this.provider.fileStat(this.path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the file path
|
||||
*/
|
||||
public getPath(): string {
|
||||
return this.path;
|
||||
}
|
||||
}
|
||||
207
ts/classes/smartfs.transaction.ts
Normal file
207
ts/classes/smartfs.transaction.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
/**
|
||||
* Transaction builder for atomic multi-file operations
|
||||
*/
|
||||
|
||||
import type { ISmartFsProvider } from '../interfaces/mod.provider.js';
|
||||
import type { ITransactionOperation, TEncoding } from '../interfaces/mod.types.js';
|
||||
|
||||
/**
|
||||
* Transaction file operation builder
|
||||
* Allows chaining file operations within a transaction
|
||||
*/
|
||||
class TransactionFileBuilder {
|
||||
constructor(
|
||||
private transaction: SmartFsTransaction,
|
||||
private path: string,
|
||||
) {}
|
||||
|
||||
/**
|
||||
* Write content to the file in this transaction
|
||||
* @param content - Content to write
|
||||
* @param encoding - Optional encoding
|
||||
*/
|
||||
public write(content: string | Buffer, encoding?: TEncoding): SmartFsTransaction {
|
||||
this.transaction.addOperation({
|
||||
type: 'write',
|
||||
path: this.path,
|
||||
content,
|
||||
encoding,
|
||||
});
|
||||
return this.transaction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Append content to the file in this transaction
|
||||
* @param content - Content to append
|
||||
* @param encoding - Optional encoding
|
||||
*/
|
||||
public append(content: string | Buffer, encoding?: TEncoding): SmartFsTransaction {
|
||||
this.transaction.addOperation({
|
||||
type: 'append',
|
||||
path: this.path,
|
||||
content,
|
||||
encoding,
|
||||
});
|
||||
return this.transaction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete the file in this transaction
|
||||
*/
|
||||
public delete(): SmartFsTransaction {
|
||||
this.transaction.addOperation({
|
||||
type: 'delete',
|
||||
path: this.path,
|
||||
});
|
||||
return this.transaction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy the file to a new location in this transaction
|
||||
* @param targetPath - Destination path
|
||||
*/
|
||||
public copy(targetPath: string): SmartFsTransaction {
|
||||
this.transaction.addOperation({
|
||||
type: 'copy',
|
||||
path: this.path,
|
||||
targetPath,
|
||||
});
|
||||
return this.transaction;
|
||||
}
|
||||
|
||||
/**
|
||||
* Move the file to a new location in this transaction
|
||||
* @param targetPath - Destination path
|
||||
*/
|
||||
public move(targetPath: string): SmartFsTransaction {
|
||||
this.transaction.addOperation({
|
||||
type: 'move',
|
||||
path: this.path,
|
||||
targetPath,
|
||||
});
|
||||
return this.transaction;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Transaction builder class for atomic multi-file operations
|
||||
* Build up a set of operations, then commit atomically
|
||||
* Supports rollback on failure
|
||||
*/
|
||||
export class SmartFsTransaction {
|
||||
private provider: ISmartFsProvider;
|
||||
private operations: ITransactionOperation[] = [];
|
||||
private committed = false;
|
||||
private rolledBack = false;
|
||||
|
||||
constructor(provider: ISmartFsProvider) {
|
||||
this.provider = provider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add a file operation to the transaction
|
||||
* @param path - Path to the file
|
||||
* @returns TransactionFileBuilder for chaining operations
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* await fs.transaction()
|
||||
* .file('/file1.txt').write('content1')
|
||||
* .file('/file2.txt').delete()
|
||||
* .commit()
|
||||
* ```
|
||||
*/
|
||||
public file(path: string): TransactionFileBuilder {
|
||||
const normalizedPath = this.provider.normalizePath(path);
|
||||
return new TransactionFileBuilder(this, normalizedPath);
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an operation to the transaction (internal)
|
||||
*/
|
||||
public addOperation(operation: ITransactionOperation): void {
|
||||
if (this.committed) {
|
||||
throw new Error('Cannot add operations to a committed transaction');
|
||||
}
|
||||
if (this.rolledBack) {
|
||||
throw new Error('Cannot add operations to a rolled back transaction');
|
||||
}
|
||||
this.operations.push(operation);
|
||||
}
|
||||
|
||||
/**
|
||||
* Commit the transaction
|
||||
* All operations are executed atomically
|
||||
* If any operation fails, all operations are rolled back
|
||||
*/
|
||||
public async commit(): Promise<void> {
|
||||
if (this.committed) {
|
||||
throw new Error('Transaction already committed');
|
||||
}
|
||||
if (this.rolledBack) {
|
||||
throw new Error('Cannot commit a rolled back transaction');
|
||||
}
|
||||
|
||||
if (this.operations.length === 0) {
|
||||
this.committed = true;
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Prepare transaction (create backups for rollback)
|
||||
const preparedOperations = await this.provider.prepareTransaction(this.operations);
|
||||
this.operations = preparedOperations;
|
||||
|
||||
// Execute the transaction
|
||||
await this.provider.executeTransaction(this.operations);
|
||||
|
||||
this.committed = true;
|
||||
} catch (error) {
|
||||
// Rollback on error
|
||||
await this.rollback();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Rollback the transaction
|
||||
* Reverts all operations that have been executed
|
||||
*/
|
||||
public async rollback(): Promise<void> {
|
||||
if (this.committed) {
|
||||
throw new Error('Cannot rollback a committed transaction');
|
||||
}
|
||||
if (this.rolledBack) {
|
||||
throw new Error('Transaction already rolled back');
|
||||
}
|
||||
|
||||
if (this.operations.length === 0) {
|
||||
this.rolledBack = true;
|
||||
return;
|
||||
}
|
||||
|
||||
await this.provider.rollbackTransaction(this.operations);
|
||||
this.rolledBack = true;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all operations in the transaction
|
||||
*/
|
||||
public getOperations(): readonly ITransactionOperation[] {
|
||||
return this.operations;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the transaction has been committed
|
||||
*/
|
||||
public isCommitted(): boolean {
|
||||
return this.committed;
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the transaction has been rolled back
|
||||
*/
|
||||
public isRolledBack(): boolean {
|
||||
return this.rolledBack;
|
||||
}
|
||||
}
|
||||
108
ts/classes/smartfs.ts
Normal file
108
ts/classes/smartfs.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
/**
|
||||
* SmartFS - Modern pluggable filesystem module
|
||||
* Main entry point for filesystem operations
|
||||
*/
|
||||
|
||||
import type { ISmartFsProvider } from '../interfaces/mod.provider.js';
|
||||
import { SmartFsFile } from './smartfs.file.js';
|
||||
import { SmartFsDirectory } from './smartfs.directory.js';
|
||||
import { SmartFsTransaction } from './smartfs.transaction.js';
|
||||
import { SmartFsWatcher } from './smartfs.watcher.js';
|
||||
|
||||
/**
|
||||
* SmartFS main class
|
||||
* Creates builder instances for fluent filesystem operations
|
||||
*/
|
||||
export class SmartFs {
|
||||
/**
|
||||
* The filesystem provider
|
||||
*/
|
||||
public provider: ISmartFsProvider;
|
||||
|
||||
/**
|
||||
* Create a new SmartFS instance with a provider
|
||||
* @param provider - Filesystem provider to use
|
||||
*/
|
||||
constructor(provider: ISmartFsProvider) {
|
||||
this.provider = provider;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a file builder for fluent file operations
|
||||
* @param path - Path to the file
|
||||
* @returns FileBuilder instance
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const content = await fs.file('/path/to/file.txt')
|
||||
* .encoding('utf8')
|
||||
* .read()
|
||||
* ```
|
||||
*/
|
||||
public file(path: string): SmartFsFile {
|
||||
return new SmartFsFile(this.provider, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a directory builder for fluent directory operations
|
||||
* @param path - Path to the directory
|
||||
* @returns DirectoryBuilder instance
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const files = await fs.directory('/path')
|
||||
* .recursive()
|
||||
* .list()
|
||||
* ```
|
||||
*/
|
||||
public directory(path: string): SmartFsDirectory {
|
||||
return new SmartFsDirectory(this.provider, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a transaction builder for atomic multi-file operations
|
||||
* @returns TransactionBuilder instance
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* await fs.transaction()
|
||||
* .file('/file1.txt').write('content1')
|
||||
* .file('/file2.txt').delete()
|
||||
* .commit()
|
||||
* ```
|
||||
*/
|
||||
public transaction(): SmartFsTransaction {
|
||||
return new SmartFsTransaction(this.provider);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a watcher builder for file system watching
|
||||
* @param path - Path to watch
|
||||
* @returns WatcherBuilder instance
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* const watcher = await fs.watch('/path')
|
||||
* .recursive()
|
||||
* .onChange(event => console.log(event))
|
||||
* .start()
|
||||
* ```
|
||||
*/
|
||||
public watch(path: string): SmartFsWatcher {
|
||||
return new SmartFsWatcher(this.provider, path);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get provider capabilities
|
||||
*/
|
||||
public getCapabilities() {
|
||||
return this.provider.capabilities;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get provider name
|
||||
*/
|
||||
public getProviderName(): string {
|
||||
return this.provider.name;
|
||||
}
|
||||
}
|
||||
229
ts/classes/smartfs.watcher.ts
Normal file
229
ts/classes/smartfs.watcher.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
/**
|
||||
* Watcher builder for file system watching
|
||||
*/
|
||||
|
||||
import type { ISmartFsProvider, IWatcherHandle } from '../interfaces/mod.provider.js';
|
||||
import type { IWatchEvent, IWatchOptions, TWatchEventType } from '../interfaces/mod.types.js';
|
||||
|
||||
/**
|
||||
* Event handler type
|
||||
*/
|
||||
type TEventHandler = (event: IWatchEvent) => void | Promise<void>;
|
||||
|
||||
/**
|
||||
* Active watcher handle that allows stopping the watcher
|
||||
*/
|
||||
export class SmartFsActiveWatcher {
|
||||
constructor(private handle: IWatcherHandle) {}
|
||||
|
||||
/**
|
||||
* Stop watching for file system changes
|
||||
*/
|
||||
public async stop(): Promise<void> {
|
||||
return this.handle.stop();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Watcher builder class for file system watching
|
||||
* Configuration methods return `this` for chaining
|
||||
* Call `.start()` to begin watching
|
||||
*/
|
||||
export class SmartFsWatcher {
|
||||
private provider: ISmartFsProvider;
|
||||
private path: string;
|
||||
|
||||
// Configuration options
|
||||
private options: {
|
||||
recursive?: boolean;
|
||||
filter?: string | RegExp | ((path: string) => boolean);
|
||||
debounce?: number;
|
||||
} = {};
|
||||
|
||||
// Event handlers
|
||||
private handlers: {
|
||||
change?: TEventHandler[];
|
||||
add?: TEventHandler[];
|
||||
delete?: TEventHandler[];
|
||||
all?: TEventHandler[];
|
||||
} = {};
|
||||
|
||||
// Debounce state
|
||||
private debounceTimers: Map<string, NodeJS.Timeout> = new Map();
|
||||
|
||||
constructor(provider: ISmartFsProvider, path: string) {
|
||||
this.provider = provider;
|
||||
this.path = this.provider.normalizePath(path);
|
||||
|
||||
if (!this.provider.capabilities.supportsWatch) {
|
||||
throw new Error(`Provider '${this.provider.name}' does not support file watching`);
|
||||
}
|
||||
}
|
||||
|
||||
// --- Configuration Methods (return this for chaining) ---
|
||||
|
||||
/**
|
||||
* Enable recursive watching (watch subdirectories)
|
||||
*/
|
||||
public recursive(): this {
|
||||
this.options.recursive = true;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Filter watched paths
|
||||
* @param filter - String pattern, RegExp, or filter function
|
||||
*
|
||||
* @example
|
||||
* ```typescript
|
||||
* // String pattern (glob-like)
|
||||
* .filter('*.ts')
|
||||
*
|
||||
* // RegExp
|
||||
* .filter(/\.ts$/)
|
||||
*
|
||||
* // Function
|
||||
* .filter(path => path.endsWith('.ts'))
|
||||
* ```
|
||||
*/
|
||||
public filter(filter: string | RegExp | ((path: string) => boolean)): this {
|
||||
this.options.filter = filter;
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Debounce events (wait N milliseconds before firing)
|
||||
* Useful for avoiding rapid-fire events
|
||||
* @param ms - Debounce delay in milliseconds
|
||||
*/
|
||||
public debounce(ms: number): this {
|
||||
this.options.debounce = ms;
|
||||
return this;
|
||||
}
|
||||
|
||||
// --- Event Handler Registration (return this for chaining) ---
|
||||
|
||||
/**
|
||||
* Register handler for 'change' events (file modified)
|
||||
* @param handler - Event handler function
|
||||
*/
|
||||
public onChange(handler: TEventHandler): this {
|
||||
if (!this.handlers.change) {
|
||||
this.handlers.change = [];
|
||||
}
|
||||
this.handlers.change.push(handler);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register handler for 'add' events (file created)
|
||||
* @param handler - Event handler function
|
||||
*/
|
||||
public onAdd(handler: TEventHandler): this {
|
||||
if (!this.handlers.add) {
|
||||
this.handlers.add = [];
|
||||
}
|
||||
this.handlers.add.push(handler);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register handler for 'delete' events (file deleted)
|
||||
* @param handler - Event handler function
|
||||
*/
|
||||
public onDelete(handler: TEventHandler): this {
|
||||
if (!this.handlers.delete) {
|
||||
this.handlers.delete = [];
|
||||
}
|
||||
this.handlers.delete.push(handler);
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Register handler for all events
|
||||
* @param handler - Event handler function
|
||||
*/
|
||||
public onAll(handler: TEventHandler): this {
|
||||
if (!this.handlers.all) {
|
||||
this.handlers.all = [];
|
||||
}
|
||||
this.handlers.all.push(handler);
|
||||
return this;
|
||||
}
|
||||
|
||||
// --- Action Method ---
|
||||
|
||||
/**
|
||||
* Start watching for file system changes
|
||||
* @returns Active watcher handle that can be stopped
|
||||
*/
|
||||
public async start(): Promise<SmartFsActiveWatcher> {
|
||||
const watchOptions: IWatchOptions = {
|
||||
recursive: this.options.recursive,
|
||||
filter: this.options.filter,
|
||||
debounce: this.options.debounce,
|
||||
};
|
||||
|
||||
// Create the callback that dispatches to handlers
|
||||
const callback = async (event: IWatchEvent) => {
|
||||
await this.handleEvent(event);
|
||||
};
|
||||
|
||||
const handle = await this.provider.watch(this.path, callback, watchOptions);
|
||||
return new SmartFsActiveWatcher(handle);
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle incoming watch events (internal)
|
||||
*/
|
||||
private async handleEvent(event: IWatchEvent): Promise<void> {
|
||||
// Apply debouncing if configured
|
||||
if (this.options.debounce && this.options.debounce > 0) {
|
||||
const key = `${event.type}:${event.path}`;
|
||||
|
||||
// Clear existing timer
|
||||
const existingTimer = this.debounceTimers.get(key);
|
||||
if (existingTimer) {
|
||||
clearTimeout(existingTimer);
|
||||
}
|
||||
|
||||
// Set new timer
|
||||
const timer = setTimeout(async () => {
|
||||
this.debounceTimers.delete(key);
|
||||
await this.dispatchEvent(event);
|
||||
}, this.options.debounce);
|
||||
|
||||
this.debounceTimers.set(key, timer);
|
||||
} else {
|
||||
// No debouncing, dispatch immediately
|
||||
await this.dispatchEvent(event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Dispatch event to registered handlers (internal)
|
||||
*/
|
||||
private async dispatchEvent(event: IWatchEvent): Promise<void> {
|
||||
// Dispatch to type-specific handlers
|
||||
const typeHandlers = this.handlers[event.type];
|
||||
if (typeHandlers) {
|
||||
for (const handler of typeHandlers) {
|
||||
await handler(event);
|
||||
}
|
||||
}
|
||||
|
||||
// Dispatch to 'all' handlers
|
||||
if (this.handlers.all) {
|
||||
for (const handler of this.handlers.all) {
|
||||
await handler(event);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the watched path
|
||||
*/
|
||||
public getPath(): string {
|
||||
return this.path;
|
||||
}
|
||||
}
|
||||
40
ts/index.ts
Normal file
40
ts/index.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
/**
|
||||
* SmartFS - Modern pluggable filesystem module
|
||||
* @packageDocumentation
|
||||
*/
|
||||
|
||||
// Main classes
|
||||
export { SmartFs } from './classes/smartfs.js';
|
||||
export { SmartFsFile } from './classes/smartfs.file.js';
|
||||
export { SmartFsDirectory } from './classes/smartfs.directory.js';
|
||||
export { SmartFsTransaction } from './classes/smartfs.transaction.js';
|
||||
export { SmartFsWatcher, SmartFsActiveWatcher } from './classes/smartfs.watcher.js';
|
||||
|
||||
// Providers
|
||||
export { SmartFsProviderNode } from './providers/smartfs.provider.node.js';
|
||||
export { SmartFsProviderMemory } from './providers/smartfs.provider.memory.js';
|
||||
|
||||
// Interfaces and Types
|
||||
export type {
|
||||
ISmartFsProvider,
|
||||
IProviderCapabilities,
|
||||
TWatchCallback,
|
||||
IWatcherHandle,
|
||||
} from './interfaces/mod.provider.js';
|
||||
|
||||
export type {
|
||||
TEncoding,
|
||||
TFileMode,
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
TWatchEventType,
|
||||
IWatchEvent,
|
||||
TTransactionOperationType,
|
||||
ITransactionOperation,
|
||||
IReadOptions,
|
||||
IWriteOptions,
|
||||
IStreamOptions,
|
||||
ICopyOptions,
|
||||
IListOptions,
|
||||
IWatchOptions,
|
||||
} from './interfaces/mod.types.js';
|
||||
201
ts/interfaces/mod.provider.ts
Normal file
201
ts/interfaces/mod.provider.ts
Normal file
@@ -0,0 +1,201 @@
|
||||
/**
|
||||
* Provider interface for SmartFS
|
||||
* All filesystem backends must implement this interface
|
||||
*/
|
||||
|
||||
import type {
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IWatchEvent,
|
||||
IReadOptions,
|
||||
IWriteOptions,
|
||||
IStreamOptions,
|
||||
ICopyOptions,
|
||||
IListOptions,
|
||||
IWatchOptions,
|
||||
ITransactionOperation,
|
||||
} from './mod.types.js';
|
||||
|
||||
/**
|
||||
* Provider capabilities interface
|
||||
*/
|
||||
export interface IProviderCapabilities {
|
||||
/**
|
||||
* Supports file watching
|
||||
*/
|
||||
supportsWatch: boolean;
|
||||
|
||||
/**
|
||||
* Supports atomic writes
|
||||
*/
|
||||
supportsAtomic: boolean;
|
||||
|
||||
/**
|
||||
* Supports transactions
|
||||
*/
|
||||
supportsTransactions: boolean;
|
||||
|
||||
/**
|
||||
* Supports streaming
|
||||
*/
|
||||
supportsStreaming: boolean;
|
||||
|
||||
/**
|
||||
* Supports symbolic links
|
||||
*/
|
||||
supportsSymlinks: boolean;
|
||||
|
||||
/**
|
||||
* Supports file permissions
|
||||
*/
|
||||
supportsPermissions: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch callback type
|
||||
*/
|
||||
export type TWatchCallback = (event: IWatchEvent) => void | Promise<void>;
|
||||
|
||||
/**
|
||||
* Watcher handle interface
|
||||
*/
|
||||
export interface IWatcherHandle {
|
||||
/**
|
||||
* Stop watching
|
||||
*/
|
||||
stop(): Promise<void>;
|
||||
}
|
||||
|
||||
/**
|
||||
* Base provider interface that all filesystem backends must implement
|
||||
*/
|
||||
export interface ISmartFsProvider {
|
||||
/**
|
||||
* Provider name (e.g., 'node', 'memory', 's3')
|
||||
*/
|
||||
readonly name: string;
|
||||
|
||||
/**
|
||||
* Provider capabilities
|
||||
*/
|
||||
readonly capabilities: IProviderCapabilities;
|
||||
|
||||
// --- File Operations ---
|
||||
|
||||
/**
|
||||
* Read a file
|
||||
*/
|
||||
readFile(path: string, options?: IReadOptions): Promise<Buffer | string>;
|
||||
|
||||
/**
|
||||
* Write a file
|
||||
*/
|
||||
writeFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void>;
|
||||
|
||||
/**
|
||||
* Append to a file
|
||||
*/
|
||||
appendFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void>;
|
||||
|
||||
/**
|
||||
* Delete a file
|
||||
*/
|
||||
deleteFile(path: string): Promise<void>;
|
||||
|
||||
/**
|
||||
* Copy a file
|
||||
*/
|
||||
copyFile(from: string, to: string, options?: ICopyOptions): Promise<void>;
|
||||
|
||||
/**
|
||||
* Move a file
|
||||
*/
|
||||
moveFile(from: string, to: string, options?: ICopyOptions): Promise<void>;
|
||||
|
||||
/**
|
||||
* Check if a file exists
|
||||
*/
|
||||
fileExists(path: string): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Get file statistics
|
||||
*/
|
||||
fileStat(path: string): Promise<IFileStats>;
|
||||
|
||||
/**
|
||||
* Create a readable stream
|
||||
*/
|
||||
createReadStream(path: string, options?: IStreamOptions): Promise<ReadableStream<Uint8Array>>;
|
||||
|
||||
/**
|
||||
* Create a writable stream
|
||||
*/
|
||||
createWriteStream(path: string, options?: IStreamOptions): Promise<WritableStream<Uint8Array>>;
|
||||
|
||||
// --- Directory Operations ---
|
||||
|
||||
/**
|
||||
* List directory contents
|
||||
*/
|
||||
listDirectory(path: string, options?: IListOptions): Promise<IDirectoryEntry[]>;
|
||||
|
||||
/**
|
||||
* Create a directory
|
||||
*/
|
||||
createDirectory(path: string, options?: { recursive?: boolean; mode?: number }): Promise<void>;
|
||||
|
||||
/**
|
||||
* Delete a directory
|
||||
*/
|
||||
deleteDirectory(path: string, options?: { recursive?: boolean }): Promise<void>;
|
||||
|
||||
/**
|
||||
* Check if a directory exists
|
||||
*/
|
||||
directoryExists(path: string): Promise<boolean>;
|
||||
|
||||
/**
|
||||
* Get directory statistics
|
||||
*/
|
||||
directoryStat(path: string): Promise<IFileStats>;
|
||||
|
||||
// --- Watch Operations ---
|
||||
|
||||
/**
|
||||
* Watch a path for changes
|
||||
* Returns a handle to stop watching
|
||||
*/
|
||||
watch(path: string, callback: TWatchCallback, options?: IWatchOptions): Promise<IWatcherHandle>;
|
||||
|
||||
// --- Transaction Operations ---
|
||||
|
||||
/**
|
||||
* Execute a transaction
|
||||
* Providers should implement this to support atomic multi-file operations
|
||||
* If not supported, should execute operations sequentially
|
||||
*/
|
||||
executeTransaction(operations: ITransactionOperation[]): Promise<void>;
|
||||
|
||||
/**
|
||||
* Prepare a transaction (create backups for rollback)
|
||||
* Returns prepared operations with backup data
|
||||
*/
|
||||
prepareTransaction(operations: ITransactionOperation[]): Promise<ITransactionOperation[]>;
|
||||
|
||||
/**
|
||||
* Rollback a transaction using backup data
|
||||
*/
|
||||
rollbackTransaction(operations: ITransactionOperation[]): Promise<void>;
|
||||
|
||||
// --- Path Operations ---
|
||||
|
||||
/**
|
||||
* Normalize a path according to the provider's conventions
|
||||
*/
|
||||
normalizePath(path: string): string;
|
||||
|
||||
/**
|
||||
* Join path segments
|
||||
*/
|
||||
joinPath(...segments: string[]): string;
|
||||
}
|
||||
217
ts/interfaces/mod.types.ts
Normal file
217
ts/interfaces/mod.types.ts
Normal file
@@ -0,0 +1,217 @@
|
||||
/**
|
||||
* Core type definitions for SmartFS
|
||||
*/
|
||||
|
||||
/**
|
||||
* File encoding types
|
||||
*/
|
||||
export type TEncoding = 'utf8' | 'utf-8' | 'ascii' | 'base64' | 'hex' | 'binary' | 'buffer';
|
||||
|
||||
/**
|
||||
* File mode (permissions)
|
||||
*/
|
||||
export type TFileMode = number;
|
||||
|
||||
/**
|
||||
* File statistics interface
|
||||
*/
|
||||
export interface IFileStats {
|
||||
/**
|
||||
* File size in bytes
|
||||
*/
|
||||
size: number;
|
||||
|
||||
/**
|
||||
* Creation time
|
||||
*/
|
||||
birthtime: Date;
|
||||
|
||||
/**
|
||||
* Last modification time
|
||||
*/
|
||||
mtime: Date;
|
||||
|
||||
/**
|
||||
* Last access time
|
||||
*/
|
||||
atime: Date;
|
||||
|
||||
/**
|
||||
* Is this a file?
|
||||
*/
|
||||
isFile: boolean;
|
||||
|
||||
/**
|
||||
* Is this a directory?
|
||||
*/
|
||||
isDirectory: boolean;
|
||||
|
||||
/**
|
||||
* Is this a symbolic link?
|
||||
*/
|
||||
isSymbolicLink: boolean;
|
||||
|
||||
/**
|
||||
* File permissions/mode
|
||||
*/
|
||||
mode: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Directory entry interface
|
||||
*/
|
||||
export interface IDirectoryEntry {
|
||||
/**
|
||||
* Entry name (filename or directory name)
|
||||
*/
|
||||
name: string;
|
||||
|
||||
/**
|
||||
* Full path to the entry
|
||||
*/
|
||||
path: string;
|
||||
|
||||
/**
|
||||
* Is this entry a file?
|
||||
*/
|
||||
isFile: boolean;
|
||||
|
||||
/**
|
||||
* Is this entry a directory?
|
||||
*/
|
||||
isDirectory: boolean;
|
||||
|
||||
/**
|
||||
* Is this entry a symbolic link?
|
||||
*/
|
||||
isSymbolicLink: boolean;
|
||||
|
||||
/**
|
||||
* File statistics
|
||||
*/
|
||||
stats?: IFileStats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch event types
|
||||
*/
|
||||
export type TWatchEventType = 'add' | 'change' | 'delete';
|
||||
|
||||
/**
|
||||
* Watch event interface
|
||||
*/
|
||||
export interface IWatchEvent {
|
||||
/**
|
||||
* Event type
|
||||
*/
|
||||
type: TWatchEventType;
|
||||
|
||||
/**
|
||||
* Path that triggered the event
|
||||
*/
|
||||
path: string;
|
||||
|
||||
/**
|
||||
* Timestamp of the event
|
||||
*/
|
||||
timestamp: Date;
|
||||
|
||||
/**
|
||||
* File statistics (if available)
|
||||
*/
|
||||
stats?: IFileStats;
|
||||
}
|
||||
|
||||
/**
|
||||
* Transaction operation types
|
||||
*/
|
||||
export type TTransactionOperationType = 'write' | 'delete' | 'copy' | 'move' | 'append';
|
||||
|
||||
/**
|
||||
* Transaction operation interface
|
||||
*/
|
||||
export interface ITransactionOperation {
|
||||
/**
|
||||
* Operation type
|
||||
*/
|
||||
type: TTransactionOperationType;
|
||||
|
||||
/**
|
||||
* Source path
|
||||
*/
|
||||
path: string;
|
||||
|
||||
/**
|
||||
* Target path (for copy/move operations)
|
||||
*/
|
||||
targetPath?: string;
|
||||
|
||||
/**
|
||||
* Content to write (for write/append operations)
|
||||
*/
|
||||
content?: string | Buffer;
|
||||
|
||||
/**
|
||||
* Encoding (for write/append operations)
|
||||
*/
|
||||
encoding?: TEncoding;
|
||||
|
||||
/**
|
||||
* Backup data for rollback
|
||||
*/
|
||||
backup?: {
|
||||
existed: boolean;
|
||||
content?: Buffer;
|
||||
stats?: IFileStats;
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Read options interface
|
||||
*/
|
||||
export interface IReadOptions {
|
||||
encoding?: TEncoding;
|
||||
}
|
||||
|
||||
/**
|
||||
* Write options interface
|
||||
*/
|
||||
export interface IWriteOptions {
|
||||
encoding?: TEncoding;
|
||||
mode?: TFileMode;
|
||||
atomic?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Stream options interface
|
||||
*/
|
||||
export interface IStreamOptions {
|
||||
chunkSize?: number;
|
||||
highWaterMark?: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Copy/Move options interface
|
||||
*/
|
||||
export interface ICopyOptions {
|
||||
preserveTimestamps?: boolean;
|
||||
overwrite?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* List options interface
|
||||
*/
|
||||
export interface IListOptions {
|
||||
recursive?: boolean;
|
||||
filter?: string | RegExp | ((entry: IDirectoryEntry) => boolean);
|
||||
includeStats?: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* Watch options interface
|
||||
*/
|
||||
export interface IWatchOptions {
|
||||
recursive?: boolean;
|
||||
filter?: string | RegExp | ((path: string) => boolean);
|
||||
debounce?: number;
|
||||
}
|
||||
5
ts/paths.ts
Normal file
5
ts/paths.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import * as plugins from './smartfs.plugins.js';
|
||||
export const packageDir = plugins.path.join(
|
||||
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
|
||||
'../',
|
||||
);
|
||||
642
ts/providers/smartfs.provider.memory.ts
Normal file
642
ts/providers/smartfs.provider.memory.ts
Normal file
@@ -0,0 +1,642 @@
|
||||
/**
|
||||
* In-memory filesystem provider for SmartFS
|
||||
* Perfect for testing and temporary storage
|
||||
*/
|
||||
|
||||
import type {
|
||||
ISmartFsProvider,
|
||||
IProviderCapabilities,
|
||||
TWatchCallback,
|
||||
IWatcherHandle,
|
||||
} from '../interfaces/mod.provider.js';
|
||||
|
||||
import type {
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IReadOptions,
|
||||
IWriteOptions,
|
||||
IStreamOptions,
|
||||
ICopyOptions,
|
||||
IListOptions,
|
||||
IWatchOptions,
|
||||
ITransactionOperation,
|
||||
IWatchEvent,
|
||||
TWatchEventType,
|
||||
} from '../interfaces/mod.types.js';
|
||||
|
||||
/**
|
||||
* In-memory file entry
|
||||
*/
|
||||
interface IMemoryEntry {
|
||||
type: 'file' | 'directory';
|
||||
content?: Buffer;
|
||||
created: Date;
|
||||
modified: Date;
|
||||
accessed: Date;
|
||||
mode: number;
|
||||
}
|
||||
|
||||
/**
|
||||
* Watcher registration
|
||||
*/
|
||||
interface IWatcherRegistration {
|
||||
path: string;
|
||||
callback: TWatchCallback;
|
||||
options?: IWatchOptions;
|
||||
}
|
||||
|
||||
/**
|
||||
* In-memory filesystem provider
|
||||
*/
|
||||
export class SmartFsProviderMemory implements ISmartFsProvider {
|
||||
public readonly name = 'memory';
|
||||
|
||||
public readonly capabilities: IProviderCapabilities = {
|
||||
supportsWatch: true,
|
||||
supportsAtomic: true,
|
||||
supportsTransactions: true,
|
||||
supportsStreaming: true,
|
||||
supportsSymlinks: false, // Not implemented yet
|
||||
supportsPermissions: true,
|
||||
};
|
||||
|
||||
private storage: Map<string, IMemoryEntry> = new Map();
|
||||
private watchers: Map<string, IWatcherRegistration> = new Map();
|
||||
private nextWatcherId = 1;
|
||||
|
||||
constructor() {
|
||||
// Create root directory
|
||||
this.storage.set('/', {
|
||||
type: 'directory',
|
||||
created: new Date(),
|
||||
modified: new Date(),
|
||||
accessed: new Date(),
|
||||
mode: 0o755,
|
||||
});
|
||||
}
|
||||
|
||||
// --- File Operations ---
|
||||
|
||||
public async readFile(path: string, options?: IReadOptions): Promise<Buffer | string> {
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(`ENOENT: no such file or directory, open '${path}'`);
|
||||
}
|
||||
|
||||
if (entry.type !== 'file') {
|
||||
throw new Error(`EISDIR: illegal operation on a directory, read '${path}'`);
|
||||
}
|
||||
|
||||
entry.accessed = new Date();
|
||||
|
||||
if (!entry.content) {
|
||||
return options?.encoding ? '' : Buffer.alloc(0);
|
||||
}
|
||||
|
||||
if (options?.encoding && options.encoding !== 'buffer') {
|
||||
return entry.content.toString(options.encoding as BufferEncoding);
|
||||
}
|
||||
|
||||
return entry.content;
|
||||
}
|
||||
|
||||
public async writeFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void> {
|
||||
const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content, options?.encoding as BufferEncoding);
|
||||
|
||||
// Ensure parent directory exists
|
||||
await this.ensureParentDirectory(path);
|
||||
|
||||
const now = new Date();
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (entry && entry.type === 'directory') {
|
||||
throw new Error(`EISDIR: illegal operation on a directory, open '${path}'`);
|
||||
}
|
||||
|
||||
this.storage.set(path, {
|
||||
type: 'file',
|
||||
content: buffer,
|
||||
created: entry?.created || now,
|
||||
modified: now,
|
||||
accessed: now,
|
||||
mode: options?.mode || 0o644,
|
||||
});
|
||||
|
||||
await this.emitWatchEvent(path, entry ? 'change' : 'add');
|
||||
}
|
||||
|
||||
public async appendFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void> {
|
||||
const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content, options?.encoding as BufferEncoding);
|
||||
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (entry && entry.type === 'directory') {
|
||||
throw new Error(`EISDIR: illegal operation on a directory, open '${path}'`);
|
||||
}
|
||||
|
||||
const existingContent = entry?.content || Buffer.alloc(0);
|
||||
const newContent = Buffer.concat([existingContent, buffer]);
|
||||
|
||||
await this.writeFile(path, newContent, options);
|
||||
}
|
||||
|
||||
public async deleteFile(path: string): Promise<void> {
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(`ENOENT: no such file or directory, unlink '${path}'`);
|
||||
}
|
||||
|
||||
if (entry.type === 'directory') {
|
||||
throw new Error(`EISDIR: illegal operation on a directory, unlink '${path}'`);
|
||||
}
|
||||
|
||||
this.storage.delete(path);
|
||||
await this.emitWatchEvent(path, 'delete');
|
||||
}
|
||||
|
||||
public async copyFile(from: string, to: string, options?: ICopyOptions): Promise<void> {
|
||||
const fromEntry = this.storage.get(from);
|
||||
|
||||
if (!fromEntry) {
|
||||
throw new Error(`ENOENT: no such file or directory, copyfile '${from}'`);
|
||||
}
|
||||
|
||||
if (fromEntry.type !== 'file') {
|
||||
throw new Error(`EISDIR: illegal operation on a directory, copyfile '${from}'`);
|
||||
}
|
||||
|
||||
const toEntry = this.storage.get(to);
|
||||
if (toEntry && !options?.overwrite) {
|
||||
throw new Error(`EEXIST: file already exists, copyfile '${from}' -> '${to}'`);
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
this.storage.set(to, {
|
||||
type: 'file',
|
||||
content: fromEntry.content ? Buffer.from(fromEntry.content) : undefined,
|
||||
created: now,
|
||||
modified: options?.preserveTimestamps ? fromEntry.modified : now,
|
||||
accessed: now,
|
||||
mode: fromEntry.mode,
|
||||
});
|
||||
|
||||
await this.emitWatchEvent(to, toEntry ? 'change' : 'add');
|
||||
}
|
||||
|
||||
public async moveFile(from: string, to: string, options?: ICopyOptions): Promise<void> {
|
||||
await this.copyFile(from, to, options);
|
||||
await this.deleteFile(from);
|
||||
}
|
||||
|
||||
public async fileExists(path: string): Promise<boolean> {
|
||||
const entry = this.storage.get(path);
|
||||
return entry !== undefined && entry.type === 'file';
|
||||
}
|
||||
|
||||
public async fileStat(path: string): Promise<IFileStats> {
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(`ENOENT: no such file or directory, stat '${path}'`);
|
||||
}
|
||||
|
||||
if (entry.type !== 'file') {
|
||||
throw new Error(`EISDIR: illegal operation on a directory, stat '${path}'`);
|
||||
}
|
||||
|
||||
return {
|
||||
size: entry.content?.length || 0,
|
||||
birthtime: entry.created,
|
||||
mtime: entry.modified,
|
||||
atime: entry.accessed,
|
||||
isFile: true,
|
||||
isDirectory: false,
|
||||
isSymbolicLink: false,
|
||||
mode: entry.mode,
|
||||
};
|
||||
}
|
||||
|
||||
public async createReadStream(path: string, options?: IStreamOptions): Promise<ReadableStream<Uint8Array>> {
|
||||
const content = await this.readFile(path);
|
||||
const buffer = Buffer.isBuffer(content) ? content : Buffer.from(content);
|
||||
const chunkSize = options?.chunkSize || 64 * 1024;
|
||||
|
||||
let offset = 0;
|
||||
|
||||
return new ReadableStream({
|
||||
pull(controller) {
|
||||
if (offset >= buffer.length) {
|
||||
controller.close();
|
||||
return;
|
||||
}
|
||||
|
||||
const end = Math.min(offset + chunkSize, buffer.length);
|
||||
const chunk = buffer.subarray(offset, end);
|
||||
controller.enqueue(new Uint8Array(chunk));
|
||||
offset = end;
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
public async createWriteStream(path: string, options?: IStreamOptions): Promise<WritableStream<Uint8Array>> {
|
||||
const chunks: Buffer[] = [];
|
||||
|
||||
return new WritableStream({
|
||||
write: async (chunk) => {
|
||||
chunks.push(Buffer.from(chunk));
|
||||
},
|
||||
close: async () => {
|
||||
const content = Buffer.concat(chunks);
|
||||
await this.writeFile(path, content);
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
// --- Directory Operations ---
|
||||
|
||||
public async listDirectory(path: string, options?: IListOptions): Promise<IDirectoryEntry[]> {
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(`ENOENT: no such file or directory, scandir '${path}'`);
|
||||
}
|
||||
|
||||
if (entry.type !== 'directory') {
|
||||
throw new Error(`ENOTDIR: not a directory, scandir '${path}'`);
|
||||
}
|
||||
|
||||
const entries: IDirectoryEntry[] = [];
|
||||
const normalizedPath = this.normalizePath(path);
|
||||
const prefix = normalizedPath === '/' ? '/' : `${normalizedPath}/`;
|
||||
|
||||
for (const [entryPath, entryData] of this.storage.entries()) {
|
||||
if (entryPath === normalizedPath) continue;
|
||||
|
||||
if (options?.recursive) {
|
||||
// Recursive: include all descendants
|
||||
if (entryPath.startsWith(prefix)) {
|
||||
const relativePath = entryPath.slice(prefix.length);
|
||||
const name = relativePath.split('/').pop()!;
|
||||
|
||||
const directoryEntry: IDirectoryEntry = {
|
||||
name,
|
||||
path: entryPath,
|
||||
isFile: entryData.type === 'file',
|
||||
isDirectory: entryData.type === 'directory',
|
||||
isSymbolicLink: false,
|
||||
};
|
||||
|
||||
if (this.matchesFilter(directoryEntry, options.filter)) {
|
||||
if (options.includeStats) {
|
||||
directoryEntry.stats = await this.getEntryStats(entryPath, entryData);
|
||||
}
|
||||
entries.push(directoryEntry);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// Non-recursive: only direct children
|
||||
if (entryPath.startsWith(prefix) && !entryPath.slice(prefix.length).includes('/')) {
|
||||
const name = entryPath.slice(prefix.length);
|
||||
|
||||
const directoryEntry: IDirectoryEntry = {
|
||||
name,
|
||||
path: entryPath,
|
||||
isFile: entryData.type === 'file',
|
||||
isDirectory: entryData.type === 'directory',
|
||||
isSymbolicLink: false,
|
||||
};
|
||||
|
||||
if (this.matchesFilter(directoryEntry, options?.filter)) {
|
||||
if (options?.includeStats) {
|
||||
directoryEntry.stats = await this.getEntryStats(entryPath, entryData);
|
||||
}
|
||||
entries.push(directoryEntry);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
public async createDirectory(path: string, options?: { recursive?: boolean; mode?: number }): Promise<void> {
|
||||
const normalizedPath = this.normalizePath(path);
|
||||
|
||||
if (options?.recursive) {
|
||||
// Create parent directories
|
||||
const parts = normalizedPath.split('/').filter(Boolean);
|
||||
let currentPath = '/';
|
||||
|
||||
for (const part of parts) {
|
||||
currentPath = currentPath === '/' ? `/${part}` : `${currentPath}/${part}`;
|
||||
|
||||
if (!this.storage.has(currentPath)) {
|
||||
const now = new Date();
|
||||
this.storage.set(currentPath, {
|
||||
type: 'directory',
|
||||
created: now,
|
||||
modified: now,
|
||||
accessed: now,
|
||||
mode: options?.mode || 0o755,
|
||||
});
|
||||
await this.emitWatchEvent(currentPath, 'add');
|
||||
}
|
||||
}
|
||||
} else {
|
||||
const entry = this.storage.get(normalizedPath);
|
||||
|
||||
if (entry) {
|
||||
throw new Error(`EEXIST: file already exists, mkdir '${normalizedPath}'`);
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
this.storage.set(normalizedPath, {
|
||||
type: 'directory',
|
||||
created: now,
|
||||
modified: now,
|
||||
accessed: now,
|
||||
mode: options?.mode || 0o755,
|
||||
});
|
||||
|
||||
await this.emitWatchEvent(normalizedPath, 'add');
|
||||
}
|
||||
}
|
||||
|
||||
public async deleteDirectory(path: string, options?: { recursive?: boolean }): Promise<void> {
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(`ENOENT: no such file or directory, rmdir '${path}'`);
|
||||
}
|
||||
|
||||
if (entry.type !== 'directory') {
|
||||
throw new Error(`ENOTDIR: not a directory, rmdir '${path}'`);
|
||||
}
|
||||
|
||||
if (options?.recursive) {
|
||||
// Delete all descendants
|
||||
const normalizedPath = this.normalizePath(path);
|
||||
const prefix = normalizedPath === '/' ? '/' : `${normalizedPath}/`;
|
||||
|
||||
const toDelete: string[] = [];
|
||||
for (const entryPath of this.storage.keys()) {
|
||||
if (entryPath.startsWith(prefix) || entryPath === normalizedPath) {
|
||||
toDelete.push(entryPath);
|
||||
}
|
||||
}
|
||||
|
||||
for (const entryPath of toDelete) {
|
||||
this.storage.delete(entryPath);
|
||||
await this.emitWatchEvent(entryPath, 'delete');
|
||||
}
|
||||
} else {
|
||||
// Check if directory is empty
|
||||
const children = await this.listDirectory(path);
|
||||
if (children.length > 0) {
|
||||
throw new Error(`ENOTEMPTY: directory not empty, rmdir '${path}'`);
|
||||
}
|
||||
|
||||
this.storage.delete(path);
|
||||
await this.emitWatchEvent(path, 'delete');
|
||||
}
|
||||
}
|
||||
|
||||
public async directoryExists(path: string): Promise<boolean> {
|
||||
const entry = this.storage.get(path);
|
||||
return entry !== undefined && entry.type === 'directory';
|
||||
}
|
||||
|
||||
public async directoryStat(path: string): Promise<IFileStats> {
|
||||
const entry = this.storage.get(path);
|
||||
|
||||
if (!entry) {
|
||||
throw new Error(`ENOENT: no such file or directory, stat '${path}'`);
|
||||
}
|
||||
|
||||
if (entry.type !== 'directory') {
|
||||
throw new Error(`ENOTDIR: not a directory, stat '${path}'`);
|
||||
}
|
||||
|
||||
return {
|
||||
size: 0,
|
||||
birthtime: entry.created,
|
||||
mtime: entry.modified,
|
||||
atime: entry.accessed,
|
||||
isFile: false,
|
||||
isDirectory: true,
|
||||
isSymbolicLink: false,
|
||||
mode: entry.mode,
|
||||
};
|
||||
}
|
||||
|
||||
// --- Watch Operations ---
|
||||
|
||||
public async watch(path: string, callback: TWatchCallback, options?: IWatchOptions): Promise<IWatcherHandle> {
|
||||
const watcherId = `watcher-${this.nextWatcherId++}`;
|
||||
|
||||
this.watchers.set(watcherId, {
|
||||
path: this.normalizePath(path),
|
||||
callback,
|
||||
options,
|
||||
});
|
||||
|
||||
return {
|
||||
stop: async () => {
|
||||
this.watchers.delete(watcherId);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// --- Transaction Operations ---
|
||||
|
||||
public async prepareTransaction(operations: ITransactionOperation[]): Promise<ITransactionOperation[]> {
|
||||
const prepared: ITransactionOperation[] = [];
|
||||
|
||||
for (const op of operations) {
|
||||
const preparedOp = { ...op };
|
||||
const entry = this.storage.get(op.path);
|
||||
|
||||
if (entry && entry.type === 'file') {
|
||||
preparedOp.backup = {
|
||||
existed: true,
|
||||
content: entry.content ? Buffer.from(entry.content) : undefined,
|
||||
stats: await this.getEntryStats(op.path, entry),
|
||||
};
|
||||
} else {
|
||||
preparedOp.backup = {
|
||||
existed: false,
|
||||
};
|
||||
}
|
||||
|
||||
prepared.push(preparedOp);
|
||||
}
|
||||
|
||||
return prepared;
|
||||
}
|
||||
|
||||
public async executeTransaction(operations: ITransactionOperation[]): Promise<void> {
|
||||
for (const op of operations) {
|
||||
try {
|
||||
switch (op.type) {
|
||||
case 'write':
|
||||
await this.writeFile(op.path, op.content!, { encoding: op.encoding });
|
||||
break;
|
||||
case 'append':
|
||||
await this.appendFile(op.path, op.content!, { encoding: op.encoding });
|
||||
break;
|
||||
case 'delete':
|
||||
await this.deleteFile(op.path);
|
||||
break;
|
||||
case 'copy':
|
||||
await this.copyFile(op.path, op.targetPath!);
|
||||
break;
|
||||
case 'move':
|
||||
await this.moveFile(op.path, op.targetPath!);
|
||||
break;
|
||||
}
|
||||
} catch (error) {
|
||||
// On error, rollback
|
||||
await this.rollbackTransaction(operations);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async rollbackTransaction(operations: ITransactionOperation[]): Promise<void> {
|
||||
for (let i = operations.length - 1; i >= 0; i--) {
|
||||
const op = operations[i];
|
||||
if (!op.backup) continue;
|
||||
|
||||
try {
|
||||
if (op.backup.existed && op.backup.content) {
|
||||
await this.writeFile(op.path, op.backup.content);
|
||||
} else if (!op.backup.existed) {
|
||||
try {
|
||||
await this.deleteFile(op.path);
|
||||
} catch {
|
||||
// Ignore errors
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore rollback errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- Path Operations ---
|
||||
|
||||
public normalizePath(path: string): string {
|
||||
// Simple normalization
|
||||
let normalized = path.replace(/\\/g, '/');
|
||||
normalized = normalized.replace(/\/+/g, '/');
|
||||
if (normalized !== '/' && normalized.endsWith('/')) {
|
||||
normalized = normalized.slice(0, -1);
|
||||
}
|
||||
if (!normalized.startsWith('/')) {
|
||||
normalized = `/${normalized}`;
|
||||
}
|
||||
return normalized;
|
||||
}
|
||||
|
||||
public joinPath(...segments: string[]): string {
|
||||
return this.normalizePath(segments.join('/'));
|
||||
}
|
||||
|
||||
// --- Helper Methods ---
|
||||
|
||||
private async ensureParentDirectory(path: string): Promise<void> {
|
||||
const parentPath = path.split('/').slice(0, -1).join('/') || '/';
|
||||
if (!this.storage.has(parentPath)) {
|
||||
await this.createDirectory(parentPath, { recursive: true });
|
||||
}
|
||||
}
|
||||
|
||||
private async emitWatchEvent(path: string, type: TWatchEventType): Promise<void> {
|
||||
const normalizedPath = this.normalizePath(path);
|
||||
|
||||
for (const { path: watchPath, callback, options } of this.watchers.values()) {
|
||||
const shouldTrigger = options?.recursive
|
||||
? normalizedPath.startsWith(watchPath)
|
||||
: normalizedPath.split('/').slice(0, -1).join('/') === watchPath;
|
||||
|
||||
if (!shouldTrigger) continue;
|
||||
|
||||
// Apply filter
|
||||
if (options?.filter && !this.matchesPathFilter(normalizedPath, options.filter)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
const entry = this.storage.get(normalizedPath);
|
||||
const event: IWatchEvent = {
|
||||
type,
|
||||
path: normalizedPath,
|
||||
timestamp: new Date(),
|
||||
stats: entry ? await this.getEntryStats(normalizedPath, entry) : undefined,
|
||||
};
|
||||
|
||||
await callback(event);
|
||||
}
|
||||
}
|
||||
|
||||
private async getEntryStats(path: string, entry: IMemoryEntry): Promise<IFileStats> {
|
||||
return {
|
||||
size: entry.content?.length || 0,
|
||||
birthtime: entry.created,
|
||||
mtime: entry.modified,
|
||||
atime: entry.accessed,
|
||||
isFile: entry.type === 'file',
|
||||
isDirectory: entry.type === 'directory',
|
||||
isSymbolicLink: false,
|
||||
mode: entry.mode,
|
||||
};
|
||||
}
|
||||
|
||||
private matchesFilter(
|
||||
entry: IDirectoryEntry,
|
||||
filter?: string | RegExp | ((entry: IDirectoryEntry) => boolean),
|
||||
): boolean {
|
||||
if (!filter) return true;
|
||||
|
||||
if (typeof filter === 'function') {
|
||||
return filter(entry);
|
||||
} else if (filter instanceof RegExp) {
|
||||
return filter.test(entry.name);
|
||||
} else {
|
||||
const pattern = filter.replace(/\*/g, '.*');
|
||||
const regex = new RegExp(`^${pattern}$`);
|
||||
return regex.test(entry.name);
|
||||
}
|
||||
}
|
||||
|
||||
private matchesPathFilter(
|
||||
path: string,
|
||||
filter: string | RegExp | ((path: string) => boolean),
|
||||
): boolean {
|
||||
if (typeof filter === 'function') {
|
||||
return filter(path);
|
||||
} else if (filter instanceof RegExp) {
|
||||
return filter.test(path);
|
||||
} else {
|
||||
const pattern = filter.replace(/\*/g, '.*');
|
||||
const regex = new RegExp(`^${pattern}$`);
|
||||
return regex.test(path);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all data (useful for testing)
|
||||
*/
|
||||
public clear(): void {
|
||||
this.storage.clear();
|
||||
// Recreate root
|
||||
this.storage.set('/', {
|
||||
type: 'directory',
|
||||
created: new Date(),
|
||||
modified: new Date(),
|
||||
accessed: new Date(),
|
||||
mode: 0o755,
|
||||
});
|
||||
}
|
||||
}
|
||||
514
ts/providers/smartfs.provider.node.ts
Normal file
514
ts/providers/smartfs.provider.node.ts
Normal file
@@ -0,0 +1,514 @@
|
||||
/**
|
||||
* Node.js filesystem provider for SmartFS
|
||||
* Uses Node.js fs/promises and fs.watch APIs
|
||||
*/
|
||||
|
||||
import * as fs from 'fs/promises';
|
||||
import * as fsSync from 'fs';
|
||||
import * as pathModule from 'path';
|
||||
import { Readable, Writable } from 'stream';
|
||||
|
||||
import type {
|
||||
ISmartFsProvider,
|
||||
IProviderCapabilities,
|
||||
TWatchCallback,
|
||||
IWatcherHandle,
|
||||
} from '../interfaces/mod.provider.js';
|
||||
|
||||
import type {
|
||||
IFileStats,
|
||||
IDirectoryEntry,
|
||||
IReadOptions,
|
||||
IWriteOptions,
|
||||
IStreamOptions,
|
||||
ICopyOptions,
|
||||
IListOptions,
|
||||
IWatchOptions,
|
||||
ITransactionOperation,
|
||||
IWatchEvent,
|
||||
TWatchEventType,
|
||||
} from '../interfaces/mod.types.js';
|
||||
|
||||
/**
|
||||
* Node.js filesystem provider
|
||||
*/
|
||||
export class SmartFsProviderNode implements ISmartFsProvider {
|
||||
public readonly name = 'node';
|
||||
|
||||
public readonly capabilities: IProviderCapabilities = {
|
||||
supportsWatch: true,
|
||||
supportsAtomic: true,
|
||||
supportsTransactions: true,
|
||||
supportsStreaming: true,
|
||||
supportsSymlinks: true,
|
||||
supportsPermissions: true,
|
||||
};
|
||||
|
||||
// --- File Operations ---
|
||||
|
||||
public async readFile(path: string, options?: IReadOptions): Promise<Buffer | string> {
|
||||
const encoding = options?.encoding === 'buffer' ? undefined : (options?.encoding as BufferEncoding);
|
||||
if (encoding) {
|
||||
return fs.readFile(path, { encoding });
|
||||
}
|
||||
return fs.readFile(path);
|
||||
}
|
||||
|
||||
public async writeFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void> {
|
||||
const encoding = options?.encoding === 'buffer' ? undefined : (options?.encoding as BufferEncoding);
|
||||
const mode = options?.mode;
|
||||
|
||||
if (options?.atomic) {
|
||||
// Atomic write: write to temp file, then rename
|
||||
const tempPath = `${path}.tmp.${Date.now()}.${Math.random().toString(36).slice(2)}`;
|
||||
try {
|
||||
await fs.writeFile(tempPath, content, { encoding, mode });
|
||||
await fs.rename(tempPath, path);
|
||||
} catch (error) {
|
||||
// Clean up temp file on error
|
||||
try {
|
||||
await fs.unlink(tempPath);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
} else {
|
||||
await fs.writeFile(path, content, { encoding, mode });
|
||||
}
|
||||
}
|
||||
|
||||
public async appendFile(path: string, content: string | Buffer, options?: IWriteOptions): Promise<void> {
|
||||
const encoding = options?.encoding === 'buffer' ? undefined : (options?.encoding as BufferEncoding);
|
||||
const mode = options?.mode;
|
||||
await fs.appendFile(path, content, { encoding, mode });
|
||||
}
|
||||
|
||||
public async deleteFile(path: string): Promise<void> {
|
||||
await fs.unlink(path);
|
||||
}
|
||||
|
||||
public async copyFile(from: string, to: string, options?: ICopyOptions): Promise<void> {
|
||||
// Copy the file
|
||||
await fs.copyFile(from, to);
|
||||
|
||||
// Preserve timestamps if requested
|
||||
if (options?.preserveTimestamps) {
|
||||
const stats = await fs.stat(from);
|
||||
await fs.utimes(to, stats.atime, stats.mtime);
|
||||
}
|
||||
}
|
||||
|
||||
public async moveFile(from: string, to: string, options?: ICopyOptions): Promise<void> {
|
||||
try {
|
||||
// Try rename first (fastest if on same filesystem)
|
||||
await fs.rename(from, to);
|
||||
|
||||
// Preserve timestamps if requested
|
||||
if (options?.preserveTimestamps) {
|
||||
const stats = await fs.stat(to);
|
||||
await fs.utimes(to, stats.atime, stats.mtime);
|
||||
}
|
||||
} catch (error: any) {
|
||||
if (error.code === 'EXDEV') {
|
||||
// Cross-device move: copy then delete
|
||||
await this.copyFile(from, to, options);
|
||||
await this.deleteFile(from);
|
||||
} else {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async fileExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(path);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public async fileStat(path: string): Promise<IFileStats> {
|
||||
const stats = await fs.stat(path);
|
||||
return this.convertStats(stats);
|
||||
}
|
||||
|
||||
public async createReadStream(path: string, options?: IStreamOptions): Promise<ReadableStream<Uint8Array>> {
|
||||
const nodeStream = fsSync.createReadStream(path, {
|
||||
highWaterMark: options?.chunkSize || options?.highWaterMark,
|
||||
});
|
||||
|
||||
return this.nodeReadableToWeb(nodeStream);
|
||||
}
|
||||
|
||||
public async createWriteStream(path: string, options?: IStreamOptions): Promise<WritableStream<Uint8Array>> {
|
||||
const nodeStream = fsSync.createWriteStream(path, {
|
||||
highWaterMark: options?.chunkSize || options?.highWaterMark,
|
||||
});
|
||||
|
||||
return this.nodeWritableToWeb(nodeStream);
|
||||
}
|
||||
|
||||
// --- Directory Operations ---
|
||||
|
||||
public async listDirectory(path: string, options?: IListOptions): Promise<IDirectoryEntry[]> {
|
||||
const entries: IDirectoryEntry[] = [];
|
||||
|
||||
if (options?.recursive) {
|
||||
await this.listDirectoryRecursive(path, entries, options);
|
||||
} else {
|
||||
const dirents = await fs.readdir(path, { withFileTypes: true });
|
||||
|
||||
for (const dirent of dirents) {
|
||||
const entryPath = pathModule.join(path, dirent.name);
|
||||
const entry: IDirectoryEntry = {
|
||||
name: dirent.name,
|
||||
path: entryPath,
|
||||
isFile: dirent.isFile(),
|
||||
isDirectory: dirent.isDirectory(),
|
||||
isSymbolicLink: dirent.isSymbolicLink(),
|
||||
};
|
||||
|
||||
// Apply filter
|
||||
if (options?.filter && !this.matchesFilter(entry, options.filter)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Add stats if requested
|
||||
if (options?.includeStats) {
|
||||
try {
|
||||
entry.stats = await this.fileStat(entryPath);
|
||||
} catch {
|
||||
// Ignore stat errors
|
||||
}
|
||||
}
|
||||
|
||||
entries.push(entry);
|
||||
}
|
||||
}
|
||||
|
||||
return entries;
|
||||
}
|
||||
|
||||
private async listDirectoryRecursive(
|
||||
path: string,
|
||||
entries: IDirectoryEntry[],
|
||||
options?: IListOptions,
|
||||
): Promise<void> {
|
||||
const dirents = await fs.readdir(path, { withFileTypes: true });
|
||||
|
||||
for (const dirent of dirents) {
|
||||
const entryPath = pathModule.join(path, dirent.name);
|
||||
const entry: IDirectoryEntry = {
|
||||
name: dirent.name,
|
||||
path: entryPath,
|
||||
isFile: dirent.isFile(),
|
||||
isDirectory: dirent.isDirectory(),
|
||||
isSymbolicLink: dirent.isSymbolicLink(),
|
||||
};
|
||||
|
||||
// Apply filter
|
||||
if (options?.filter && !this.matchesFilter(entry, options.filter)) {
|
||||
// Skip this entry but continue recursion for directories
|
||||
if (dirent.isDirectory()) {
|
||||
await this.listDirectoryRecursive(entryPath, entries, options);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Add stats if requested
|
||||
if (options?.includeStats) {
|
||||
try {
|
||||
entry.stats = await this.fileStat(entryPath);
|
||||
} catch {
|
||||
// Ignore stat errors
|
||||
}
|
||||
}
|
||||
|
||||
entries.push(entry);
|
||||
|
||||
// Recurse into subdirectories
|
||||
if (dirent.isDirectory()) {
|
||||
await this.listDirectoryRecursive(entryPath, entries, options);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async createDirectory(path: string, options?: { recursive?: boolean; mode?: number }): Promise<void> {
|
||||
await fs.mkdir(path, {
|
||||
recursive: options?.recursive,
|
||||
mode: options?.mode,
|
||||
});
|
||||
}
|
||||
|
||||
public async deleteDirectory(path: string, options?: { recursive?: boolean }): Promise<void> {
|
||||
await fs.rm(path, {
|
||||
recursive: options?.recursive,
|
||||
force: true,
|
||||
});
|
||||
}
|
||||
|
||||
public async directoryExists(path: string): Promise<boolean> {
|
||||
try {
|
||||
const stats = await fs.stat(path);
|
||||
return stats.isDirectory();
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public async directoryStat(path: string): Promise<IFileStats> {
|
||||
const stats = await fs.stat(path);
|
||||
return this.convertStats(stats);
|
||||
}
|
||||
|
||||
// --- Watch Operations ---
|
||||
|
||||
public async watch(path: string, callback: TWatchCallback, options?: IWatchOptions): Promise<IWatcherHandle> {
|
||||
const watcher = fsSync.watch(
|
||||
path,
|
||||
{
|
||||
recursive: options?.recursive,
|
||||
},
|
||||
async (eventType, filename) => {
|
||||
if (!filename) return;
|
||||
|
||||
const fullPath = pathModule.join(path, filename);
|
||||
|
||||
// Apply filter
|
||||
if (options?.filter && !this.matchesPathFilter(fullPath, options.filter)) {
|
||||
return;
|
||||
}
|
||||
|
||||
// Determine event type
|
||||
let type: TWatchEventType = 'change';
|
||||
try {
|
||||
await fs.access(fullPath);
|
||||
type = eventType === 'rename' ? 'add' : 'change';
|
||||
} catch {
|
||||
type = 'delete';
|
||||
}
|
||||
|
||||
// Get stats if available
|
||||
let stats: IFileStats | undefined;
|
||||
if (type !== 'delete') {
|
||||
try {
|
||||
stats = await this.fileStat(fullPath);
|
||||
} catch {
|
||||
// Ignore stat errors
|
||||
}
|
||||
}
|
||||
|
||||
const event: IWatchEvent = {
|
||||
type,
|
||||
path: fullPath,
|
||||
timestamp: new Date(),
|
||||
stats,
|
||||
};
|
||||
|
||||
await callback(event);
|
||||
},
|
||||
);
|
||||
|
||||
return {
|
||||
stop: async () => {
|
||||
watcher.close();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// --- Transaction Operations ---
|
||||
|
||||
public async prepareTransaction(operations: ITransactionOperation[]): Promise<ITransactionOperation[]> {
|
||||
const prepared: ITransactionOperation[] = [];
|
||||
|
||||
for (const op of operations) {
|
||||
const preparedOp = { ...op };
|
||||
|
||||
// Create backup for rollback
|
||||
try {
|
||||
const exists = await this.fileExists(op.path);
|
||||
if (exists) {
|
||||
const content = await this.readFile(op.path);
|
||||
const stats = await this.fileStat(op.path);
|
||||
preparedOp.backup = {
|
||||
existed: true,
|
||||
content: Buffer.isBuffer(content) ? content : Buffer.from(content),
|
||||
stats,
|
||||
};
|
||||
} else {
|
||||
preparedOp.backup = {
|
||||
existed: false,
|
||||
};
|
||||
}
|
||||
} catch {
|
||||
preparedOp.backup = {
|
||||
existed: false,
|
||||
};
|
||||
}
|
||||
|
||||
prepared.push(preparedOp);
|
||||
}
|
||||
|
||||
return prepared;
|
||||
}
|
||||
|
||||
public async executeTransaction(operations: ITransactionOperation[]): Promise<void> {
|
||||
for (const op of operations) {
|
||||
try {
|
||||
switch (op.type) {
|
||||
case 'write':
|
||||
await this.writeFile(op.path, op.content!, { encoding: op.encoding });
|
||||
break;
|
||||
case 'append':
|
||||
await this.appendFile(op.path, op.content!, { encoding: op.encoding });
|
||||
break;
|
||||
case 'delete':
|
||||
await this.deleteFile(op.path);
|
||||
break;
|
||||
case 'copy':
|
||||
await this.copyFile(op.path, op.targetPath!);
|
||||
break;
|
||||
case 'move':
|
||||
await this.moveFile(op.path, op.targetPath!);
|
||||
break;
|
||||
}
|
||||
} catch (error) {
|
||||
// On error, rollback the transaction
|
||||
await this.rollbackTransaction(operations);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public async rollbackTransaction(operations: ITransactionOperation[]): Promise<void> {
|
||||
// Rollback in reverse order
|
||||
for (let i = operations.length - 1; i >= 0; i--) {
|
||||
const op = operations[i];
|
||||
if (!op.backup) continue;
|
||||
|
||||
try {
|
||||
if (op.backup.existed && op.backup.content) {
|
||||
// Restore original content
|
||||
await this.writeFile(op.path, op.backup.content);
|
||||
} else if (!op.backup.existed) {
|
||||
// Delete file that was created
|
||||
try {
|
||||
await this.deleteFile(op.path);
|
||||
} catch {
|
||||
// Ignore errors
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Ignore rollback errors
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// --- Path Operations ---
|
||||
|
||||
public normalizePath(path: string): string {
|
||||
return pathModule.normalize(path);
|
||||
}
|
||||
|
||||
public joinPath(...segments: string[]): string {
|
||||
return pathModule.join(...segments);
|
||||
}
|
||||
|
||||
// --- Helper Methods ---
|
||||
|
||||
private convertStats(stats: fsSync.Stats): IFileStats {
|
||||
return {
|
||||
size: stats.size,
|
||||
birthtime: stats.birthtime,
|
||||
mtime: stats.mtime,
|
||||
atime: stats.atime,
|
||||
isFile: stats.isFile(),
|
||||
isDirectory: stats.isDirectory(),
|
||||
isSymbolicLink: stats.isSymbolicLink(),
|
||||
mode: stats.mode,
|
||||
};
|
||||
}
|
||||
|
||||
private matchesFilter(
|
||||
entry: IDirectoryEntry,
|
||||
filter: string | RegExp | ((entry: IDirectoryEntry) => boolean),
|
||||
): boolean {
|
||||
if (typeof filter === 'function') {
|
||||
return filter(entry);
|
||||
} else if (filter instanceof RegExp) {
|
||||
return filter.test(entry.name);
|
||||
} else {
|
||||
// Simple glob-like pattern matching
|
||||
const pattern = filter.replace(/\*/g, '.*');
|
||||
const regex = new RegExp(`^${pattern}$`);
|
||||
return regex.test(entry.name);
|
||||
}
|
||||
}
|
||||
|
||||
private matchesPathFilter(
|
||||
path: string,
|
||||
filter: string | RegExp | ((path: string) => boolean),
|
||||
): boolean {
|
||||
if (typeof filter === 'function') {
|
||||
return filter(path);
|
||||
} else if (filter instanceof RegExp) {
|
||||
return filter.test(path);
|
||||
} else {
|
||||
// Simple glob-like pattern matching
|
||||
const pattern = filter.replace(/\*/g, '.*');
|
||||
const regex = new RegExp(`^${pattern}$`);
|
||||
return regex.test(path);
|
||||
}
|
||||
}
|
||||
|
||||
// --- Stream Conversion Helpers ---
|
||||
|
||||
private nodeReadableToWeb(nodeStream: Readable): ReadableStream<Uint8Array> {
|
||||
return new ReadableStream({
|
||||
start(controller) {
|
||||
nodeStream.on('data', (chunk: Buffer) => {
|
||||
controller.enqueue(new Uint8Array(chunk));
|
||||
});
|
||||
|
||||
nodeStream.on('end', () => {
|
||||
controller.close();
|
||||
});
|
||||
|
||||
nodeStream.on('error', (error) => {
|
||||
controller.error(error);
|
||||
});
|
||||
},
|
||||
cancel() {
|
||||
nodeStream.destroy();
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
private nodeWritableToWeb(nodeStream: Writable): WritableStream<Uint8Array> {
|
||||
return new WritableStream({
|
||||
write(chunk) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const canContinue = nodeStream.write(Buffer.from(chunk));
|
||||
if (canContinue) {
|
||||
resolve();
|
||||
} else {
|
||||
nodeStream.once('drain', resolve);
|
||||
nodeStream.once('error', reject);
|
||||
}
|
||||
});
|
||||
},
|
||||
close() {
|
||||
return new Promise((resolve, reject) => {
|
||||
nodeStream.end();
|
||||
nodeStream.once('finish', resolve);
|
||||
nodeStream.once('error', reject);
|
||||
});
|
||||
},
|
||||
abort(reason) {
|
||||
nodeStream.destroy(new Error(reason));
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
9
ts/smartfs.plugins.ts
Normal file
9
ts/smartfs.plugins.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
// native scope
|
||||
import * as path from 'path';
|
||||
|
||||
export { path };
|
||||
|
||||
// @push.rocks scope
|
||||
import * as smartpath from '@push.rocks/smartpath';
|
||||
|
||||
export { smartpath };
|
||||
12
tsconfig.json
Normal file
12
tsconfig.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"compilerOptions": {
|
||||
"target": "ES2022",
|
||||
"module": "NodeNext",
|
||||
"moduleResolution": "NodeNext",
|
||||
"esModuleInterop": true,
|
||||
"verbatimModuleSyntax": true,
|
||||
"baseUrl": ".",
|
||||
"paths": {}
|
||||
},
|
||||
"exclude": ["dist_*/**/*.d.ts"]
|
||||
}
|
||||
Reference in New Issue
Block a user