Compare commits

..

5 Commits

17 changed files with 3693 additions and 4820 deletions

View File

@@ -1,5 +1,27 @@
# Changelog
## 2026-03-14 - 4.5.1 - fix(readme)
refresh documentation wording and simplify README formatting
- removes emoji-heavy headings and inline log examples for a cleaner presentation
- clarifies S3-compatible storage wording in the project description
- streamlines testing and error handling sections without changing library functionality
## 2026-03-14 - 4.5.0 - feat(storage)
generalize S3 client and watcher interfaces to storage-oriented naming with backward compatibility
- rename SmartBucket internals from s3Client to storageClient and normalize configuration using storage descriptor types
- update watcher and interface types from S3-specific names to storage-oriented equivalents while keeping deprecated aliases
- refresh tests and documentation comments to reflect S3-compatible object storage terminology
- bump build, test, AWS SDK, and related dependency versions
## 2026-01-25 - 4.4.1 - fix(tests)
add explicit 'as string' type assertions to environment variable retrievals in tests
- Add 'as string' assertions for S3_ACCESSKEY, S3_SECRETKEY, S3_ENDPOINT, and S3_BUCKET to satisfy TypeScript typings in tests
- Cast S3_PORT to string before parseInt to ensure correct input type and avoid compiler errors
- Changes are localized to test/test.listing.node+deno.ts
## 2026-01-25 - 4.4.0 - feat(watcher)
add polling-based BucketWatcher to detect add/modify/delete events and expose RxJS Observable and EventEmitter APIs

3046
deno.lock generated

File diff suppressed because it is too large. [Load Diff]

View File

@@ -1,6 +1,6 @@
{
"name": "@push.rocks/smartbucket",
"version": "4.4.0",
"version": "4.5.1",
"description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
@@ -12,23 +12,24 @@
"build": "(tsbuild tsfolders --allowimplicitany)"
},
"devDependencies": {
"@git.zone/tsbuild": "^4.1.2",
"@git.zone/tsbuild": "^4.3.0",
"@git.zone/tsrun": "^2.0.1",
"@git.zone/tstest": "^3.1.6",
"@git.zone/tstest": "^3.3.2",
"@push.rocks/qenv": "^6.1.3",
"@push.rocks/tapbundle": "^6.0.3"
"@push.rocks/tapbundle": "^6.0.3",
"@types/node": "^22.15.29"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.975.0",
"@aws-sdk/client-s3": "^3.1009.0",
"@push.rocks/smartmime": "^2.0.4",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartrx": "^3.0.10",
"@push.rocks/smartstream": "^3.2.5",
"@push.rocks/smartstream": "^3.4.0",
"@push.rocks/smartstring": "^4.1.0",
"@push.rocks/smartunique": "^3.0.9",
"@tsclass/tsclass": "^9.3.0",
"minimatch": "^10.1.1"
"@tsclass/tsclass": "^9.4.0",
"minimatch": "^10.2.4"
},
"private": false,
"files": [

4910
pnpm-lock.yaml generated

File diff suppressed because it is too large. [Load Diff]

332
readme.md
View File

@@ -1,25 +1,25 @@
# @push.rocks/smartbucket 🪣
# @push.rocks/smartbucket
A powerful, cloud-agnostic TypeScript library for object storage that makes S3 feel like a modern filesystem. Built for developers who demand simplicity, type-safety, and advanced features like real-time bucket watching, metadata management, file locking, intelligent trash handling, and memory-efficient streaming.
A powerful, cloud-agnostic TypeScript library for object storage that makes S3-compatible storage feel like a modern filesystem. Built for developers who demand simplicity, type-safety, and advanced features like real-time bucket watching, metadata management, file locking, intelligent trash handling, and memory-efficient streaming.
## Issue Reporting and Security
For reporting bugs, issues, or security vulnerabilities, please visit [community.foss.global/](https://community.foss.global/). This is the central community hub for all issue reporting. Developers who sign and comply with our contribution agreement and go through identification can also get a [code.foss.global/](https://code.foss.global/) account to submit Pull Requests directly.
## Why SmartBucket? 🎯
## Why SmartBucket?
- **🌍 Cloud Agnostic** - Write once, run on AWS S3, MinIO, DigitalOcean Spaces, Backblaze B2, Wasabi, Cloudflare R2, or any S3-compatible storage
- **🚀 Modern TypeScript** - First-class TypeScript support with complete type definitions and async/await patterns
- **👀 Real-Time Watching** - Monitor bucket changes with polling-based watcher supporting RxJS and EventEmitter patterns
- **💾 Memory Efficient** - Handle millions of files with async generators, RxJS observables, and cursor pagination
- **🗑️ Smart Trash System** - Recover accidentally deleted files with built-in trash and restore functionality
- **🔒 File Locking** - Prevent concurrent modifications with built-in locking mechanisms
- **🏷️ Rich Metadata** - Attach custom metadata to any file for powerful organization and search
- **🌊 Streaming Support** - Efficient handling of large files with Node.js and Web streams
- **📁 Directory-like API** - Intuitive filesystem-like operations on object storage
- **Fail-Fast** - Strict-by-default API catches errors immediately with precise stack traces
- **Cloud Agnostic** - Write once, run on AWS S3, MinIO, DigitalOcean Spaces, Backblaze B2, Wasabi, Cloudflare R2, or any S3-compatible storage
- **Modern TypeScript** - First-class TypeScript support with complete type definitions and async/await patterns
- **Real-Time Watching** - Monitor bucket changes with polling-based watcher supporting RxJS and EventEmitter patterns
- **Memory Efficient** - Handle millions of files with async generators, RxJS observables, and cursor pagination
- **Smart Trash System** - Recover accidentally deleted files with built-in trash and restore functionality
- **File Locking** - Prevent concurrent modifications with built-in locking mechanisms
- **Rich Metadata** - Attach custom metadata to any file for powerful organization and search
- **Streaming Support** - Efficient handling of large files with Node.js and Web streams
- **Directory-like API** - Intuitive filesystem-like operations on object storage
- **Fail-Fast** - Strict-by-default API catches errors immediately with precise stack traces
## Quick Start 🚀
## Quick Start
```typescript
import { SmartBucket } from '@push.rocks/smartbucket';
@@ -44,22 +44,22 @@ await bucket.fastPut({
// Download it back
const data = await bucket.fastGet({ path: 'users/profile.json' });
console.log('📄', JSON.parse(data.toString()));
console.log(JSON.parse(data.toString()));
// List files efficiently (even with millions of objects!)
for await (const key of bucket.listAllObjects('users/')) {
console.log('🔍 Found:', key);
console.log('Found:', key);
}
// Watch for changes in real-time
const watcher = bucket.createWatcher({ prefix: 'uploads/', pollIntervalMs: 3000 });
watcher.changeSubject.subscribe((change) => {
console.log('🔔 Change detected:', change.type, change.key);
console.log('Change detected:', change.type, change.key);
});
await watcher.start();
```
## Install 📦
## Install
```bash
# Using pnpm (recommended)
@@ -69,24 +69,24 @@ pnpm add @push.rocks/smartbucket
npm install @push.rocks/smartbucket --save
```
## Usage 🚀
## Usage
### Table of Contents
1. [🏁 Getting Started](#-getting-started)
2. [🗂️ Working with Buckets](#-working-with-buckets)
3. [📁 File Operations](#-file-operations)
4. [📋 Memory-Efficient Listing](#-memory-efficient-listing)
5. [👀 Bucket Watching](#-bucket-watching)
6. [📂 Directory Management](#-directory-management)
7. [🌊 Streaming Operations](#-streaming-operations)
8. [🔒 File Locking](#-file-locking)
9. [🏷️ Metadata Management](#-metadata-management)
10. [🗑️ Trash & Recovery](#-trash--recovery)
11. [Advanced Features](#-advanced-features)
12. [☁️ Cloud Provider Support](#-cloud-provider-support)
1. [Getting Started](#getting-started)
2. [Working with Buckets](#working-with-buckets)
3. [File Operations](#file-operations)
4. [Memory-Efficient Listing](#memory-efficient-listing)
5. [Bucket Watching](#bucket-watching)
6. [Directory Management](#directory-management)
7. [Streaming Operations](#streaming-operations)
8. [File Locking](#file-locking)
9. [Metadata Management](#metadata-management)
10. [Trash & Recovery](#trash--recovery)
11. [Advanced Features](#advanced-features)
12. [Cloud Provider Support](#cloud-provider-support)
### 🏁 Getting Started
### Getting Started
First, set up your storage connection:
@@ -104,7 +104,7 @@ const smartBucket = new SmartBucket({
});
```
**For MinIO or self-hosted S3:**
**For MinIO or self-hosted storage:**
```typescript
const smartBucket = new SmartBucket({
accessKey: 'minioadmin',
@@ -115,14 +115,13 @@ const smartBucket = new SmartBucket({
});
```
### 🗂️ Working with Buckets
### Working with Buckets
#### Creating Buckets
```typescript
// Create a new bucket
const myBucket = await smartBucket.createBucket('my-awesome-bucket');
console.log(`✅ Bucket created: ${myBucket.name}`);
```
#### Getting Existing Buckets
@@ -134,7 +133,6 @@ const existingBucket = await smartBucket.getBucketByName('existing-bucket');
// Check first, then get (non-throwing approach)
if (await smartBucket.bucketExists('maybe-exists')) {
const bucket = await smartBucket.getBucketByName('maybe-exists');
console.log('✅ Found bucket:', bucket.name);
}
```
@@ -143,10 +141,9 @@ if (await smartBucket.bucketExists('maybe-exists')) {
```typescript
// Delete a bucket (must be empty)
await smartBucket.removeBucket('old-bucket');
console.log('🗑️ Bucket removed');
```
### 📁 File Operations
### File Operations
#### Upload Files
@@ -158,7 +155,6 @@ const file = await bucket.fastPut({
path: 'documents/report.pdf',
contents: Buffer.from('Your file content here')
});
console.log('✅ Uploaded:', file.getBasePath());
// Upload with string content
await bucket.fastPut({
@@ -180,9 +176,7 @@ try {
contents: 'new content'
});
} catch (error) {
console.error('Upload failed:', error.message);
// Error: Object already exists at path 'existing-file.txt' in bucket 'my-bucket'.
// Set overwrite:true to replace it.
console.error('Upload failed:', error.message);
}
```
@@ -193,7 +187,6 @@ try {
const fileContent = await bucket.fastGet({
path: 'documents/report.pdf'
});
console.log(`📄 File size: ${fileContent.length} bytes`);
// Get file as string
const textContent = fileContent.toString('utf-8');
@@ -208,7 +201,6 @@ const jsonData = JSON.parse(fileContent.toString());
const exists = await bucket.fastExists({
path: 'documents/report.pdf'
});
console.log(`File exists: ${exists ? '✅' : '❌'}`);
```
#### Delete Files
@@ -218,7 +210,6 @@ console.log(`File exists: ${exists ? '✅' : '❌'}`);
await bucket.fastRemove({
path: 'old-file.txt'
});
console.log('🗑️ File deleted permanently');
```
#### Copy & Move Files
@@ -229,14 +220,12 @@ await bucket.fastCopy({
sourcePath: 'original/file.txt',
destinationPath: 'backup/file-copy.txt'
});
console.log('📋 File copied');
// Move file (copy + delete original)
await bucket.fastMove({
sourcePath: 'temp/draft.txt',
destinationPath: 'final/document.txt'
});
console.log('📦 File moved');
// Copy to different bucket
const targetBucket = await smartBucket.getBucketByName('backup-bucket');
@@ -247,18 +236,18 @@ await bucket.fastCopy({
});
```
### 📋 Memory-Efficient Listing
### Memory-Efficient Listing
SmartBucket provides three powerful patterns for listing objects, optimized for handling **millions of files** efficiently:
#### Async Generators (Recommended)
#### Async Generators (Recommended)
Memory-efficient streaming using native JavaScript async iteration:
```typescript
// List all objects with prefix - streams one at a time!
for await (const key of bucket.listAllObjects('documents/')) {
console.log(`📄 Found: ${key}`);
console.log('Found:', key);
// Process each file individually (memory efficient!)
const content = await bucket.fastGet({ path: key });
@@ -268,39 +257,26 @@ for await (const key of bucket.listAllObjects('documents/')) {
if (shouldStop()) break;
}
// List all objects (no prefix)
const allKeys: string[] = [];
for await (const key of bucket.listAllObjects()) {
allKeys.push(key);
}
// Find objects matching glob patterns
for await (const key of bucket.findByGlob('**/*.json')) {
console.log(`📦 JSON file: ${key}`);
console.log('JSON file:', key);
}
// Complex glob patterns
for await (const key of bucket.findByGlob('npm/packages/*/index.json')) {
// Matches: npm/packages/foo/index.json, npm/packages/bar/index.json
console.log(`📦 Package index: ${key}`);
}
// More glob examples
for await (const key of bucket.findByGlob('logs/**/*.log')) {
console.log('📋 Log file:', key);
console.log('Package index:', key);
}
for await (const key of bucket.findByGlob('images/*.{jpg,png,gif}')) {
console.log('🖼️ Image:', key);
console.log('Image:', key);
}
```
**Why use async generators?**
- Processes one item at a time (constant memory usage)
- Supports early termination with `break`
- Native JavaScript - no dependencies
- Perfect for large buckets with millions of objects
- ✅ Works seamlessly with `for await...of` loops
- Processes one item at a time (constant memory usage)
- Supports early termination with `break`
- Native JavaScript - no dependencies
- Perfect for large buckets with millions of objects
#### RxJS Observables
@@ -317,16 +293,9 @@ bucket.listAllObjectsObservable('logs/')
map(key => ({ key, timestamp: Date.now() }))
)
.subscribe({
next: (item) => console.log(`📋 Log file: ${item.key}`),
error: (err) => console.error('Error:', err),
complete: () => console.log('Listing complete')
});
// Simple subscription without operators
bucket.listAllObjectsObservable('data/')
.subscribe({
next: (key) => processKey(key),
complete: () => console.log('✅ Done')
next: (item) => console.log('Log file:', item.key),
error: (err) => console.error('Error:', err),
complete: () => console.log('Listing complete')
});
// Combine with other observables
@@ -337,15 +306,9 @@ const backups$ = bucket.listAllObjectsObservable('backups/');
merge(logs$, backups$)
.pipe(filter(key => key.includes('2024')))
.subscribe(key => console.log('📅 2024 file:', key));
.subscribe(key => console.log('2024 file:', key));
```
**Why use observables?**
- ✅ Rich operator ecosystem (filter, map, debounce, etc.)
- ✅ Composable with other RxJS streams
- ✅ Perfect for reactive architectures
- ✅ Great for complex transformations
#### Cursor Pattern
Explicit pagination control for UI and resumable operations:
@@ -357,16 +320,13 @@ const cursor = bucket.createCursor('uploads/', { pageSize: 100 });
// Fetch pages manually
while (cursor.hasMore()) {
const page = await cursor.next();
console.log(`📄 Page has ${page.keys.length} items`);
console.log(`Page has ${page.keys.length} items`);
for (const key of page.keys) {
console.log(` - ${key}`);
}
if (page.done) {
console.log('✅ Reached end');
break;
}
if (page.done) break;
}
// Save and restore cursor state (perfect for resumable operations!)
@@ -380,42 +340,28 @@ const nextPage = await newCursor.next();
// Reset cursor to start over
cursor.reset();
const firstPage = await cursor.next(); // Back to the beginning
```
**Why use cursors?**
- ✅ Perfect for UI pagination (prev/next buttons)
- ✅ Save/restore state for resumable operations
- ✅ Explicit control over page fetching
- ✅ Great for implementing "Load More" buttons
#### Convenience Methods
```typescript
// Collect all keys into array (⚠️ WARNING: loads everything into memory!)
// Collect all keys into array (WARNING: loads everything into memory!)
const allKeys = await bucket.listAllObjectsArray('images/');
console.log(`📦 Found ${allKeys.length} images`);
// Only use for small result sets
const smallList = await bucket.listAllObjectsArray('config/');
if (smallList.length < 100) {
// Safe to process in memory
smallList.forEach(key => console.log(key));
}
console.log(`Found ${allKeys.length} images`);
```
**Performance Comparison:**
| Method | Memory Usage | Best For | Supports Early Exit |
|--------|-------------|----------|-------------------|
| **Async Generator** | O(1) - constant | Most use cases, large datasets | Yes |
| **Observable** | O(1) - constant | Reactive pipelines, RxJS apps | Yes |
| **Cursor** | O(pageSize) | UI pagination, resumable ops | Yes |
| **Array** | O(n) - grows with results | Small datasets (<10k items) | No |
| **Async Generator** | O(1) - constant | Most use cases, large datasets | Yes |
| **Observable** | O(1) - constant | Reactive pipelines, RxJS apps | Yes |
| **Cursor** | O(pageSize) | UI pagination, resumable ops | Yes |
| **Array** | O(n) - grows with results | Small datasets (<10k items) | No |
### 👀 Bucket Watching
### Bucket Watching
Monitor your S3 bucket for changes in real-time with the powerful `BucketWatcher`:
Monitor your storage bucket for changes in real-time with the powerful `BucketWatcher`:
```typescript
// Create a watcher for a specific prefix
@@ -428,21 +374,21 @@ const watcher = bucket.createWatcher({
// RxJS Observable pattern (recommended for reactive apps)
watcher.changeSubject.subscribe((change) => {
if (change.type === 'add') {
console.log('📥 New file:', change.key);
console.log('New file:', change.key);
} else if (change.type === 'modify') {
console.log('✏️ Modified:', change.key);
console.log('Modified:', change.key);
} else if (change.type === 'delete') {
console.log('🗑️ Deleted:', change.key);
console.log('Deleted:', change.key);
}
});
// EventEmitter pattern (classic Node.js style)
watcher.on('change', (change) => {
console.log(`🔔 ${change.type}: ${change.key}`);
console.log(`${change.type}: ${change.key}`);
});
watcher.on('error', (err) => {
console.error('Watcher error:', err);
console.error('Watcher error:', err);
});
// Start watching
@@ -450,7 +396,7 @@ await watcher.start();
// Wait until watcher is ready (initial state built)
await watcher.readyDeferred.promise;
console.log('👀 Watcher is now monitoring the bucket');
console.log('Watcher is now monitoring the bucket');
// ... your application runs ...
@@ -486,7 +432,7 @@ const watcher = bucket.createWatcher({
// Receive batched events as arrays
watcher.changeSubject.subscribe((changes) => {
if (Array.isArray(changes)) {
console.log(`📦 Batch of ${changes.length} changes:`);
console.log(`Batch of ${changes.length} changes:`);
changes.forEach(c => console.log(` - ${c.type}: ${c.key}`));
}
});
@@ -497,7 +443,7 @@ await watcher.start();
#### Change Event Structure
```typescript
interface IS3ChangeEvent {
interface IStorageChangeEvent {
type: 'add' | 'modify' | 'delete';
key: string; // Object key (path)
bucket: string; // Bucket name
@@ -509,14 +455,14 @@ interface IS3ChangeEvent {
#### Watch Use Cases
- 🔄 **Sync systems** - Detect changes to trigger synchronization
- 📊 **Analytics** - Track file uploads/modifications in real-time
- 🔔 **Notifications** - Alert users when their files are ready
- 🔄 **Processing pipelines** - Trigger workflows on new file uploads
- 🗄️ **Backup systems** - Detect changes for incremental backups
- 📝 **Audit logs** - Track all bucket activity
- **Sync systems** - Detect changes to trigger synchronization
- **Analytics** - Track file uploads/modifications in real-time
- **Notifications** - Alert users when their files are ready
- **Processing pipelines** - Trigger workflows on new file uploads
- **Backup systems** - Detect changes for incremental backups
- **Audit logs** - Track all bucket activity
### 📂 Directory Management
### Directory Management
SmartBucket provides powerful directory-like operations for organizing your files:
@@ -528,8 +474,8 @@ const baseDir = await bucket.getBaseDirectory();
const directories = await baseDir.listDirectories();
const files = await baseDir.listFiles();
console.log(`📁 Found ${directories.length} directories`);
console.log(`📄 Found ${files.length} files`);
console.log(`Found ${directories.length} directories`);
console.log(`Found ${files.length} files`);
// Navigate subdirectories
const subDir = await baseDir.getSubDirectoryByName('projects/2024');
@@ -542,10 +488,9 @@ await subDir.fastPut({
// Get directory tree structure
const tree = await subDir.getTreeArray();
console.log('🌳 Directory tree:', tree);
// Get directory path
console.log('📂 Base path:', subDir.getBasePath()); // "projects/2024/"
console.log('Base path:', subDir.getBasePath()); // "projects/2024/"
// Create empty file as placeholder
await subDir.createEmptyFile('placeholder.txt');
@@ -555,7 +500,7 @@ const fileExists = await subDir.fileExists({ path: 'report.pdf' });
const dirExists = await baseDir.directoryExists('projects');
```
### 🌊 Streaming Operations
### Streaming Operations
Handle large files efficiently with streaming support:
@@ -604,7 +549,6 @@ await bucket.fastPutStream({
'x-custom-header': 'my-value'
}
});
console.log('✅ Large file uploaded via stream');
```
#### Reactive Streams with RxJS
@@ -618,7 +562,7 @@ const replaySubject = await bucket.fastGetReplaySubject({
// Multiple subscribers can consume the same data
replaySubject.subscribe({
next: (chunk) => processChunk(chunk),
complete: () => console.log('Stream complete')
complete: () => console.log('Stream complete')
});
replaySubject.subscribe({
@@ -626,7 +570,7 @@ replaySubject.subscribe({
});
```
### 🔒 File Locking
### File Locking
Prevent concurrent modifications with built-in file locking:
@@ -636,19 +580,10 @@ const file = await baseDir.getFile({ path: 'important-config.json' });
// Lock file for 10 minutes
await file.lock({ timeoutMillis: 600000 });
console.log('🔒 File locked');
// Try to modify locked file (will throw error)
try {
await file.delete();
} catch (error) {
console.log('❌ Cannot delete locked file');
}
// Check lock status via metadata
const metadata = await file.getMetaData();
const isLocked = await metadata.checkLocked();
console.log(`Lock status: ${isLocked ? '🔒 Locked' : '🔓 Unlocked'}`);
// Get lock info
const lockInfo = await metadata.getLockInfo();
@@ -656,19 +591,18 @@ console.log(`Lock expires: ${new Date(lockInfo.expires)}`);
// Unlock when done
await file.unlock();
console.log('🔓 File unlocked');
// Force unlock (even if locked by another process)
await file.unlock({ force: true });
```
**Lock use cases:**
- 🔄 Prevent concurrent writes during critical updates
- 🔐 Protect configuration files during deployment
- 🚦 Coordinate distributed workers
- 🛡️ Ensure data consistency
- Prevent concurrent writes during critical updates
- Protect configuration files during deployment
- Coordinate distributed workers
- Ensure data consistency
### 🏷️ Metadata Management
### Metadata Management
Attach and manage rich metadata for your files:
@@ -697,36 +631,31 @@ await metadata.storeCustomMetaData({
// Retrieve metadata
const author = await metadata.getCustomMetaData({ key: 'author' });
console.log(`📝 Author: ${author}`);
// Delete metadata
await metadata.deleteCustomMetaData({ key: 'workflow' });
// Check if file has any metadata
const hasMetadata = await file.hasMetaData();
console.log(`Has metadata: ${hasMetadata ? '✅' : '❌'}`);
// Get file type detection
const fileType = await metadata.getFileType({ useFileExtension: true });
console.log(`📄 MIME type: ${fileType?.mime}`);
// Get file type from magic bytes (more accurate)
const detectedType = await metadata.getFileType({ useMagicBytes: true });
console.log(`🔮 Detected type: ${detectedType?.mime}`);
// Get file size
const size = await metadata.getSizeInBytes();
console.log(`📊 Size: ${size} bytes`);
```
**Metadata use cases:**
- 👤 Track file ownership and authorship
- 🏷️ Add tags and categories for search
- 📊 Store processing status or workflow state
- 🔍 Enable rich querying and filtering
- 📝 Maintain audit trails
- Track file ownership and authorship
- Add tags and categories for search
- Store processing status or workflow state
- Enable rich querying and filtering
- Maintain audit trails
### 🗑️ Trash & Recovery
### Trash & Recovery
SmartBucket includes an intelligent trash system for safe file deletion and recovery:
@@ -736,17 +665,14 @@ const file = await baseDir.getFile({ path: 'important-data.xlsx' });
// Move to trash instead of permanent deletion
await file.delete({ mode: 'trash' });
console.log('🗑️ File moved to trash (can be restored!)');
// Permanent deletion (use with caution!)
await file.delete({ mode: 'permanent' });
console.log('💀 File permanently deleted (cannot be recovered)');
// Access trash
const trash = await bucket.getTrash();
const trashDir = await trash.getTrashDir();
const trashedFiles = await trashDir.listFiles();
console.log(`📦 ${trashedFiles.length} files in trash`);
// Restore from trash
const trashedFile = await baseDir.getFile({
@@ -755,32 +681,30 @@ const trashedFile = await baseDir.getFile({
});
await trashedFile.restore({ useOriginalPath: true });
console.log('♻️ File restored to original location');
// Or restore to a different location
await trashedFile.restore({
toPath: 'recovered/important-data.xlsx'
});
console.log('♻️ File restored to new location');
```
**Trash features:**
- ♻️ Recover accidentally deleted files
- 🏷️ Preserves original path in metadata
- Tracks deletion timestamp
- 🔍 List and inspect trashed files
- Recover accidentally deleted files
- Preserves original path in metadata
- Tracks deletion timestamp
- List and inspect trashed files
### Advanced Features
### Advanced Features
#### File Statistics
```typescript
// Get detailed file statistics
const stats = await bucket.fastStat({ path: 'document.pdf' });
console.log(`📊 Size: ${stats.ContentLength} bytes`);
console.log(`📅 Last modified: ${stats.LastModified}`);
console.log(`🏷️ ETag: ${stats.ETag}`);
console.log(`🗂️ Content type: ${stats.ContentType}`);
console.log(`Size: ${stats.ContentLength} bytes`);
console.log(`Last modified: ${stats.LastModified}`);
console.log(`ETag: ${stats.ETag}`);
console.log(`Content type: ${stats.ContentType}`);
```
#### Magic Bytes Detection
@@ -793,7 +717,6 @@ const magicBytes = await bucket.getMagicBytes({
path: 'mystery-file',
length: 16
});
console.log(`🔮 Magic bytes: ${magicBytes.toString('hex')}`);
// Or from a File object
const baseDir = await bucket.getBaseDirectory();
@@ -802,9 +725,9 @@ const magic = await file.getMagicBytes({ length: 4 });
// Check file signatures
if (magic[0] === 0xFF && magic[1] === 0xD8) {
console.log('📸 This is a JPEG image');
console.log('This is a JPEG image');
} else if (magic[0] === 0x89 && magic[1] === 0x50) {
console.log('🖼️ This is a PNG image');
console.log('This is a PNG image');
}
```
@@ -816,7 +739,6 @@ const file = await baseDir.getFile({ path: 'config.json' });
// Read JSON data
const config = await file.getJsonData();
console.log('⚙️ Config loaded:', config);
// Update JSON data
config.version = '2.0';
@@ -824,7 +746,6 @@ config.updated = new Date().toISOString();
config.features.push('newFeature');
await file.writeJsonData(config);
console.log('💾 Config updated');
```
#### Directory & File Type Detection
@@ -835,35 +756,29 @@ const isDir = await bucket.isDirectory({ path: 'uploads/' });
// Check if path is a file
const isFile = await bucket.isFile({ path: 'uploads/document.pdf' });
console.log(`Is directory: ${isDir ? '📁' : '❌'}`);
console.log(`Is file: ${isFile ? '📄' : '❌'}`);
```
#### Clean Bucket Contents
```typescript
// Remove all files and directories (⚠️ use with caution!)
// Remove all files and directories (use with caution!)
await bucket.cleanAllContents();
console.log('🧹 Bucket cleaned');
```
### ☁️ Cloud Provider Support
### Cloud Provider Support
SmartBucket works seamlessly with all major S3-compatible providers:
| Provider | Status | Notes |
|----------|--------|-------|
| **AWS S3** | ✅ Full support | Native S3 API |
| **MinIO** | ✅ Full support | Self-hosted, perfect for development |
| **DigitalOcean Spaces** | ✅ Full support | Cost-effective S3-compatible |
| **Backblaze B2** | ✅ Full support | Very affordable storage |
| **Wasabi** | ✅ Full support | High-performance hot storage |
| **Google Cloud Storage** | ✅ Full support | Via S3-compatible API |
| **Cloudflare R2** | ✅ Full support | Zero egress fees |
| **Any S3-compatible** | ✅ Full support | Works with any S3-compatible provider |
The library automatically handles provider quirks and optimizes operations for each platform while maintaining a consistent API.
| **AWS S3** | Supported | Native S3 API |
| **MinIO** | Supported | Self-hosted, perfect for development |
| **DigitalOcean Spaces** | Supported | Cost-effective S3-compatible |
| **Backblaze B2** | Supported | Very affordable storage |
| **Wasabi** | Supported | High-performance hot storage |
| **Google Cloud Storage** | Supported | Via S3-compatible API |
| **Cloudflare R2** | Supported | Zero egress fees |
| **Any S3-compatible** | Supported | Works with any S3-compatible provider |
**Configuration examples:**
@@ -914,7 +829,7 @@ const r2Storage = new SmartBucket({
});
```
### 🔧 Advanced Configuration
### Advanced Configuration
```typescript
// Environment-based configuration with @push.rocks/qenv
@@ -932,9 +847,7 @@ const smartBucket = new SmartBucket({
});
```
### 🧪 Testing
SmartBucket is thoroughly tested with 97 comprehensive tests covering all features:
### Testing
```bash
# Run all tests
@@ -947,18 +860,18 @@ pnpm tstest test/test.watcher.node.ts --verbose
pnpm test --logfile
```
### 🛡️ Error Handling Best Practices
### Error Handling Best Practices
SmartBucket uses a **strict-by-default** approach - methods throw errors instead of returning null:
```typescript
// ✅ Good: Check existence first
// Check existence first
if (await bucket.fastExists({ path: 'file.txt' })) {
const content = await bucket.fastGet({ path: 'file.txt' });
process(content);
}
// ✅ Good: Try/catch for expected failures
// Try/catch for expected failures
try {
const file = await bucket.fastGet({ path: 'might-not-exist.txt' });
process(file);
@@ -967,7 +880,7 @@ try {
useDefault();
}
// ✅ Good: Explicit overwrite control
// Explicit overwrite control
try {
await bucket.fastPut({
path: 'existing-file.txt',
@@ -977,12 +890,9 @@ try {
} catch (error) {
console.log('File already exists');
}
// ❌ Bad: Assuming file exists without checking
const content = await bucket.fastGet({ path: 'file.txt' }); // May throw!
```
### 💡 Best Practices
### Best Practices
1. **Always use strict mode** for critical operations to catch errors early
2. **Check existence first** with `fastExists()`, `bucketExists()`, etc. before operations
@@ -995,7 +905,7 @@ const content = await bucket.fastGet({ path: 'file.txt' }); // May throw!
9. **Set explicit overwrite flags** to prevent accidental file overwrites
10. **Use the watcher** for real-time synchronization and event-driven architectures
### 📊 Performance Tips
### Performance Tips
- **Listing**: Use async generators or cursors for buckets with >10,000 objects
- **Uploads**: Use streams for files >100MB

View File

@@ -14,16 +14,16 @@ let testSmartbucket: smartbucket.SmartBucket;
// Setup: Create test bucket and populate with test data
tap.test('should create valid smartbucket and bucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_ACCESSKEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRETKEY'),
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
port: parseInt(await testQenv.getEnvVarOnDemand('S3_PORT')),
accessKey: await testQenv.getEnvVarOnDemand('S3_ACCESSKEY') as string,
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRETKEY') as string,
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT') as string,
port: parseInt(await testQenv.getEnvVarOnDemand('S3_PORT') as string),
useSsl: false,
});
testBucket = await smartbucket.Bucket.getBucketByName(
testSmartbucket,
await testQenv.getEnvVarOnDemand('S3_BUCKET')
await testQenv.getEnvVarOnDemand('S3_BUCKET') as string
);
expect(testBucket).toBeInstanceOf(smartbucket.Bucket);
});

View File

@@ -3,7 +3,7 @@ import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as plugins from '../ts/plugins.js';
import * as smartbucket from '../ts/index.js';
class FakeS3Client {
class FakeStorageClient {
private callIndex = 0;
constructor(private readonly pages: Array<Partial<plugins.s3.ListObjectsV2Output>>) {}
@@ -30,7 +30,7 @@ tap.test('MetaData.hasMetaData should return false when metadata file does not e
});
tap.test('getSubDirectoryByName should create correct parent chain for new nested directories', async () => {
const fakeSmartbucket = { s3Client: new FakeS3Client([{ Contents: [], CommonPrefixes: [] }]) } as unknown as smartbucket.SmartBucket;
const fakeSmartbucket = { storageClient: new FakeStorageClient([{ Contents: [], CommonPrefixes: [] }]) } as unknown as smartbucket.SmartBucket;
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
@@ -51,7 +51,7 @@ tap.test('listFiles should aggregate results across paginated ListObjectsV2 resp
Contents: Array.from({ length: 200 }, (_, index) => ({ Key: `file-${1000 + index}` })),
IsTruncated: false,
};
const fakeSmartbucket = { s3Client: new FakeS3Client([firstPage, secondPage]) } as unknown as smartbucket.SmartBucket;
const fakeSmartbucket = { storageClient: new FakeStorageClient([firstPage, secondPage]) } as unknown as smartbucket.SmartBucket;
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
@@ -61,7 +61,7 @@ tap.test('listFiles should aggregate results across paginated ListObjectsV2 resp
tap.test('listDirectories should aggregate CommonPrefixes across pagination', async () => {
const fakeSmartbucket = {
s3Client: new FakeS3Client([
storageClient: new FakeStorageClient([
{ CommonPrefixes: [{ Prefix: 'dirA/' }], IsTruncated: true, NextContinuationToken: 'token-1' },
{ CommonPrefixes: [{ Prefix: 'dirB/' }], IsTruncated: false },
]),

View File

@@ -2,7 +2,7 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as smartbucket from '../ts/index.js';
import type { IS3ChangeEvent } from '../ts/interfaces.js';
import type { IStorageChangeEvent } from '../ts/interfaces.js';
// Get test configuration
import * as qenv from '@push.rocks/qenv';
@@ -57,7 +57,7 @@ tap.test('should create watcher with custom options', async () => {
// ==========================
tap.test('should detect add events for new files', async () => {
const events: IS3ChangeEvent[] = [];
const events: IStorageChangeEvent[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-test/',
pollIntervalMs: 500,
@@ -94,7 +94,7 @@ tap.test('should detect add events for new files', async () => {
// ==========================
tap.test('should detect modify events for changed files', async () => {
const events: IS3ChangeEvent[] = [];
const events: IStorageChangeEvent[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-test/',
pollIntervalMs: 500,
@@ -131,7 +131,7 @@ tap.test('should detect modify events for changed files', async () => {
// ==========================
tap.test('should detect delete events for removed files', async () => {
const events: IS3ChangeEvent[] = [];
const events: IStorageChangeEvent[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-test/',
pollIntervalMs: 500,
@@ -174,7 +174,7 @@ tap.test('should emit initial state as add events when includeInitial is true',
contents: 'content 2',
});
const events: IS3ChangeEvent[] = [];
const events: IStorageChangeEvent[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-initial/',
pollIntervalMs: 10000, // Long interval - we only care about initial events
@@ -206,13 +206,13 @@ tap.test('should emit initial state as add events when includeInitial is true',
// ==========================
tap.test('should emit events via EventEmitter pattern', async () => {
const events: IS3ChangeEvent[] = [];
const events: IStorageChangeEvent[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-emitter/',
pollIntervalMs: 500,
});
watcher.on('change', (event: IS3ChangeEvent) => {
watcher.on('change', (event: IStorageChangeEvent) => {
events.push(event);
});
@@ -239,7 +239,7 @@ tap.test('should emit events via EventEmitter pattern', async () => {
// ==========================
tap.test('should buffer events when bufferTimeMs is set', async () => {
const bufferedEvents: (IS3ChangeEvent | IS3ChangeEvent[])[] = [];
const bufferedEvents: (IStorageChangeEvent | IStorageChangeEvent[])[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-buffer/',
pollIntervalMs: 200,
@@ -327,8 +327,8 @@ tap.test('should stop gracefully with stop()', async () => {
await watcher.stop();
// Watcher should not poll after stop
const eventsCaptured: IS3ChangeEvent[] = [];
watcher.on('change', (event: IS3ChangeEvent) => {
const eventsCaptured: IStorageChangeEvent[] = [];
watcher.on('change', (event: IStorageChangeEvent) => {
eventsCaptured.push(event);
});
@@ -362,7 +362,7 @@ tap.test('should stop gracefully with close() alias', async () => {
// ==========================
tap.test('should only detect changes within specified prefix', async () => {
const events: IS3ChangeEvent[] = [];
const events: IStorageChangeEvent[] = [];
const watcher = testBucket.createWatcher({
prefix: 'watcher-prefix-a/',
pollIntervalMs: 500,

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartbucket',
version: '4.4.0',
version: '4.5.1',
description: 'A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.'
}

View File

@@ -13,12 +13,12 @@ import { BucketWatcher } from './classes.watcher.js';
/**
* The bucket class exposes the basic functionality of a bucket.
* The functions of the bucket alone are enough to
* operate in S3 basic fashion on blobs of data.
* operate on blobs of data in an S3-compatible object store.
*/
export class Bucket {
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string): Promise<Bucket> {
const command = new plugins.s3.ListBucketsCommand({});
const buckets = await smartbucketRef.s3Client.send(command);
const buckets = await smartbucketRef.storageClient.send(command);
const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);
if (foundBucket) {
@@ -32,13 +32,13 @@ export class Bucket {
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command);
await smartbucketRef.storageClient.send(command);
return new Bucket(smartbucketRef, bucketName);
}
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command);
await smartbucketRef.storageClient.send(command);
}
public smartbucketRef: SmartBucket;
@@ -112,7 +112,7 @@ export class Bucket {
Key: reducedPath,
Body: optionsArg.contents,
});
await this.smartbucketRef.s3Client.send(command);
await this.smartbucketRef.storageClient.send(command);
console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
const parsedPath = plugins.path.parse(reducedPath);
@@ -172,7 +172,7 @@ export class Bucket {
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const response = await this.smartbucketRef.storageClient.send(command);
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
// Convert the stream to a format that supports piping
@@ -216,7 +216,7 @@ export class Bucket {
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const response = await this.smartbucketRef.storageClient.send(command);
const stream = response.Body as any; // SdkStreamMixin includes readable stream
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
@@ -272,7 +272,7 @@ export class Bucket {
Body: optionsArg.readableStream,
Metadata: optionsArg.nativeMetadata,
});
await this.smartbucketRef.s3Client.send(command);
await this.smartbucketRef.storageClient.send(command);
console.log(
`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
@@ -297,7 +297,7 @@ export class Bucket {
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;
// Retrieve current object information to use in copy conditions
const currentObjInfo = await this.smartbucketRef.s3Client.send(
const currentObjInfo = await this.smartbucketRef.storageClient.send(
new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.sourcePath,
@@ -319,7 +319,7 @@ export class Bucket {
Metadata: newNativeMetadata,
MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
});
await this.smartbucketRef.s3Client.send(command);
await this.smartbucketRef.storageClient.send(command);
} catch (err) {
console.error('Error updating metadata:', err);
throw err; // rethrow to allow caller to handle
@@ -379,7 +379,7 @@ export class Bucket {
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
await this.smartbucketRef.storageClient.send(command);
}
/**
@@ -393,7 +393,7 @@ export class Bucket {
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
await this.smartbucketRef.storageClient.send(command);
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
return true;
} catch (error: any) {
@@ -411,7 +411,7 @@ export class Bucket {
* deletes this bucket
*/
public async delete() {
await this.smartbucketRef.s3Client.send(
await this.smartbucketRef.storageClient.send(
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
);
}
@@ -422,7 +422,7 @@ export class Bucket {
Bucket: this.name,
Key: checkPath,
});
return this.smartbucketRef.s3Client.send(command);
return this.smartbucketRef.storageClient.send(command);
}
public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
@@ -432,7 +432,7 @@ export class Bucket {
Prefix: checkPath,
Delimiter: '/',
});
const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
const { CommonPrefixes } = await this.smartbucketRef.storageClient.send(command);
return !!CommonPrefixes && CommonPrefixes.length > 0;
}
@@ -443,7 +443,7 @@ export class Bucket {
Prefix: checkPath,
Delimiter: '/',
});
const { Contents } = await this.smartbucketRef.s3Client.send(command);
const { Contents } = await this.smartbucketRef.storageClient.send(command);
return !!Contents && Contents.length > 0;
}
@@ -454,7 +454,7 @@ export class Bucket {
Key: optionsArg.path,
Range: `bytes=0-${optionsArg.length - 1}`,
});
const response = await this.smartbucketRef.s3Client.send(command);
const response = await this.smartbucketRef.storageClient.send(command);
const chunks: Buffer[] = [];
const stream = response.Body as any; // SdkStreamMixin includes readable stream
@@ -497,7 +497,7 @@ export class Bucket {
ContinuationToken: continuationToken,
});
const response = await this.smartbucketRef.s3Client.send(command);
const response = await this.smartbucketRef.storageClient.send(command);
for (const obj of response.Contents || []) {
if (obj.Key) yield obj.Key;
@@ -531,7 +531,7 @@ export class Bucket {
ContinuationToken: token,
});
const response = await this.smartbucketRef.s3Client.send(command);
const response = await this.smartbucketRef.storageClient.send(command);
for (const obj of response.Contents || []) {
if (obj.Key) subscriber.next(obj.Key);
@@ -646,7 +646,7 @@ export class Bucket {
// Explicitly type the response
const response: plugins.s3.ListObjectsV2Output =
await this.smartbucketRef.s3Client.send(listCommand);
await this.smartbucketRef.storageClient.send(listCommand);
console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);
@@ -660,7 +660,7 @@ export class Bucket {
},
});
await this.smartbucketRef.s3Client.send(deleteCommand);
await this.smartbucketRef.storageClient.send(deleteCommand);
}
// Update continuation token and truncation status

View File

@@ -135,7 +135,7 @@ export class Directory {
Delimiter: delimiter,
ContinuationToken: continuationToken,
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const response = await this.bucketRef.smartbucketRef.storageClient.send(command);
if (response.Contents) {
allContents.push(...response.Contents);
@@ -213,7 +213,7 @@ export class Directory {
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const response = await this.bucketRef.smartbucketRef.storageClient.send(command);
return response.Contents;
}
@@ -222,12 +222,12 @@ export class Directory {
*/
public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
/**
* in s3 a directory does not exist if it is empty
* in object storage a directory does not exist if it is empty
* this option returns a directory even if it is empty
*/
getEmptyDirectory?: boolean;
/**
* in s3 a directory does not exist if it is empty
* in object storage a directory does not exist if it is empty
 * this option creates a directory even if it is empty, using an initializer file
*/
createWithInitializerFile?: boolean;

View File

@@ -12,7 +12,7 @@ export class File {
/**
* creates a file in draft mode
* you need to call .save() to store it in s3
* you need to call .save() to store it in object storage
* @param optionsArg
*/
public static async create(optionsArg: {

View File

@@ -45,7 +45,7 @@ export class ListCursor {
ContinuationToken: this.continuationToken,
});
const response = await this.bucket.smartbucketRef.s3Client.send(command);
const response = await this.bucket.smartbucketRef.storageClient.send(command);
const keys = (response.Contents || [])
.map((obj) => obj.Key)

View File

@@ -2,26 +2,28 @@
import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
import { normalizeS3Descriptor } from './helpers.js';
import { normalizeStorageDescriptor } from './helpers.js';
export class SmartBucket {
public config: plugins.tsclass.storage.IS3Descriptor;
public config: plugins.tsclass.storage.IStorageDescriptor;
public s3Client: plugins.s3.S3Client;
public storageClient: plugins.s3.S3Client;
/** @deprecated Use storageClient instead */
public get s3Client(): plugins.s3.S3Client {
return this.storageClient;
}
/**
* the constructor of SmartBucket
*/
/**
* the constructor of SmartBucket
*/
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
constructor(configArg: plugins.tsclass.storage.IStorageDescriptor) {
this.config = configArg;
// Use the normalizer to handle various endpoint formats
const { normalized } = normalizeS3Descriptor(configArg);
const { normalized } = normalizeStorageDescriptor(configArg);
this.s3Client = new plugins.s3.S3Client({
this.storageClient = new plugins.s3.S3Client({
endpoint: normalized.endpointUrl,
region: normalized.region,
credentials: normalized.credentials,
@@ -47,7 +49,7 @@ export class SmartBucket {
*/
public async bucketExists(bucketNameArg: string): Promise<boolean> {
const command = new plugins.s3.ListBucketsCommand({});
const buckets = await this.s3Client.send(command);
const buckets = await this.storageClient.send(command);
return buckets.Buckets?.some(bucket => bucket.Name === bucketNameArg) ?? false;
}
}

View File

@@ -6,7 +6,7 @@ import type { Bucket } from './classes.bucket.js';
import { EventEmitter } from 'node:events';
/**
* BucketWatcher monitors an S3 bucket for changes (add/modify/delete)
* BucketWatcher monitors a storage bucket for changes (add/modify/delete)
* using a polling-based approach. Designed to follow the SmartdataDbWatcher pattern.
*
* @example
@@ -34,11 +34,11 @@ export class BucketWatcher extends EventEmitter {
public readyDeferred = plugins.smartpromise.defer();
/** Observable for receiving change events (supports RxJS operators) */
public changeSubject: plugins.smartrx.rxjs.Observable<interfaces.IS3ChangeEvent | interfaces.IS3ChangeEvent[]>;
public changeSubject: plugins.smartrx.rxjs.Observable<interfaces.IStorageChangeEvent | interfaces.IStorageChangeEvent[]>;
// Internal subjects and state
private rawSubject: plugins.smartrx.rxjs.Subject<interfaces.IS3ChangeEvent>;
private previousState: Map<string, interfaces.IS3ObjectState>;
private rawSubject: plugins.smartrx.rxjs.Subject<interfaces.IStorageChangeEvent>;
private previousState: Map<string, interfaces.IStorageObjectState>;
private pollIntervalId: ReturnType<typeof setInterval> | null = null;
private isPolling = false;
private isStopped = false;
@@ -65,13 +65,13 @@ export class BucketWatcher extends EventEmitter {
this.previousState = new Map();
// Initialize raw subject for emitting changes
this.rawSubject = new plugins.smartrx.rxjs.Subject<interfaces.IS3ChangeEvent>();
this.rawSubject = new plugins.smartrx.rxjs.Subject<interfaces.IStorageChangeEvent>();
// Configure the public observable with optional buffering
if (this.bufferTimeMs && this.bufferTimeMs > 0) {
this.changeSubject = this.rawSubject.pipe(
plugins.smartrx.rxjs.ops.bufferTime(this.bufferTimeMs),
plugins.smartrx.rxjs.ops.filter((events: interfaces.IS3ChangeEvent[]) => events.length > 0)
plugins.smartrx.rxjs.ops.filter((events: interfaces.IStorageChangeEvent[]) => events.length > 0)
);
} else {
this.changeSubject = this.rawSubject.asObservable();
@@ -174,7 +174,7 @@ export class BucketWatcher extends EventEmitter {
try {
// Build current state
const currentState = new Map<string, interfaces.IS3ObjectState>();
const currentState = new Map<string, interfaces.IStorageObjectState>();
for await (const obj of this.listObjectsWithMetadata()) {
if (this.isStopped) {
@@ -205,7 +205,7 @@ export class BucketWatcher extends EventEmitter {
/**
* Detect changes between current and previous state
*/
private detectChanges(currentState: Map<string, interfaces.IS3ObjectState>): void {
private detectChanges(currentState: Map<string, interfaces.IStorageObjectState>): void {
// Detect added and modified objects
for (const [key, current] of currentState) {
const previous = this.previousState.get(key);
@@ -253,7 +253,7 @@ export class BucketWatcher extends EventEmitter {
/**
* Emit a change event via both RxJS Subject and EventEmitter
*/
private emitChange(event: interfaces.IS3ChangeEvent): void {
private emitChange(event: interfaces.IStorageChangeEvent): void {
this.rawSubject.next(event);
this.emit('change', event);
}
@@ -277,7 +277,7 @@ export class BucketWatcher extends EventEmitter {
ContinuationToken: continuationToken,
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const response = await this.bucketRef.smartbucketRef.storageClient.send(command);
for (const obj of response.Contents || []) {
yield obj;

View File

@@ -21,13 +21,13 @@ export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.I
return returnPath;
}
// S3 Descriptor Normalization
export interface IS3Warning {
// Storage Descriptor Normalization
export interface IStorageWarning {
code: string;
message: string;
}
export interface INormalizedS3Config {
export interface INormalizedStorageConfig {
endpointUrl: string;
host: string;
protocol: 'http' | 'https';
@@ -40,7 +40,7 @@ export interface INormalizedS3Config {
forcePathStyle: boolean;
}
function coerceBooleanMaybe(value: unknown): { value: boolean | undefined; warning?: IS3Warning } {
function coerceBooleanMaybe(value: unknown): { value: boolean | undefined; warning?: IStorageWarning } {
if (typeof value === 'boolean') return { value };
if (typeof value === 'string') {
const v = value.trim().toLowerCase();
@@ -66,7 +66,7 @@ function coerceBooleanMaybe(value: unknown): { value: boolean | undefined; warni
return { value: undefined };
}
function coercePortMaybe(port: unknown): { value: number | undefined; warning?: IS3Warning } {
function coercePortMaybe(port: unknown): { value: number | undefined; warning?: IStorageWarning } {
if (port === undefined || port === null || port === '') return { value: undefined };
const n = typeof port === 'number' ? port : Number(String(port).trim());
if (!Number.isFinite(n) || !Number.isInteger(n) || n <= 0 || n > 65535) {
@@ -81,8 +81,8 @@ function coercePortMaybe(port: unknown): { value: number | undefined; warning?:
return { value: n };
}
function sanitizeEndpointString(raw: unknown): { value: string; warnings: IS3Warning[] } {
const warnings: IS3Warning[] = [];
function sanitizeEndpointString(raw: unknown): { value: string; warnings: IStorageWarning[] } {
const warnings: IStorageWarning[] = [];
let s = String(raw ?? '').trim();
if (s !== String(raw ?? '')) {
warnings.push({
@@ -138,17 +138,17 @@ function parseEndpointHostPort(
return { hadScheme, host, port, extras };
}
export function normalizeS3Descriptor(
input: plugins.tsclass.storage.IS3Descriptor,
export function normalizeStorageDescriptor(
input: plugins.tsclass.storage.IStorageDescriptor,
logger?: { warn: (msg: string) => void }
): { normalized: INormalizedS3Config; warnings: IS3Warning[] } {
const warnings: IS3Warning[] = [];
const logWarn = (w: IS3Warning) => {
): { normalized: INormalizedStorageConfig; warnings: IStorageWarning[] } {
const warnings: IStorageWarning[] = [];
const logWarn = (w: IStorageWarning) => {
warnings.push(w);
if (logger) {
logger.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
logger.warn(`[SmartBucket] ${w.code}: ${w.message}`);
} else {
console.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
console.warn(`[SmartBucket] ${w.code}: ${w.message}`);
}
};
@@ -163,7 +163,7 @@ export function normalizeS3Descriptor(
endpointSanWarnings.forEach(logWarn);
if (!endpointStr) {
throw new Error('S3 endpoint is required (got empty string). Provide hostname or URL.');
throw new Error('Storage endpoint is required (got empty string). Provide hostname or URL.');
}
// Provisional protocol selection for parsing host:port forms

View File

@@ -10,9 +10,9 @@ export interface IPathDecriptor {
// ================================
/**
* Internal state tracking for an S3 object
* Internal state tracking for a storage object
*/
export interface IS3ObjectState {
export interface IStorageObjectState {
key: string;
etag: string;
size: number;
@@ -22,7 +22,7 @@ export interface IS3ObjectState {
/**
* Change event emitted by BucketWatcher
*/
export interface IS3ChangeEvent {
export interface IStorageChangeEvent {
type: 'add' | 'modify' | 'delete';
key: string;
size?: number;
@@ -54,4 +54,14 @@ export interface IBucketWatcherOptions {
pageSize?: number;
// Future websocket options will be added here
// websocketUrl?: string;
}
}
// ================================
// Deprecated aliases
// ================================
/** @deprecated Use IStorageObjectState instead */
export type IS3ObjectState = IStorageObjectState;
/** @deprecated Use IStorageChangeEvent instead */
export type IS3ChangeEvent = IStorageChangeEvent;