Compare commits

..

20 Commits

Author SHA1 Message Date
dd6efa4908 v4.1.0 2025-11-20 13:58:02 +00:00
1f4b7319d3 feat(core): Add S3 endpoint normalization, directory pagination, improved metadata checks, trash support, and related tests 2025-11-20 13:58:02 +00:00
b8e5d9a222 v4.0.1 2025-11-20 13:38:53 +00:00
429375a643 fix(plugins): Use explicit node: imports for native path and stream modules in ts/plugins.ts 2025-11-20 13:38:53 +00:00
e147a077f3 v4.0.0 2025-11-20 13:20:19 +00:00
5889396134 BREAKING CHANGE(core): Make API strict-by-default: remove *Strict variants, throw on not-found/exists conflicts, add explicit exists() methods, update docs/tests and bump deps 2025-11-20 13:20:19 +00:00
0c631383e1 3.3.10 2025-08-18 02:43:29 +00:00
d852d8c85b fix(helpers): Normalize and robustly parse S3 endpoint configuration; use normalized descriptor in SmartBucket and update dev tooling 2025-08-18 02:43:29 +00:00
fa4c44ae04 3.3.9 2025-08-15 18:31:42 +00:00
708b0b63b1 fix(docs): Revise README with detailed usage examples and add local Claude settings 2025-08-15 18:31:42 +00:00
8554554642 3.3.8 2025-08-15 18:28:27 +00:00
a04aabf78b fix(tests): Update tests to use @git.zone/tstest, upgrade dependencies, remove GitLab CI and add local CI/workspace config 2025-08-15 18:28:27 +00:00
47cf2cc2cb 3.3.7 2024-12-02 17:00:42 +01:00
ef20e15d20 fix(package): Update author field in package.json 2024-12-02 17:00:42 +01:00
39a4bd6ab7 3.3.6 2024-12-02 17:00:17 +01:00
c2a30654c5 fix(package): Fix license field in package.json to reflect MIT licensing 2024-12-02 17:00:17 +01:00
8085033de4 3.3.5 2024-11-25 17:43:01 +01:00
75dd1d43a9 fix(test): Refactor trash test to improve metadata and deletion validation 2024-11-25 17:43:00 +01:00
8ba7cdc873 3.3.4 2024-11-25 17:36:14 +01:00
ed8db4536b fix(core): Minor refactoring and cleanup of TypeScript source files for improved readability and maintainability. 2024-11-25 17:36:14 +01:00
25 changed files with 14169 additions and 3701 deletions

View File

@@ -1,140 +0,0 @@
# gitzone ci_default
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
cache:
paths:
- .npmci_cache/
key: '$CI_BUILD_STAGE'
stages:
- security
- test
- release
- metadata
before_script:
- npm install -g @shipzone/npmci
# ====================
# security stage
# ====================
mirror:
stage: security
script:
- npmci git mirror
only:
- tags
tags:
- lossless
- docker
- notpriv
auditProductionDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci npm prepare
- npmci command npm install --production --ignore-scripts
- npmci command npm config set registry https://registry.npmjs.org
- npmci command npm audit --audit-level=high --only=prod --production
tags:
- docker
allow_failure: true
auditDevDependencies:
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
stage: security
script:
- npmci npm prepare
- npmci command npm install --ignore-scripts
- npmci command npm config set registry https://registry.npmjs.org
- npmci command npm audit --audit-level=high --only=dev
tags:
- docker
allow_failure: true
# ====================
# test stage
# ====================
testStable:
stage: test
script:
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci npm test
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
testBuild:
stage: test
script:
- npmci npm prepare
- npmci node install stable
- npmci npm install
- npmci command npm run build
coverage: /\d+.?\d+?\%\s*coverage/
tags:
- docker
release:
stage: release
script:
- npmci node install stable
- npmci npm publish
only:
- tags
tags:
- lossless
- docker
- notpriv
# ====================
# metadata stage
# ====================
codequality:
stage: metadata
allow_failure: true
only:
- tags
script:
- npmci command npm install -g typescript
- npmci npm prepare
- npmci npm install
tags:
- lossless
- docker
- priv
trigger:
stage: metadata
script:
- npmci trigger
only:
- tags
tags:
- lossless
- docker
- notpriv
pages:
stage: metadata
script:
- npmci node install lts
- npmci command npm install -g @git.zone/tsdoc
- npmci npm prepare
- npmci npm install
- npmci command tsdoc
tags:
- lossless
- docker
- notpriv
only:
- tags
artifacts:
expire_in: 1 week
paths:
- public
allow_failure: true

View File

@@ -1,5 +1,170 @@
# Changelog
## 2025-11-20 - 4.1.0 - feat(core)
Add S3 endpoint normalization, directory pagination, improved metadata checks, trash support, and related tests
- Add normalizeS3Descriptor helper to sanitize and normalize various S3 endpoint formats and emit warnings for mismatches (helpers.ts).
- Use normalized endpoint and credentials when constructing S3 client in SmartBucket (classes.smartbucket.ts).
- Implement paginated listing helper listObjectsV2AllPages in Directory and use it for listFiles and listDirectories to aggregate Contents and CommonPrefixes across pages (classes.directory.ts).
- Improve MetaData.hasMetaData to catch NotFound errors and return false instead of throwing (classes.metadata.ts).
- Export metadata and trash modules from index (ts/index.ts) and add a Trash class with utilities for trashed files and key encoding (classes.trash.ts).
- Enhance Bucket operations: fastCopy now preserves or replaces native metadata correctly, cleanAllContents supports paginated deletion, and improved fastExists error handling (classes.bucket.ts).
- Fix Directory.getSubDirectoryByName to construct new Directory instances with the correct parent directory reference.
- Add tests covering metadata absence and pagination behavior (test/test.local.node+deno.ts).
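For orientation, a minimal usage sketch of the behaviors added in this release (the bucket name and file path are placeholders; the setup mirrors the README examples further down):

```typescript
import { SmartBucket, MetaData } from '@push.rocks/smartbucket';

const smartBucket = new SmartBucket({
  accessKey: 'your-access-key',
  accessSecret: 'your-secret-key',
  endpoint: 's3.example.com',
});

const bucket = await smartBucket.getBucketByName('my-bucket');
const baseDirectory = await bucket.getBaseDirectory();

// listFiles()/listDirectories() now page through ListObjectsV2 internally,
// so prefixes holding more than 1000 objects are returned in full.
const files = await baseDirectory.listFiles();
console.log(`found ${files.length} files`);

// hasMetaData() now reports a missing metadata object as false instead of throwing.
const file = await baseDirectory.getFile({ path: 'report.pdf' }); // assumed to exist
const hasMeta = await MetaData.hasMetaData({ file });
console.log(`metadata present: ${hasMeta}`);
```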
## 2025-11-20 - 4.0.1 - fix(plugins)
Use explicit node: imports for native path and stream modules in ts/plugins.ts
- Replaced imports of 'path' and 'stream' with 'node:path' and 'node:stream' in ts/plugins.ts.
- Ensures correct ESM resolution of Node built-ins when package.json type is 'module' and avoids accidental conflicts with userland packages.
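Concretely, the fix amounts to swapping bare built-in specifiers for the `node:` scheme, roughly:

```typescript
// Before: bare specifiers may resolve against userland packages under ESM.
// import * as path from 'path';
// import { Readable } from 'stream';

// After: the node: scheme pins these imports to the Node.js built-ins.
import * as path from 'node:path';
import { Readable } from 'node:stream';

console.log(path.join('dist_ts', 'index.js'), Readable.name);
```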
## 2025-11-20 - 4.0.0 - BREAKING CHANGE(core)
Make API strict-by-default: remove *Strict variants, throw on not-found/exists conflicts, add explicit exists() methods, update docs/tests and bump deps
- Breaking: Core API methods are strict by default and now throw errors instead of returning null when targets are missing or already exist (e.g. getBucketByName, getFile, getSubDirectoryByName, fastPut, fastPutStream).
- Removed *Strict variants: fastPutStrict, getBucketByNameStrict, getFileStrict, getSubDirectoryByNameStrict — use the base methods which are now strict.
- Added explicit existence checks: bucketExists (SmartBucket), fileExists (Directory/fileExists), directoryExists (Directory.directoryExists), and fastExists (Bucket.fastExists) to allow non-throwing checks before operations.
- Return type updates: fastPut now returns Promise<File> (no null), getBucketByName/getFile/getSubDirectoryByName now return the respective objects or throw.
- Improved error messages to guide callers (e.g. suggest setting overwrite:true on fastPut when object exists).
- Updated README, changelog and tests to reflect the new strict semantics and usage patterns.
- Developer/runtime dependency bumps: @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tstest, @aws-sdk/client-s3, @push.rocks/smartstring, @tsclass/tsclass (version bumps recorded in package.json).
- Major version bump to 4.0.0 to reflect breaking API changes.
## 2025-11-20 - 4.0.0 - BREAKING: Strict by default + exists methods
Complete API overhaul: all methods throw by default, removed all *Strict variants, added dedicated exists methods
**Breaking Changes:**
**Putters (Write Operations):**
- `fastPut`: Return type `Promise<File | null>` → `Promise<File>`, throws when file exists and overwrite is false
- `fastPutStream`: Now throws when file exists and overwrite is false (previously returned silently)
- `fastPutStrict`: **Removed** - use `fastPut` directly
**Getters (Read Operations):**
- `getBucketByName`: Return type `Promise<Bucket | null>` → `Promise<Bucket>`, throws when bucket not found
- `getBucketByNameStrict`: **Removed** - use `getBucketByName` directly
- `getFile`: Return type `Promise<File | null>` → `Promise<File>`, throws when file not found
- `getFileStrict`: **Removed** - use `getFile` directly
- `getSubDirectoryByName`: Return type `Promise<Directory | null>` → `Promise<Directory>`, throws when directory not found
- `getSubDirectoryByNameStrict`: **Removed** - use `getSubDirectoryByName` directly
**New Methods (Existence Checks):**
- `bucket.fastExists({ path })` - ✅ Already existed
- `directory.fileExists({ path })` - **NEW** - Check if file exists
- `directory.directoryExists(name)` - **NEW** - Check if subdirectory exists
- `smartBucket.bucketExists(name)` - **NEW** - Check if bucket exists
**Benefits:**
- **Simpler API**: Removed 4 redundant *Strict methods
- **Type-safe**: No nullable returns - `Promise<T>` not `Promise<T | null>`
- **Fail-fast**: Errors throw immediately with precise stack traces
- **Consistent**: All methods behave the same way
- **Explicit**: Use exists() to check, then get() to retrieve
- **Better debugging**: Error location is always precise
**Migration Guide:**
```typescript
// ============================================
// Pattern 1: Check then Get (Recommended)
// ============================================
// Before (v3.x):
const bucket = await smartBucket.getBucketByName('my-bucket');
if (bucket) {
// use bucket
}
// After (v4.0):
if (await smartBucket.bucketExists('my-bucket')) {
const bucket = await smartBucket.getBucketByName('my-bucket'); // guaranteed to exist
// use bucket
}
// ============================================
// Pattern 2: Try/Catch
// ============================================
// Before (v3.x):
const file = await directory.getFile({ path: 'file.txt' });
if (!file) {
// Handle not found
}
// After (v4.0):
try {
const file = await directory.getFile({ path: 'file.txt' });
// use file
} catch (error) {
// Handle not found
}
// ============================================
// Pattern 3: Remove *Strict calls
// ============================================
// Before (v3.x):
const file = await directory.getFileStrict({ path: 'file.txt' });
// After (v4.0):
const file = await directory.getFile({ path: 'file.txt' }); // already strict
// ============================================
// Pattern 4: Write Operations
// ============================================
// Before (v3.x):
const file = await bucket.fastPutStrict({ path: 'file.txt', contents: 'data' });
// After (v4.0):
const file = await bucket.fastPut({ path: 'file.txt', contents: 'data' }); // already strict
```
## 2025-08-18 - 3.3.10 - fix(helpers)
Normalize and robustly parse S3 endpoint configuration; use normalized descriptor in SmartBucket and update dev tooling
- Add normalizeS3Descriptor to ts/helpers.ts: robust endpoint parsing, coercion of useSsl/port, sanitization, warnings for dropped URL parts, and canonical endpoint URL output.
- Update SmartBucket (ts/classes.smartbucket.ts) to use the normalized endpoint, region, credentials and forcePathStyle from normalizeS3Descriptor.
- Adjust dev tooling: bump @git.zone/tsbuild -> ^2.6.7, @git.zone/tstest -> ^2.3.4, @push.rocks/qenv -> ^6.1.3 and update test script to run tstest with --verbose --logfile --timeout 60.
- Add .claude/settings.local.json containing local assistant/CI permission settings (local config only).
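A sketch of what the normalization means in practice (the endpoint values here are invented): both configurations below should resolve to the same canonical endpoint, with `useSsl` and `port` coerced from the URL scheme where omitted, and extra URL parts dropped with a warning, per the description above.

```typescript
import { SmartBucket } from '@push.rocks/smartbucket';

// Bare hostname plus explicit port/useSsl...
const viaHostname = new SmartBucket({
  accessKey: 'key',
  accessSecret: 'secret',
  endpoint: 's3.example.com',
  port: 443,
  useSsl: true,
});

// ...and a full URL (the path is stripped with a warning) should normalize
// to the same canonical endpoint, https://s3.example.com.
const viaUrl = new SmartBucket({
  accessKey: 'key',
  accessSecret: 'secret',
  endpoint: 'https://s3.example.com/some/path',
});
```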
## 2025-08-15 - 3.3.9 - fix(docs)
Revise README with detailed usage examples and add local Claude settings
- Revamped README: reorganized content, added emojis and clearer headings for install, getting started, bucket/file/directory operations, streaming, metadata, trash/recovery, locking, and advanced configuration.
- Added many concrete code examples for SmartBucket, Bucket, Directory, File, streaming (node/web), RxJS replay subjects, metadata handling, trash workflow, file locking, magic-bytes detection, JSON operations, and cleaning bucket contents.
- Included testing instructions (pnpm test) and a Best Practices section with recommendations for strict mode, streaming, metadata, trash usage, and locking.
- Added .claude/settings.local.json to include local Claude configuration and tool permissions.
- No source code or public API changes; documentation and local tooling config only.
## 2025-08-15 - 3.3.8 - fix(tests)
Update tests to use @git.zone/tstest, upgrade dependencies, remove GitLab CI and add local CI/workspace config
- Tests: replace imports from @push.rocks/tapbundle with @git.zone/tstest/tapbundle and switch tap.start() to export default tap.start()
- Dependencies: bump @aws-sdk/client-s3 and several @push.rocks packages; upgrade @tsclass/tsclass to a newer major
- DevDependencies: upgrade @git.zone/tsbuild, @git.zone/tstest, @push.rocks/qenv, and @push.rocks/tapbundle
- CI/config: remove .gitlab-ci.yml, add .claude/settings.local.json
- Workspace: add pnpm-workspace.yaml and packageManager field in package.json
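The test migration described above is mechanical per file; a representative sketch (the test body is illustrative):

```typescript
// Before (v3.x):
// import { expect, tap } from '@push.rocks/tapbundle';
// ...
// tap.start();

// After: tapbundle is consumed via @git.zone/tstest and the runner
// picks up the default export of tap.start().
import { expect, tap } from '@git.zone/tstest/tapbundle';

tap.test('smoke test', async () => {
  expect(1 + 1).toEqual(2);
});

export default tap.start();
```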
## 2024-12-02 - 3.3.7 - fix(package)
Update author field in package.json
- Corrected the author field from 'Lossless GmbH' to 'Task Venture Capital GmbH' in the package.json file.
## 2024-12-02 - 3.3.6 - fix(package)
Fix license field in package.json to reflect MIT licensing
## 2024-11-25 - 3.3.5 - fix(test)
Refactor trash test to improve metadata validation
- Added new checks in trash tests to ensure metadata files are correctly moved to trash.
- Validated the presence and integrity of metadata within trashed files.
## 2024-11-25 - 3.3.4 - fix(core)
Minor refactoring and cleanup of TypeScript source files for improved readability and maintainability.
## 2024-11-24 - 3.3.3 - fix(documentation)
Improved documentation accuracy and consistency

deno.lock (generated, new file, 8120 lines)

File diff suppressed because it is too large.

license (new file, 19 lines)
View File

@@ -0,0 +1,19 @@
Copyright (c) 2014 Task Venture Capital GmbH (hello@task.vc)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

package-lock.json (generated, 4 lines changed)
View File

@@ -1,12 +1,12 @@
 {
   "name": "@push.rocks/smartbucket",
-  "version": "3.3.3",
+  "version": "3.3.10",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@push.rocks/smartbucket",
-      "version": "3.3.3",
+      "version": "3.3.10",
       "license": "UNLICENSED",
       "dependencies": {
         "@push.rocks/smartpath": "^5.0.18",

View File

@@ -1,33 +1,33 @@
 {
   "name": "@push.rocks/smartbucket",
-  "version": "3.3.3",
+  "version": "4.1.0",
   "description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
   "type": "module",
-  "author": "Lossless GmbH",
+  "author": "Task Venture Capital GmbH",
-  "license": "UNLICENSED",
+  "license": "MIT",
   "scripts": {
-    "test": "(tstest test/)",
+    "test": "(tstest test/ --verbose --logfile --timeout 60)",
     "build": "(tsbuild --web --allowimplicitany)"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.84",
+    "@git.zone/tsbuild": "^3.1.0",
-    "@git.zone/tsrun": "^1.2.49",
+    "@git.zone/tsrun": "^2.0.0",
-    "@git.zone/tstest": "^1.0.90",
+    "@git.zone/tstest": "^3.0.1",
-    "@push.rocks/qenv": "^6.1.0",
+    "@push.rocks/qenv": "^6.1.3",
-    "@push.rocks/tapbundle": "^5.5.3"
+    "@push.rocks/tapbundle": "^6.0.3"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "^3.699.0",
+    "@aws-sdk/client-s3": "^3.936.0",
     "@push.rocks/smartmime": "^2.0.4",
-    "@push.rocks/smartpath": "^5.0.18",
+    "@push.rocks/smartpath": "^6.0.0",
-    "@push.rocks/smartpromise": "^4.0.4",
+    "@push.rocks/smartpromise": "^4.2.3",
-    "@push.rocks/smartrx": "^3.0.7",
+    "@push.rocks/smartrx": "^3.0.10",
     "@push.rocks/smartstream": "^3.2.5",
-    "@push.rocks/smartstring": "^4.0.15",
+    "@push.rocks/smartstring": "^4.1.0",
     "@push.rocks/smartunique": "^3.0.9",
-    "@tsclass/tsclass": "^4.1.2"
+    "@tsclass/tsclass": "^9.3.0"
   },
   "private": false,
   "files": [
@@ -71,5 +71,6 @@
   "repository": {
     "type": "git",
     "url": "https://code.foss.global/push.rocks/smartbucket.git"
-  }
+  },
+  "packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
 }

pnpm-lock.yaml (generated, 8267 lines changed)

File diff suppressed because it is too large.

pnpm-workspace.yaml (new file, 4 lines)
View File

@@ -0,0 +1,4 @@
onlyBuiltDependencies:
- esbuild
- mongodb-memory-server
- puppeteer

View File

@@ -1,3 +1,5 @@
 * The project uses the official s3 client, not the minio client.
-* notice the difference between *Strict methods and the normal methods.
+* **All methods throw by default** (strict mode): Put operations (`fastPut`, `fastPutStream`) throw when the file exists and overwrite is false; Get operations (`getBucketByName`, `getFile`, `getSubDirectoryByName`) throw when not found.
+* **Use exists() methods to check before getting**: `bucketExists`, `fileExists`, `directoryExists`, `fastExists`
+* **No *Strict methods**: All removed (fastPutStrict, getBucketByNameStrict, getFileStrict, getSubDirectoryByNameStrict)
 * metadata is handled through the MetaData class. Important!
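A short sketch of the check-then-get pattern these hints point at (the paths and the `myBucket` variable are placeholders):

```typescript
const baseDirectory = await myBucket.getBaseDirectory();

// Non-throwing existence checks before the (now strict) getters:
if (await baseDirectory.fileExists({ path: 'reports/summary.txt' })) {
  const file = await baseDirectory.getFile({ path: 'reports/summary.txt' });
  console.log(`found ${file.name}`);
}

if (await baseDirectory.directoryExists('reports')) {
  const reports = await baseDirectory.getSubDirectoryByName('reports');
  console.log(`descending into ${reports.name}`);
}
```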

readme.md (562 lines changed)
View File

@@ -1,280 +1,468 @@
```markdown # @push.rocks/smartbucket 🪣
# @push.rocks/smartbucket
A comprehensive TypeScript library for cloud-agnostic object storage offering bucket management, file operations, and advanced data streaming. > A powerful, cloud-agnostic TypeScript library for object storage with advanced features like file locking, metadata management, and intelligent trash handling.
## Install ## Install 📦
To install `@push.rocks/smartbucket`, ensure you have Node.js and npm installed. Then, run the following command in your project directory: To install `@push.rocks/smartbucket`, run:
```bash ```bash
npm install @push.rocks/smartbucket --save npm install @push.rocks/smartbucket --save
``` ```
This command will add `@push.rocks/smartbucket` to your project's dependencies and install it along with its requirements in the `node_modules` directory. Or if you're using pnpm (recommended):
## Usage ```bash
pnpm add @push.rocks/smartbucket
```
## Usage 🚀
### Introduction ### Introduction
`@push.rocks/smartbucket` provides a robust set of features to manage cloud storage operations in a cloud-agnostic manner. By leveraging this library, you can seamlessly interact with object storage services like AWS S3, without being tied to any vendor-specific implementations. This library not only abstracts basic file operations but also integrates advanced capabilities such as metadata management, data streaming, file locking, and bucket policies, all through a simplified API. `@push.rocks/smartbucket` provides a unified, cloud-agnostic API for object storage operations across major providers like AWS S3, Google Cloud Storage, MinIO, and more. It abstracts away provider-specific complexities while offering advanced features like metadata management, file locking, streaming operations, and intelligent trash management.
### Table of Contents ### Table of Contents
1. [Setting Up](#setting-up) 1. [🏁 Getting Started](#-getting-started)
2. [Working with Buckets](#working-with-buckets) 2. [🗂️ Working with Buckets](#-working-with-buckets)
- [Creating a New Bucket](#creating-a-new-bucket) 3. [📁 File Operations](#-file-operations)
- [Listing Buckets](#listing-buckets) 4. [📂 Directory Management](#-directory-management)
- [Deleting Buckets](#deleting-buckets) 5. [🌊 Streaming Operations](#-streaming-operations)
3. [File Operations in Buckets](#file-operations-in-buckets) 6. [🔒 File Locking](#-file-locking)
- [Uploading Files](#uploading-files) 7. [🏷️ Metadata Management](#-metadata-management)
- [Downloading Files](#downloading-files) 8. [🗑️ Trash & Recovery](#-trash--recovery)
- [Streaming Files](#streaming-files) 9. [⚡ Advanced Features](#-advanced-features)
- [Deleting Files](#deleting-files) 10. [☁️ Cloud Provider Support](#-cloud-provider-support)
4. [Directory Operations](#directory-operations)
- [Listing Directories and Files](#listing-directories-and-files)
- [Managing Files in Directories](#managing-files-in-directories)
5. [Advanced Features](#advanced-features)
- [Bucket Policies](#bucket-policies)
- [Metadata Management](#metadata-management)
- [File Locking](#file-locking)
- [Trash Management](#trash-management)
6. [Cloud Agnosticism](#cloud-agnosticism)
### Setting Up ### 🏁 Getting Started
Begin by importing the necessary classes from the `@push.rocks/smartbucket` package into your TypeScript file. Create an instance of `SmartBucket` with your storage configuration: First, set up your storage connection:
```typescript ```typescript
import { import { SmartBucket } from '@push.rocks/smartbucket';
SmartBucket,
Bucket,
Directory,
File
} from '@push.rocks/smartbucket';
const mySmartBucket = new SmartBucket({ // Initialize with your cloud storage credentials
accessKey: "yourAccessKey", const smartBucket = new SmartBucket({
accessSecret: "yourSecretKey", accessKey: 'your-access-key',
endpoint: "yourEndpointURL", accessSecret: 'your-secret-key',
endpoint: 's3.amazonaws.com', // Or your provider's endpoint
port: 443, port: 443,
useSsl: true useSsl: true,
region: 'us-east-1' // Optional, defaults to 'us-east-1'
}); });
``` ```
Replace `"yourAccessKey"`, `"yourSecretKey"`, and `"yourEndpointURL"` with actual data specific to your cloud provider. ### 🗂️ Working with Buckets
### Working with Buckets #### Creating Buckets
#### Creating a New Bucket
Creating a bucket involves invoking the `createBucket` method. Note that bucket names are unique and follow the rules of the cloud provider:
```typescript ```typescript
async function createBucket(bucketName: string) { // Create a new bucket
try { const myBucket = await smartBucket.createBucket('my-awesome-bucket');
const newBucket: Bucket = await mySmartBucket.createBucket(bucketName); console.log(`✅ Bucket created: ${myBucket.name}`);
console.log(`Bucket ${bucketName} created successfully.`); ```
} catch (error) {
console.error("Error creating bucket:", error); #### Getting Existing Buckets
```typescript
// Get a bucket reference
const existingBucket = await smartBucket.getBucketByName('existing-bucket');
// Or use strict mode (throws if bucket doesn't exist)
const bucketStrict = await smartBucket.getBucketByNameStrict('must-exist-bucket');
```
#### Removing Buckets
```typescript
// Delete a bucket (must be empty)
await smartBucket.removeBucket('old-bucket');
console.log('🗑️ Bucket removed');
```
### 📁 File Operations
#### Upload Files
```typescript
const bucket = await smartBucket.getBucketByName('my-bucket');
// Simple file upload (returns File object)
const file = await bucket.fastPut({
path: 'documents/report.pdf',
contents: Buffer.from('Your file content here')
});
// Upload with string content
await bucket.fastPut({
path: 'notes/todo.txt',
contents: 'Buy milk\nCall mom\nRule the world'
});
// Upload with overwrite control
const uploadedFile = await bucket.fastPut({
path: 'images/logo.png',
contents: imageBuffer,
overwrite: true // Set to true to replace existing files
});
// Error handling: fastPut throws if file exists and overwrite is false
try {
await bucket.fastPut({
path: 'existing-file.txt',
contents: 'new content'
});
} catch (error) {
console.error('Upload failed:', error.message);
// Error: Object already exists at path 'existing-file.txt' in bucket 'my-bucket'. Set overwrite:true to replace it.
}
```
#### Download Files
```typescript
// Get file as Buffer
const fileContent = await bucket.fastGet({
path: 'documents/report.pdf'
});
console.log(`📄 File size: ${fileContent.length} bytes`);
// Get file as string
const textContent = fileContent.toString('utf-8');
```
#### Check File Existence
```typescript
const exists = await bucket.fastExists({
path: 'documents/report.pdf'
});
console.log(`File exists: ${exists ? '✅' : '❌'}`);
```
#### Delete Files
```typescript
// Permanent deletion
await bucket.fastRemove({
path: 'old-file.txt'
});
```
#### Copy & Move Files
```typescript
// Copy file within bucket
await bucket.fastCopy({
sourcePath: 'original/file.txt',
destinationPath: 'backup/file-copy.txt'
});
// Move file (copy + delete original)
await bucket.fastMove({
sourcePath: 'temp/draft.txt',
destinationPath: 'final/document.txt'
});
```
### 📂 Directory Management
SmartBucket provides powerful directory-like operations for organizing your files:
```typescript
// Get base directory
const baseDir = await bucket.getBaseDirectory();
// List directories and files
const directories = await baseDir.listDirectories();
const files = await baseDir.listFiles();
console.log(`📁 Found ${directories.length} directories`);
console.log(`📄 Found ${files.length} files`);
// Navigate subdirectories
const subDir = await baseDir.getSubDirectoryByName('projects/2024');
// Create nested file
await subDir.fastPut({
path: 'report.pdf',
contents: reportBuffer
});
// Get directory tree structure
const tree = await subDir.getTreeArray();
console.log('🌳 Directory tree:', tree);
// Create empty file as placeholder
await subDir.createEmptyFile('placeholder.txt');
```
### 🌊 Streaming Operations
Handle large files efficiently with streaming:
#### Download Streams
```typescript
// Node.js stream
const nodeStream = await bucket.fastGetStream(
{ path: 'large-video.mp4' },
'nodestream'
);
nodeStream.pipe(fs.createWriteStream('local-video.mp4'));
// Web stream (for modern environments)
const webStream = await bucket.fastGetStream(
{ path: 'large-file.zip' },
'webstream'
);
```
#### Upload Streams
```typescript
// Stream upload from file
const readStream = fs.createReadStream('big-data.csv');
await bucket.fastPutStream({
path: 'uploads/big-data.csv',
stream: readStream,
metadata: {
contentType: 'text/csv',
userMetadata: {
uploadedBy: 'data-team',
version: '2.0'
}
} }
} });
createBucket("myNewBucket");
``` ```
#### Listing Buckets #### Reactive Streams with RxJS
While the library uses cloud-provider capabilities like AWS SDK to list existing buckets, `smartbucket` is aimed at simplifying content management within them.
#### Deleting Buckets
To delete a bucket, simply call the `removeBucket` function:
```typescript ```typescript
async function deleteBucket(bucketName: string) { // Get file as ReplaySubject for reactive programming
try { const replaySubject = await bucket.fastGetReplaySubject({
await mySmartBucket.removeBucket(bucketName); path: 'data/sensor-readings.json',
console.log(`Bucket ${bucketName} deleted successfully.`); chunkSize: 1024
} catch (error) { });
console.error("Error deleting bucket:", error);
}
}
deleteBucket("anotherBucketName"); replaySubject.subscribe({
next: (chunk) => processChunk(chunk),
complete: () => console.log('✅ Stream complete')
});
``` ```
### File Operations in Buckets ### 🔒 File Locking
SmartBucket offers a unified API to execute file-based operations efficiently. Prevent accidental modifications with file locking:
#### Uploading Files
Upload a file using the `fastPut` method, specifying the bucket name, file path, and content:
```typescript ```typescript
async function uploadFile(bucketName: string, filePath: string, fileContent: Buffer | string) { const file = await bucket.getBaseDirectory()
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); .getFileStrict({ path: 'important-config.json' });
await bucket.fastPut({ path: filePath, contents: fileContent });
console.log(`File uploaded to ${filePath}`); // Lock file for 10 minutes
await file.lock({ timeoutMillis: 600000 });
console.log('🔒 File locked');
// Try to modify locked file (will throw error)
try {
await file.delete();
} catch (error) {
console.log('❌ Cannot delete locked file');
} }
uploadFile("myBucket", "example.txt", "This is a sample file content."); // Unlock when done
await file.unlock();
console.log('🔓 File unlocked');
``` ```
#### Downloading Files ### 🏷️ Metadata Management
Download files using `fastGet`. It retrieves the file content as a buffer: Attach and manage metadata for your files:
```typescript ```typescript
async function downloadFile(bucketName: string, filePath: string) { const file = await bucket.getBaseDirectory()
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); .getFileStrict({ path: 'document.pdf' });
const content: Buffer = await bucket.fastGet({ path: filePath });
console.log("Downloaded content:", content.toString());
}
downloadFile("myBucket", "example.txt"); // Get metadata handler
const metadata = await file.getMetaData();
// Set custom metadata
await metadata.setCustomMetaData({
key: 'author',
value: 'John Doe'
});
await metadata.setCustomMetaData({
key: 'department',
value: 'Engineering'
});
// Retrieve metadata
const author = await metadata.getCustomMetaData({ key: 'author' });
console.log(`📝 Author: ${author}`);
// Get all metadata
const allMeta = await metadata.getAllCustomMetaData();
console.log('📋 All metadata:', allMeta);
``` ```
#### Streaming Files ### 🗑️ Trash & Recovery
For large-scale applications, stream files without loading them fully into memory: SmartBucket includes an intelligent trash system for safe file deletion:
```typescript ```typescript
async function streamFile(bucketName: string, filePath: string) { const file = await bucket.getBaseDirectory()
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); .getFileStrict({ path: 'important-data.xlsx' });
const stream = await bucket.fastGetStream({ path: filePath }, "nodestream");
stream.on('data', chunk => console.log("Chunk:", chunk.toString()));
stream.on('end', () => console.log("Download completed."));
}
streamFile("myBucket", "largefile.txt"); // Move to trash instead of permanent deletion
await file.delete({ mode: 'trash' });
console.log('🗑️ File moved to trash');
// Access trash
const trash = await bucket.getTrash();
const trashDir = await trash.getTrashDir();
const trashedFiles = await trashDir.listFiles();
console.log(`📦 ${trashedFiles.length} files in trash`);
// Restore from trash
const trashedFile = await bucket.getBaseDirectory()
.getFileStrict({
path: 'important-data.xlsx',
getFromTrash: true
});
await trashedFile.restore({ useOriginalPath: true });
console.log('♻️ File restored successfully');
// Permanent deletion from trash
await trash.emptyTrash();
console.log('🧹 Trash emptied');
``` ```
#### Deleting Files ### ⚡ Advanced Features
Delete files with precision using `fastRemove`: #### File Statistics
```typescript ```typescript
async function deleteFile(bucketName: string, filePath: string) { // Get detailed file statistics
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); const stats = await bucket.fastStat({ path: 'document.pdf' });
await bucket.fastRemove({ path: filePath }); console.log(`📊 Size: ${stats.size} bytes`);
console.log(`File ${filePath} deleted.`); console.log(`📅 Last modified: ${stats.lastModified}`);
} console.log(`🏷️ ETag: ${stats.etag}`);
deleteFile("myBucket", "example.txt");
``` ```
### Directory Operations #### Magic Bytes Detection
Leverage directory functionalities to better organize and manage files within buckets.
#### Listing Directories and Files
Listing contents showcases a directorys structure and file contents:
```typescript ```typescript
async function listDirectory(bucketName: string, directoryPath: string) { // Read first bytes for file type detection
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); const magicBytes = await bucket.getMagicBytes({
const baseDirectory: Directory = await bucket.getBaseDirectory(); path: 'mystery-file',
const targetDirectory = await baseDirectory.getSubDirectoryByName(directoryPath); length: 16
});
console.log('Directories:'); // Or from a File object
(await targetDirectory.listDirectories()).forEach(dir => console.log(dir.name)); const file = await bucket.getBaseDirectory()
.getFileStrict({ path: 'image.jpg' });
console.log('Files:'); const magic = await file.getMagicBytes({ length: 4 });
(await targetDirectory.listFiles()).forEach(file => console.log(file.name)); console.log(`🔮 Magic bytes: ${magic.toString('hex')}`);
}
listDirectory("myBucket", "path/to/directory");
``` ```
#### Managing Files in Directories #### JSON Data Operations
Additional functionalities allow file management, inclusive of handling sub-directories:
```typescript ```typescript
async function manageFilesInDirectory(bucketName: string, directoryPath: string, fileName: string, content: string) { const file = await bucket.getBaseDirectory()
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); .getFileStrict({ path: 'config.json' });
const baseDirectory: Directory = await bucket.getBaseDirectory();
const directory = await baseDirectory.getSubDirectoryByName(directoryPath) ?? baseDirectory;
await directory.fastPut({ path: fileName, contents: content }); // Read JSON data
console.log(`File ${fileName} created in ${directoryPath}`); const config = await file.getJsonData();
console.log('⚙️ Config loaded:', config);
const fileContent = await directory.fastGet({ path: fileName }); // Update JSON data
console.log(`Content of ${fileName}: ${fileContent.toString()}`); config.version = '2.0';
} config.updated = new Date().toISOString();
await file.writeJsonData(config);
manageFilesInDirectory("myBucket", "myDir", "example.txt", "File content here"); console.log('💾 Config updated');
``` ```
### Advanced Features #### Directory & File Type Detection
The librarys advanced features streamline intricate cloud storage workflows.
#### Bucket Policies
The library offers tools for maintaining consistent bucket policies across storage providers, assisting in defining access roles and permissions.
#### Metadata Management
Easily manage and store metadata by using the `MetaData` utility:
```typescript ```typescript
async function handleMetadata(bucketName: string, filePath: string) { // Check if path is a directory
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); const isDir = await bucket.isDirectory({ path: 'uploads/' });
const meta = await bucket.fastStat({ path: filePath });
console.log("Metadata:", meta.Metadata);
}
handleMetadata("myBucket", "example.txt"); // Check if path is a file
const isFile = await bucket.isFile({ path: 'uploads/document.pdf' });
console.log(`Is directory: ${isDir ? '📁' : '❌'}`);
console.log(`Is file: ${isFile ? '📄' : '❌'}`);
``` ```
#### File Locking #### Clean Bucket Contents
Prevent accidental writes by locking files:
```typescript ```typescript
async function lockFile(bucketName: string, filePath: string) { // Remove all files and directories (use with caution!)
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); await bucket.cleanAllContents();
const file: File = await bucket.getBaseDirectory().getFileStrict({ path: filePath }); console.log('🧹 Bucket cleaned');
await file.lock({ timeoutMillis: 600000 }); // Lock for 10 minutes
console.log(`File ${filePath} locked.`);
}
lockFile("myBucket", "example.txt");
``` ```
#### Trash Management ### ☁️ Cloud Provider Support
SmartBucket enables a safe deletion mode where files can be moved to a recycling bin, allowing for restoration: SmartBucket works seamlessly with:
- **AWS S3** - Full compatibility with S3 API
- **Google Cloud Storage** - Via S3-compatible API
- **MinIO** - Self-hosted S3-compatible storage
- **DigitalOcean Spaces** - S3-compatible object storage
- **Backblaze B2** - Cost-effective cloud storage
- **Wasabi** - High-performance S3-compatible storage
- **Any S3-compatible provider**
The library automatically handles provider quirks and optimizes operations for each platform while maintaining a consistent API.
### 🔧 Advanced Configuration
```typescript ```typescript
async function trashAndRestoreFile(bucketName: string, filePath: string) { // Configure with custom options
const bucket: Bucket = await mySmartBucket.getBucketByName(bucketName); const smartBucket = new SmartBucket({
const file: File = await bucket.getBaseDirectory().getFileStrict({ path: filePath }); accessKey: process.env.S3_ACCESS_KEY,
accessSecret: process.env.S3_SECRET_KEY,
endpoint: process.env.S3_ENDPOINT,
port: 443,
useSsl: true,
region: 'eu-central-1',
// Additional S3 client options can be passed through
});
// Move the file to trash // Environment-based configuration
await file.delete({ mode: 'trash' }); import { Qenv } from '@push.rocks/qenv';
console.log(`File ${filePath} moved to trash.`); const qenv = new Qenv('./', './.nogit/');
// Retrieve the file from the trash const smartBucket = new SmartBucket({
const trashFile = await bucket.getTrash().getTrashedFileByOriginalName({ path: filePath }); accessKey: await qenv.getEnvVarOnDemandStrict('S3_ACCESS_KEY'),
await trashFile.restore(); accessSecret: await qenv.getEnvVarOnDemandStrict('S3_SECRET'),
console.log(`File ${filePath} restored from trash.`); endpoint: await qenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
} });
trashAndRestoreFile("myBucket", "example.txt");
``` ```
### Cloud Agnosticism ### 🧪 Testing
`@push.rocks/smartbucket` supports a multitude of cloud providers, enhancing flexibility in adopting different cloud strategies without the need for extensive code rewrite. It offers a uniform interface allowing to perform operations seamlessly between different storage solutions such as AWS S3, Google Cloud Storage, and more. This aspect empowers organizations to align their storage decisions with business needs rather than technical constraints. SmartBucket is thoroughly tested. Run tests with:
By following this guide, you should be well-equipped to handle cloud storage operations using the `@push.rocks/smartbucket` library. Diligently constructed code examples elucidate the extensive functionalities offered by the library, aligned with best practices in cloud storage. For a deeper dive into any specific feature, refer to the comprehensive documentation provided with the library and the official documentation of the cloud providers you are integrating with. ```bash
pnpm test
``` ```
### 🤝 Best Practices
1. **Always use strict mode** for critical operations to catch errors early
2. **Implement proper error handling** for network and permission issues
3. **Use streaming** for large files to optimize memory usage
4. **Leverage metadata** for organizing and searching files
5. **Enable trash mode** for important data to prevent accidental loss
6. **Lock files** during critical operations to prevent race conditions
7. **Clean up resources** properly when done
## License and Legal Information ## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.

View File

@@ -0,0 +1,76 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as plugins from '../ts/plugins.js';
import * as smartbucket from '../ts/index.js';
class FakeS3Client {
private callIndex = 0;
constructor(private readonly pages: Array<Partial<plugins.s3.ListObjectsV2Output>>) {}
public async send(_command: any) {
const page = this.pages[this.callIndex] || { Contents: [], CommonPrefixes: [], IsTruncated: false };
this.callIndex += 1;
return page;
}
}
tap.test('MetaData.hasMetaData should return false when metadata file does not exist', async () => {
const fakeFile = {
name: 'file.txt',
parentDirectoryRef: {
async getFile() {
throw new Error(`File not found at path 'file.txt.metadata'`);
},
},
} as unknown as smartbucket.File;
const hasMetaData = await smartbucket.MetaData.hasMetaData({ file: fakeFile });
expect(hasMetaData).toBeFalse();
});
tap.test('getSubDirectoryByName should create correct parent chain for new nested directories', async () => {
const fakeSmartbucket = { s3Client: new FakeS3Client([{ Contents: [], CommonPrefixes: [] }]) } as unknown as smartbucket.SmartBucket;
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
const nestedDirectory = await baseDirectory.getSubDirectoryByName('level1/level2', { getEmptyDirectory: true });
expect(nestedDirectory.name).toEqual('level2');
expect(nestedDirectory.parentDirectoryRef.name).toEqual('level1');
expect(nestedDirectory.getBasePath()).toEqual('level1/level2/');
});
tap.test('listFiles should aggregate results across paginated ListObjectsV2 responses', async () => {
const firstPage = {
Contents: Array.from({ length: 1000 }, (_, index) => ({ Key: `file-${index}` })),
IsTruncated: true,
NextContinuationToken: 'token-1',
};
const secondPage = {
Contents: Array.from({ length: 200 }, (_, index) => ({ Key: `file-${1000 + index}` })),
IsTruncated: false,
};
const fakeSmartbucket = { s3Client: new FakeS3Client([firstPage, secondPage]) } as unknown as smartbucket.SmartBucket;
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
const files = await baseDirectory.listFiles();
expect(files.length).toEqual(1200);
});
tap.test('listDirectories should aggregate CommonPrefixes across pagination', async () => {
const fakeSmartbucket = {
s3Client: new FakeS3Client([
{ CommonPrefixes: [{ Prefix: 'dirA/' }], IsTruncated: true, NextContinuationToken: 'token-1' },
{ CommonPrefixes: [{ Prefix: 'dirB/' }], IsTruncated: false },
]),
} as unknown as smartbucket.SmartBucket;
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
const directories = await baseDirectory.listDirectories();
expect(directories.map((d) => d.name)).toEqual(['dirA', 'dirB']);
});
export default tap.start();

View File

@@ -0,0 +1,7 @@
import { tap, expect } from '@git.zone/tstest/tapbundle';
tap.test('test metadata functionality', async () => {
})
export default tap.start();

View File

@@ -1,7 +0,0 @@
import { tap, expect } from '@push.rocks/tapbundle';
tap.test('test metadata functionality', async () => {
})
tap.start();

View File

@@ -1,4 +1,4 @@
-import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
+import { expect, tap } from '@git.zone/tstest/tapbundle';
 import { Qenv } from '@push.rocks/qenv';
 import * as smartbucket from '../ts/index.js';
@@ -16,7 +16,7 @@ tap.test('should create a valid smartbucket', async () => {
     endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
   });
   expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
-  myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
+  myBucket = await testSmartbucket.getBucketByName(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
   expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
   expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
 });
@@ -126,4 +126,4 @@ tap.test('clean up directory style tests', async () => {
   await myBucket.fastRemove({ path: 'file1.txt' });
 });
-tap.start();
+export default tap.start();

View File

@@ -1,4 +1,4 @@
-import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
+import { expect, tap } from '@git.zone/tstest/tapbundle';
 import { jestExpect } from '@push.rocks/tapbundle/node';
 import { Qenv } from '@push.rocks/qenv';
@@ -17,7 +17,7 @@ tap.test('should create a valid smartbucket', async () => {
     endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
   });
   expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
-  myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
+  myBucket = await testSmartbucket.getBucketByName(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
   expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
   expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
 });
@@ -30,7 +30,7 @@ tap.test('should clean all contents', async () => {
 tap.test('should delete a file into the normally', async () => {
   const path = 'trashtest/trashme.txt';
-  const file = await myBucket.fastPutStrict({
+  const file = await myBucket.fastPut({
     path,
     contents: 'I\'m in the trash test content!',
   });
@@ -44,7 +44,7 @@ tap.test('should delete a file into the normally', async () => {
 tap.test('should put a file into the trash', async () => {
   const path = 'trashtest/trashme.txt';
-  const file = await myBucket.fastPutStrict({
+  const file = await myBucket.fastPut({
     path,
     contents: 'I\'m in the trash test content!',
   });
@@ -52,7 +52,21 @@ tap.test('should put a file into the trash', async () => {
   console.log(fileMetadata.toString());
   expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
   await file.delete({ mode: 'trash' });
-  jestExpect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({
+  const getTrashContents = async () => {
+    const trash = await myBucket.getTrash();
+    const trashDir = await trash.getTrashDir();
+    return await trashDir.listFiles();
+  }
+  const trashedFiles = await getTrashContents();
+  expect(trashedFiles.length).toEqual(2);
+  const trashedMetaFile = trashedFiles.find(file => file.name.endsWith('.metadata'));
+  expect(trashedMetaFile).toBeDefined();
+  expect(trashedMetaFile).toBeInstanceOf(smartbucket.File);
+  jestExpect(await trashedMetaFile!.getJsonData()).toEqual({
     custom_recycle: {
       deletedAt: jestExpect.any(Number),
       originalPath: "trashtest/trashme.txt",
@@ -62,7 +76,7 @@ tap.test('should put a file into the trash', async () => {
 tap.test('should restore a file from trash', async () => {
   const baseDirectory = await myBucket.getBaseDirectory();
-  const file = await baseDirectory.getFileStrict({
+  const file = await baseDirectory.getFile({
     path: 'trashtest/trashme.txt',
     getFromTrash: true
   });

View File

@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartbucket',
-  version: '3.3.3',
+  version: '4.1.0',
   description: 'A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.'
 }

View File

@@ -14,7 +14,7 @@ import { Trash } from './classes.trash.js';
  * operate in S3 basic fashion on blobs of data.
  */
 export class Bucket {
-  public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
+  public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string): Promise<Bucket> {
     const command = new plugins.s3.ListBucketsCommand({});
     const buckets = await smartbucketRef.s3Client.send(command);
     const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);
@@ -24,8 +24,7 @@ export class Bucket {
       console.log(`Taking this as base for new Bucket instance`);
       return new this(smartbucketRef, bucketNameArg);
     } else {
-      console.log(`did not find bucket by name: ${bucketNameArg}`);
-      return null;
+      throw new Error(`Bucket '${bucketNameArg}' not found.`);
     }
   }
@@ -71,7 +70,7 @@
     }
     const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
     const baseDirectory = await this.getBaseDirectory();
-    return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
+    return await baseDirectory.getSubDirectoryByName(checkPath, {
       getEmptyDirectory: true,
     });
   }
@@ -88,15 +87,16 @@ export class Bucket {
       contents: string | Buffer;
       overwrite?: boolean;
     }
-  ): Promise<File | null> {
+  ): Promise<File> {
     try {
       const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
       const exists = await this.fastExists({ path: reducedPath });
       if (exists && !optionsArg.overwrite) {
-        const errorText = `Object already exists at path '${reducedPath}' in bucket '${this.name}'.`;
-        console.error(errorText);
-        return null;
+        throw new Error(
+          `Object already exists at path '${reducedPath}' in bucket '${this.name}'. ` +
+          `Set overwrite:true to replace it.`
+        );
       } else if (exists && optionsArg.overwrite) {
         console.log(
           `Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
@@ -129,13 +129,6 @@ export class Bucket {
     }
   }
-  public async fastPutStrict(...args: Parameters<Bucket['fastPut']>) {
-    const file = await this.fastPut(...args);
-    if (!file) {
-      throw new Error(`File not stored at path '${args[0].path}'`);
-    }
-    return file;
-  }
   /**
    * get file
@@ -259,10 +252,10 @@ export class Bucket {
     const exists = await this.fastExists({ path: optionsArg.path });
     if (exists && !optionsArg.overwrite) {
-      console.error(
-        `Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`
+      throw new Error(
+        `Object already exists at path '${optionsArg.path}' in bucket '${this.name}'. ` +
+        `Set overwrite:true to replace it.`
       );
-      return;
     } else if (exists && optionsArg.overwrite) {
       console.log(
         `Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
@@ -460,7 +453,7 @@ export class Bucket {
       Range: `bytes=0-${optionsArg.length - 1}`,
     });
     const response = await this.smartbucketRef.s3Client.send(command);
-    const chunks = [];
+    const chunks: Buffer[] = [];
     const stream = response.Body as any; // SdkStreamMixin includes readable stream
     for await (const chunk of stream) {

View File

@@ -69,7 +69,7 @@ export class Directory {
path: string; path: string;
createWithContents?: string | Buffer; createWithContents?: string | Buffer;
getFromTrash?: boolean; getFromTrash?: boolean;
}): Promise<File | null> { }): Promise<File> {
const pathDescriptor = { const pathDescriptor = {
directory: this, directory: this,
path: optionsArg.path, path: optionsArg.path,
@@ -83,7 +83,7 @@ export class Directory {
return trashedFile; return trashedFile;
} }
if (!exists && !optionsArg.createWithContents) { if (!exists && !optionsArg.createWithContents) {
return null; throw new Error(`File not found at path '${optionsArg.path}'`);
} }
if (!exists && optionsArg.createWithContents) { if (!exists && optionsArg.createWithContents) {
await File.create({ await File.create({
@@ -98,32 +98,66 @@ export class Directory {
}); });
} }
/** /**
* gets a file strictly * Check if a file exists in this directory
* @param args
* @returns
*/ */
public async getFileStrict(...args: Parameters<Directory['getFile']>) { public async fileExists(optionsArg: { path: string }): Promise<boolean> {
const file = await this.getFile(...args); const pathDescriptor = {
if (!file) { directory: this,
throw new Error(`File not found at path '${args[0].path}'`); path: optionsArg.path,
} };
return file; return this.bucketRef.fastExists({
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
});
}
/**
* Check if a subdirectory exists
*/
public async directoryExists(dirNameArg: string): Promise<boolean> {
const directories = await this.listDirectories();
return directories.some(dir => dir.name === dirNameArg);
}
/**
* Collects all ListObjectsV2 pages for a prefix.
*/
private async listObjectsV2AllPages(prefix: string, delimiter?: string) {
const allContents: plugins.s3._Object[] = [];
const allCommonPrefixes: plugins.s3.CommonPrefix[] = [];
let continuationToken: string | undefined;
do {
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: prefix,
Delimiter: delimiter,
ContinuationToken: continuationToken,
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
if (response.Contents) {
allContents.push(...response.Contents);
}
if (response.CommonPrefixes) {
allCommonPrefixes.push(...response.CommonPrefixes);
}
continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
} while (continuationToken);
return { contents: allContents, commonPrefixes: allCommonPrefixes };
} }
/**
* lists all files
*/
public async listFiles(): Promise<File[]> {
- const command = new plugins.s3.ListObjectsV2Command({
- Bucket: this.bucketRef.name,
- Prefix: this.getBasePath(),
- Delimiter: '/',
- });
- const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
+ const { contents } = await this.listObjectsV2AllPages(this.getBasePath(), '/');
const fileArray: File[] = [];
- response.Contents?.forEach((item) => {
+ contents.forEach((item) => {
if (item.Key && !item.Key.endsWith('/')) {
const subtractedPath = item.Key.replace(this.getBasePath(), '');
if (!subtractedPath.includes('/')) {
@@ -145,16 +179,11 @@ export class Directory {
*/
public async listDirectories(): Promise<Directory[]> {
try {
- const command = new plugins.s3.ListObjectsV2Command({
- Bucket: this.bucketRef.name,
- Prefix: this.getBasePath(),
- Delimiter: '/',
- });
- const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
+ const { commonPrefixes } = await this.listObjectsV2AllPages(this.getBasePath(), '/');
const directoryArray: Directory[] = [];
- if (response.CommonPrefixes) {
- response.CommonPrefixes.forEach((item) => {
+ if (commonPrefixes) {
+ commonPrefixes.forEach((item) => {
if (item.Prefix) {
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
if (subtractedPath.endsWith('/')) {
@@ -206,7 +235,7 @@ export class Directory {
* if the path is a file path, it will be treated as a file and the parent directory will be returned
*/
couldBeFilePath?: boolean;
- } = {}): Promise<Directory | null> {
+ } = {}): Promise<Directory> {
const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");
@@ -226,7 +255,7 @@ export class Directory {
return returnDirectory;
}
if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
- returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
+ returnDirectory = new Directory(this.bucketRef, directoryArg, dirNameToSearch);
}
if (isFinalDirectory && optionsArg.createWithInitializerFile) {
returnDirectory?.createEmptyFile('00init.txt');
@@ -253,16 +282,12 @@ export class Directory {
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
}
- return wantedDirectory || null;
+ if (!wantedDirectory) {
+ throw new Error(`Directory not found at path '${dirNameArg}'`);
+ }
+ return wantedDirectory;
}
- public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
- const directory = await this.getSubDirectoryByName(...args);
- if (!directory) {
- throw new Error(`Directory not found at path '${args[0]}'`);
- }
- return directory;
- }
/**
* moves the directory
@@ -360,7 +385,7 @@ export class Directory {
*/
mode?: 'permanent' | 'trash';
}) {
- const file = await this.getFileStrict({
+ const file = await this.getFile({
path: optionsArg.path,
});
await file.delete({
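Because the lookup now goes through the strict getFile, a missing path throws before the delete runs; a hedged sketch of the trash-aware flow (variable and path names are hypothetical):

// Sketch: getFile throws on a missing path; delete() takes the mode shown above
const file = await dir.getFile({ path: 'old-report.csv' });
await file.delete({ mode: 'trash' }); // 'permanent' would bypass the trash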

View File

@@ -245,7 +245,7 @@ export class File {
// lets update references of this
const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
- this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
+ this.parentDirectoryRef = await baseDirectory.getSubDirectoryByName(
await helpers.reducePathDescriptorToPath(pathDescriptorArg),
{
couldBeFilePath: true,

View File

@@ -4,11 +4,23 @@ import { File } from './classes.file.js';
export class MetaData {
public static async hasMetaData(optionsArg: { file: File }) {
- // lets find the existing metadata file
- const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
- path: optionsArg.file.name + '.metadata',
- });
- return !!existingFile;
+ // try finding the existing metadata file; return false if it doesn't exist
+ try {
+ const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
+ path: optionsArg.file.name + '.metadata',
+ });
+ return !!existingFile;
+ } catch (error: any) {
+ const message = error?.message || '';
+ const isNotFound =
+ message.includes('File not found') ||
+ error?.name === 'NotFound' ||
+ error?.$metadata?.httpStatusCode === 404;
+ if (isNotFound) {
+ return false;
+ }
+ throw error;
+ }
}
// static // static
@@ -17,7 +29,7 @@ export class MetaData {
metaData.fileRef = optionsArg.file;
// lets find the existing metadata file
- metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
+ metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
path: metaData.fileRef.name + '.metadata',
createWithContents: '{}',
});
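A short sketch of the metadata check above in use; someFile is a hypothetical File instance:

// Hedged sketch: hasMetaData now returns false instead of surfacing a not-found error
const hasMeta = await MetaData.hasMetaData({ file: someFile });
// the factory path above lazily creates the '.metadata' sidecar via createWithContents: '{}'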

View File

@@ -2,6 +2,7 @@
import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
+ import { normalizeS3Descriptor } from './helpers.js';
export class SmartBucket {
public config: plugins.tsclass.storage.IS3Descriptor;
@@ -17,18 +18,14 @@ export class SmartBucket {
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
this.config = configArg;
- const protocol = configArg.useSsl === false ? 'http' : 'https';
- const port = configArg.port ? `:${configArg.port}` : '';
- const endpoint = `${protocol}://${configArg.endpoint}${port}`;
+ // Use the normalizer to handle various endpoint formats
+ const { normalized } = normalizeS3Descriptor(configArg);
this.s3Client = new plugins.s3.S3Client({
- endpoint,
- region: configArg.region || 'us-east-1',
- credentials: {
- accessKeyId: configArg.accessKey,
- secretAccessKey: configArg.accessSecret,
- },
- forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
+ endpoint: normalized.endpointUrl,
+ region: normalized.region,
+ credentials: normalized.credentials,
+ forcePathStyle: normalized.forcePathStyle, // Necessary for S3-compatible storage like MinIO or Wasabi
});
}
@@ -45,11 +42,12 @@ export class SmartBucket {
return Bucket.getBucketByName(this, bucketNameArg);
}
- public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
- const bucket = await this.getBucketByName(...args);
- if (!bucket) {
- throw new Error(`Bucket ${args[0]} does not exist.`);
- }
- return bucket;
- }
+ /**
+ * Check if a bucket exists
+ */
+ public async bucketExists(bucketNameArg: string): Promise<boolean> {
+ const command = new plugins.s3.ListBucketsCommand({});
+ const buckets = await this.s3Client.send(command);
+ return buckets.Buckets?.some(bucket => bucket.Name === bucketNameArg) ?? false;
+ }
}
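A minimal usage sketch of the strict bucket API shown above; credentials and names are placeholders:

// Hedged sketch: bucketExists() is the explicit check, getBucketByName() is strict per the 4.x API
const smartbucket = new SmartBucket({
  accessKey: 'ACCESS',
  accessSecret: 'SECRET',
  endpoint: 'https://s3.example.com', // normalizeS3Descriptor also accepts host:port forms
} as any); // cast: the full IS3Descriptor shape isn't shown in this diff
if (await smartbucket.bucketExists('my-bucket')) {
  const bucket = await smartbucket.getBucketByName('my-bucket');
}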

View File

@@ -21,7 +21,7 @@ export class Trash {
const trashDir = await this.getTrashDir();
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
- return trashDir.getFileStrict({ path: trashKey });
+ return trashDir.getFile({ path: trashKey });
}
public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {

View File

@@ -20,3 +20,235 @@ export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.I
}
return returnPath;
}
// S3 Descriptor Normalization
export interface IS3Warning {
code: string;
message: string;
}
export interface INormalizedS3Config {
endpointUrl: string;
host: string;
protocol: 'http' | 'https';
port?: number;
region: string;
credentials: {
accessKeyId: string;
secretAccessKey: string;
};
forcePathStyle: boolean;
}
function coerceBooleanMaybe(value: unknown): { value: boolean | undefined; warning?: IS3Warning } {
if (typeof value === 'boolean') return { value };
if (typeof value === 'string') {
const v = value.trim().toLowerCase();
if (v === 'true' || v === '1') {
return {
value: true,
warning: {
code: 'SBK_S3_COERCED_USESSL',
message: `Coerced useSsl='${value}' (string) to boolean true.`
}
};
}
if (v === 'false' || v === '0') {
return {
value: false,
warning: {
code: 'SBK_S3_COERCED_USESSL',
message: `Coerced useSsl='${value}' (string) to boolean false.`
}
};
}
}
return { value: undefined };
}
function coercePortMaybe(port: unknown): { value: number | undefined; warning?: IS3Warning } {
if (port === undefined || port === null || port === '') return { value: undefined };
const n = typeof port === 'number' ? port : Number(String(port).trim());
if (!Number.isFinite(n) || !Number.isInteger(n) || n <= 0 || n > 65535) {
return {
value: undefined,
warning: {
code: 'SBK_S3_INVALID_PORT',
message: `Invalid port '${String(port)}' - expected integer in [1..65535].`
}
};
}
return { value: n };
}
function sanitizeEndpointString(raw: unknown): { value: string; warnings: IS3Warning[] } {
const warnings: IS3Warning[] = [];
let s = String(raw ?? '').trim();
if (s !== String(raw ?? '')) {
warnings.push({
code: 'SBK_S3_TRIMMED_ENDPOINT',
message: 'Trimmed surrounding whitespace from endpoint.'
});
}
return { value: s, warnings };
}
function parseEndpointHostPort(
endpoint: string,
provisionalProtocol: 'http' | 'https'
): {
hadScheme: boolean;
host: string;
port?: number;
extras: {
droppedPath?: boolean;
droppedQuery?: boolean;
droppedCreds?: boolean
}
} {
let url: URL | undefined;
const extras: { droppedPath?: boolean; droppedQuery?: boolean; droppedCreds?: boolean } = {};
// Check if endpoint already has a scheme
const hasScheme = /^https?:\/\//i.test(endpoint);
// Try parsing as full URL first
try {
if (hasScheme) {
url = new URL(endpoint);
} else {
// Not a full URL; try host[:port] by attaching provisional scheme
// Remove anything after first '/' for safety
const cleanEndpoint = endpoint.replace(/\/.*/, '');
url = new URL(`${provisionalProtocol}://${cleanEndpoint}`);
}
} catch (e) {
throw new Error(`Unable to parse endpoint '${endpoint}'.`);
}
// Check for dropped components
if (url.username || url.password) extras.droppedCreds = true;
if (url.pathname && url.pathname !== '/') extras.droppedPath = true;
if (url.search) extras.droppedQuery = true;
const hadScheme = hasScheme;
const host = url.hostname; // hostnames lowercased by URL; IPs preserved
const port = url.port ? Number(url.port) : undefined;
return { hadScheme, host, port, extras };
}
export function normalizeS3Descriptor(
input: plugins.tsclass.storage.IS3Descriptor,
logger?: { warn: (msg: string) => void }
): { normalized: INormalizedS3Config; warnings: IS3Warning[] } {
const warnings: IS3Warning[] = [];
const logWarn = (w: IS3Warning) => {
warnings.push(w);
if (logger) {
logger.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
} else {
console.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
}
};
// Coerce and sanitize inputs
const { value: coercedUseSsl, warning: useSslWarn } = coerceBooleanMaybe((input as any).useSsl);
if (useSslWarn) logWarn(useSslWarn);
const { value: coercedPort, warning: portWarn } = coercePortMaybe((input as any).port);
if (portWarn) logWarn(portWarn);
const { value: endpointStr, warnings: endpointSanWarnings } = sanitizeEndpointString((input as any).endpoint);
endpointSanWarnings.forEach(logWarn);
if (!endpointStr) {
throw new Error('S3 endpoint is required (got empty string). Provide hostname or URL.');
}
// Provisional protocol selection for parsing host:port forms
const provisionalProtocol: 'http' | 'https' = coercedUseSsl === false ? 'http' : 'https';
const { hadScheme, host, port: epPort, extras } = parseEndpointHostPort(endpointStr, provisionalProtocol);
if (extras.droppedCreds) {
logWarn({
code: 'SBK_S3_DROPPED_CREDENTIALS',
message: 'Ignored credentials in endpoint URL.'
});
}
if (extras.droppedPath) {
logWarn({
code: 'SBK_S3_DROPPED_PATH',
message: 'Removed path segment from endpoint URL; S3 endpoint should be host[:port] only.'
});
}
if (extras.droppedQuery) {
logWarn({
code: 'SBK_S3_DROPPED_QUERY',
message: 'Removed query string from endpoint URL; S3 endpoint should be host[:port] only.'
});
}
// Final protocol decision
let finalProtocol: 'http' | 'https';
if (hadScheme) {
// Scheme from endpoint wins
const schemeFromEndpoint = endpointStr.trim().toLowerCase().startsWith('http://') ? 'http' : 'https';
finalProtocol = schemeFromEndpoint;
if (typeof coercedUseSsl === 'boolean') {
const expected = coercedUseSsl ? 'https' : 'http';
if (expected !== finalProtocol) {
logWarn({
code: 'SBK_S3_SCHEME_CONFLICT',
message: `useSsl=${String(coercedUseSsl)} conflicts with endpoint scheme '${finalProtocol}'; using endpoint scheme.`
});
}
}
} else {
if (typeof coercedUseSsl === 'boolean') {
finalProtocol = coercedUseSsl ? 'https' : 'http';
} else {
finalProtocol = 'https';
logWarn({
code: 'SBK_S3_GUESSED_PROTOCOL',
message: "No scheme in endpoint and useSsl not provided; defaulting to 'https'."
});
}
}
// Final port decision
let finalPort: number | undefined = undefined;
if (coercedPort !== undefined && epPort !== undefined && coercedPort !== epPort) {
logWarn({
code: 'SBK_S3_PORT_CONFLICT',
message: `Port in config (${coercedPort}) conflicts with endpoint port (${epPort}); using config port.`
});
finalPort = coercedPort;
} else {
finalPort = (coercedPort !== undefined) ? coercedPort : epPort;
}
// Build canonical endpoint URL (origin only, no trailing slash)
const url = new URL(`${finalProtocol}://${host}`);
if (finalPort !== undefined) url.port = String(finalPort);
const endpointUrl = url.origin;
const region = input.region || 'us-east-1';
return {
normalized: {
endpointUrl,
host,
protocol: finalProtocol,
port: finalPort,
region,
credentials: {
accessKeyId: input.accessKey,
secretAccessKey: input.accessSecret,
},
forcePathStyle: true,
},
warnings,
};
}
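A hedged example of calling the normalizer directly; the endpoint value is invented to exercise the path-dropping warning:

// Sketch: the endpoint scheme wins over useSsl, path/query are stripped, and warnings report each coercion
const { normalized, warnings } = normalizeS3Descriptor({
  endpoint: 'http://minio.local:9000/some/path',
  accessKey: 'ACCESS',
  accessSecret: 'SECRET',
} as any); // cast: the full IS3Descriptor shape isn't shown in this diff
console.log(normalized.endpointUrl);      // 'http://minio.local:9000'
console.log(warnings.map((w) => w.code)); // ['SBK_S3_DROPPED_PATH']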

View File

@@ -2,3 +2,5 @@ export * from './classes.smartbucket.js';
export * from './classes.bucket.js';
export * from './classes.directory.js';
export * from './classes.file.js';
+ export * from './classes.metadata.js';
+ export * from './classes.trash.js';

View File

@@ -1,8 +1,8 @@
// plugins.ts
// node native
- import * as path from 'path';
- import * as stream from 'stream';
+ import * as path from 'node:path';
+ import * as stream from 'node:stream';
export { path, stream };