Compare commits
18 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 575cff4d09 | |||
| 6760fd480d | |||
| bd73004bd6 | |||
| 65c7bcf12c | |||
| dd6efa4908 | |||
| 1f4b7319d3 | |||
| b8e5d9a222 | |||
| 429375a643 | |||
| e147a077f3 | |||
| 5889396134 | |||
| 0c631383e1 | |||
| d852d8c85b | |||
| fa4c44ae04 | |||
| 708b0b63b1 | |||
| 8554554642 | |||
| a04aabf78b | |||
| 47cf2cc2cb | |||
| ef20e15d20 |
140
.gitlab-ci.yml
140
.gitlab-ci.yml
@@ -1,140 +0,0 @@
|
||||
# gitzone ci_default
|
||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||
|
||||
cache:
|
||||
paths:
|
||||
- .npmci_cache/
|
||||
key: '$CI_BUILD_STAGE'
|
||||
|
||||
stages:
|
||||
- security
|
||||
- test
|
||||
- release
|
||||
- metadata
|
||||
|
||||
before_script:
|
||||
- npm install -g @shipzone/npmci
|
||||
|
||||
# ====================
|
||||
# security stage
|
||||
# ====================
|
||||
mirror:
|
||||
stage: security
|
||||
script:
|
||||
- npmci git mirror
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- lossless
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
auditProductionDependencies:
|
||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||
stage: security
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci command npm install --production --ignore-scripts
|
||||
- npmci command npm config set registry https://registry.npmjs.org
|
||||
- npmci command npm audit --audit-level=high --only=prod --production
|
||||
tags:
|
||||
- docker
|
||||
allow_failure: true
|
||||
|
||||
auditDevDependencies:
|
||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||
stage: security
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci command npm install --ignore-scripts
|
||||
- npmci command npm config set registry https://registry.npmjs.org
|
||||
- npmci command npm audit --audit-level=high --only=dev
|
||||
tags:
|
||||
- docker
|
||||
allow_failure: true
|
||||
|
||||
# ====================
|
||||
# test stage
|
||||
# ====================
|
||||
|
||||
testStable:
|
||||
stage: test
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci node install stable
|
||||
- npmci npm install
|
||||
- npmci npm test
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
|
||||
testBuild:
|
||||
stage: test
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci node install stable
|
||||
- npmci npm install
|
||||
- npmci command npm run build
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
|
||||
release:
|
||||
stage: release
|
||||
script:
|
||||
- npmci node install stable
|
||||
- npmci npm publish
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- lossless
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
# ====================
|
||||
# metadata stage
|
||||
# ====================
|
||||
codequality:
|
||||
stage: metadata
|
||||
allow_failure: true
|
||||
only:
|
||||
- tags
|
||||
script:
|
||||
- npmci command npm install -g typescript
|
||||
- npmci npm prepare
|
||||
- npmci npm install
|
||||
tags:
|
||||
- lossless
|
||||
- docker
|
||||
- priv
|
||||
|
||||
trigger:
|
||||
stage: metadata
|
||||
script:
|
||||
- npmci trigger
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- lossless
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
pages:
|
||||
stage: metadata
|
||||
script:
|
||||
- npmci node install lts
|
||||
- npmci command npm install -g @git.zone/tsdoc
|
||||
- npmci npm prepare
|
||||
- npmci npm install
|
||||
- npmci command tsdoc
|
||||
tags:
|
||||
- lossless
|
||||
- docker
|
||||
- notpriv
|
||||
only:
|
||||
- tags
|
||||
artifacts:
|
||||
expire_in: 1 week
|
||||
paths:
|
||||
- public
|
||||
allow_failure: true
|
||||
216
changelog.md
216
changelog.md
@@ -1,5 +1,221 @@
|
||||
# Changelog
|
||||
|
||||
## 2026-01-24 - 4.3.1 - fix(bucket)
|
||||
propagate S3 client errors instead of silently logging them; update build script, bump dev/dependencies, and refresh npmextra configuration
|
||||
|
||||
- Remove .catch(...) wrappers around s3Client.send so errors are no longer swallowed and will propagate to callers
|
||||
- Update build script to use 'tsbuild tsfolders --allowimplicitany'
|
||||
- Bump devDependencies: @git.zone/tsbuild to ^4.1.2, @git.zone/tsrun to ^2.0.1, @git.zone/tstest to ^3.1.6
|
||||
- Bump dependency @aws-sdk/client-s3 to ^3.975.0
|
||||
- Adjust npmextra.json structure (@git.zone/cli, @git.zone/tsdoc), add release registries and @ship.zone/szci entry
|
||||
- Remove pnpm-workspace.yaml onlyBuiltDependencies configuration
|
||||
|
||||
## 2025-11-20 - 4.3.0 - feat(listing)
|
||||
Add memory-efficient listing APIs: async generator, RxJS observable, and cursor pagination; export ListCursor and Minimatch; add minimatch dependency; bump to 4.2.0
|
||||
|
||||
- Added memory-efficient listing methods on Bucket: listAllObjects (async generator), listAllObjectsObservable (RxJS Observable), createCursor (returns ListCursor) and listAllObjectsArray (convenience array collector).
|
||||
- New ListCursor class (ts/classes.listcursor.ts) providing page-based iteration: next(), hasMore(), reset(), getToken()/setToken().
|
||||
- Added glob matching helper findByGlob(pattern) using minimatch (exported via plugins.Minimatch).
|
||||
- Exported ListCursor from ts/index.ts and exported Minimatch via ts/plugins.ts.
|
||||
- Added minimatch dependency in package.json and bumped package version to 4.2.0; increased test timeout to 120s.
|
||||
- Updated tests to read S3_SECRETKEY, S3_PORT and to assert bucket name from env (test/test.node+deno.ts, test/test.trash.node+deno.ts).
|
||||
- No breaking changes: new APIs are additive and existing behavior preserved.
|
||||
|
||||
## 2025-11-20 - 4.2.0 - feat(listing)
|
||||
Add memory-efficient listing with async generators, RxJS observables, and cursor pagination for huge buckets
|
||||
|
||||
**New Memory-Efficient Listing Methods:**
|
||||
|
||||
**Async Generator (Recommended for most use cases):**
|
||||
- `Bucket.listAllObjects(prefix?)` - Stream object keys one at a time using `for await...of`
|
||||
- `Bucket.findByGlob(pattern)` - Find objects matching glob patterns (e.g., `**/*.json`, `npm/packages/*/index.json`)
|
||||
- Memory efficient, supports early termination, composable
|
||||
|
||||
**RxJS Observable (For complex reactive pipelines):**
|
||||
- `Bucket.listAllObjectsObservable(prefix?)` - Emit keys as Observable for use with RxJS operators (filter, map, take, etc.)
|
||||
- Perfect for complex data transformations and reactive architectures
|
||||
|
||||
**Cursor Pattern (For manual pagination control):**
|
||||
- `Bucket.createCursor(prefix?, options?)` - Create cursor for explicit page-by-page iteration
|
||||
- `ListCursor.next()` - Fetch next page of results
|
||||
- `ListCursor.hasMore()` - Check if more results available
|
||||
- `ListCursor.reset()` - Reset to beginning
|
||||
- `ListCursor.getToken()` / `ListCursor.setToken()` - Save/restore pagination state
|
||||
- Ideal for UI pagination and resumable operations
|
||||
|
||||
**Convenience Methods:**
|
||||
- `Bucket.listAllObjectsArray(prefix?)` - Collect all keys into array (WARNING: loads all into memory)
|
||||
|
||||
**Benefits:**
|
||||
- ✅ Memory-efficient streaming for buckets with millions of objects
|
||||
- ✅ Three patterns for different use cases (generators, observables, cursors)
|
||||
- ✅ Support for early termination and incremental processing
|
||||
- ✅ Glob pattern matching with minimatch
|
||||
- ✅ Full TypeScript support with proper types
|
||||
- ✅ Zero breaking changes - all new methods
|
||||
|
||||
**Dependencies:**
|
||||
- Added `minimatch` for glob pattern support
|
||||
|
||||
**Files Changed:**
|
||||
- `ts/classes.bucket.ts` - Added all listing methods
|
||||
- `ts/classes.listcursor.ts` - NEW: Cursor implementation
|
||||
- `ts/plugins.ts` - Export Minimatch
|
||||
- `ts/index.ts` - Export ListCursor
|
||||
- `test/test.listing.node+deno.ts` - NEW: Comprehensive listing tests
|
||||
- `package.json` - Added minimatch dependency
|
||||
|
||||
## 2025-11-20 - 4.1.0 - feat(core)
|
||||
Add S3 endpoint normalization, directory pagination, improved metadata checks, trash support, and related tests
|
||||
|
||||
- Add normalizeS3Descriptor helper to sanitize and normalize various S3 endpoint formats and emit warnings for mismatches (helpers.ts).
|
||||
- Use normalized endpoint and credentials when constructing S3 client in SmartBucket (classes.smartbucket.ts).
|
||||
- Implement paginated listing helper listObjectsV2AllPages in Directory and use it for listFiles and listDirectories to aggregate Contents and CommonPrefixes across pages (classes.directory.ts).
|
||||
- Improve MetaData.hasMetaData to catch NotFound errors and return false instead of throwing (classes.metadata.ts).
|
||||
- Export metadata and trash modules from index (ts/index.ts) and add a Trash class with utilities for trashed files and key encoding (classes.trash.ts).
|
||||
- Enhance Bucket operations: fastCopy now preserves or replaces native metadata correctly, cleanAllContents supports paginated deletion, and improved fastExists error handling (classes.bucket.ts).
|
||||
- Fix Directory.getSubDirectoryByName to construct new Directory instances with the correct parent directory reference.
|
||||
- Add tests covering metadata absence and pagination behavior (test/test.local.node+deno.ts).
|
||||
|
||||
## 2025-11-20 - 4.0.1 - fix(plugins)
|
||||
Use explicit node: imports for native path and stream modules in ts/plugins.ts
|
||||
|
||||
- Replaced imports of 'path' and 'stream' with 'node:path' and 'node:stream' in ts/plugins.ts.
|
||||
- Ensures correct ESM resolution of Node built-ins when package.json type is 'module' and avoids accidental conflicts with userland packages.
|
||||
|
||||
## 2025-11-20 - 4.0.0 - BREAKING CHANGE(core)
|
||||
Make API strict-by-default: remove *Strict variants, throw on not-found/exists conflicts, add explicit exists() methods, update docs/tests and bump deps
|
||||
|
||||
- Breaking: Core API methods are strict by default and now throw errors instead of returning null when targets are missing or already exist (e.g. getBucketByName, getFile, getSubDirectoryByName, fastPut, fastPutStream).
|
||||
- Removed *Strict variants: fastPutStrict, getBucketByNameStrict, getFileStrict, getSubDirectoryByNameStrict — use the base methods which are now strict.
|
||||
- Added explicit existence checks: bucketExists (SmartBucket), fileExists (Directory/fileExists), directoryExists (Directory.directoryExists), and fastExists (Bucket.fastExists) to allow non-throwing checks before operations.
|
||||
- Return type updates: fastPut now returns Promise<File> (no null), getBucketByName/getFile/getSubDirectoryByName now return the respective objects or throw.
|
||||
- Improved error messages to guide callers (e.g. suggest setting overwrite:true on fastPut when object exists).
|
||||
- Updated README, changelog and tests to reflect the new strict semantics and usage patterns.
|
||||
- Developer/runtime dependency bumps: @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tstest, @aws-sdk/client-s3, @push.rocks/smartstring, @tsclass/tsclass (version bumps recorded in package.json).
|
||||
- Major version bump to 4.0.0 to reflect breaking API changes.
|
||||
|
||||
## 2025-11-20 - 4.0.0 - BREAKING: Strict by default + exists methods
|
||||
Complete API overhaul: all methods throw by default, removed all *Strict variants, added dedicated exists methods
|
||||
|
||||
**Breaking Changes:**
|
||||
|
||||
**Putters (Write Operations):**
|
||||
- `fastPut`: Return type `Promise<File | null>` → `Promise<File>`, throws when file exists and overwrite is false
|
||||
- `fastPutStream`: Now throws when file exists and overwrite is false (previously returned silently)
|
||||
- `fastPutStrict`: **Removed** - use `fastPut` directly
|
||||
|
||||
**Getters (Read Operations):**
|
||||
- `getBucketByName`: Return type `Promise<Bucket | null>` → `Promise<Bucket>`, throws when bucket not found
|
||||
- `getBucketByNameStrict`: **Removed** - use `getBucketByName` directly
|
||||
- `getFile`: Return type `Promise<File | null>` → `Promise<File>`, throws when file not found
|
||||
- `getFileStrict`: **Removed** - use `getFile` directly
|
||||
- `getSubDirectoryByName`: Return type `Promise<Directory | null>` → `Promise<Directory>`, throws when directory not found
|
||||
- `getSubDirectoryByNameStrict`: **Removed** - use `getSubDirectoryByName` directly
|
||||
|
||||
**New Methods (Existence Checks):**
|
||||
- `bucket.fastExists({ path })` - ✅ Already existed
|
||||
- `directory.fileExists({ path })` - **NEW** - Check if file exists
|
||||
- `directory.directoryExists(name)` - **NEW** - Check if subdirectory exists
|
||||
- `smartBucket.bucketExists(name)` - **NEW** - Check if bucket exists
|
||||
|
||||
**Benefits:**
|
||||
- ✅ **Simpler API**: Removed 4 redundant *Strict methods
|
||||
- ✅ **Type-safe**: No nullable returns - `Promise<T>` not `Promise<T | null>`
|
||||
- ✅ **Fail-fast**: Errors throw immediately with precise stack traces
|
||||
- ✅ **Consistent**: All methods behave the same way
|
||||
- ✅ **Explicit**: Use exists() to check, then get() to retrieve
|
||||
- ✅ **Better debugging**: Error location is always precise
|
||||
|
||||
**Migration Guide:**
|
||||
|
||||
```typescript
|
||||
// ============================================
|
||||
// Pattern 1: Check then Get (Recommended)
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const bucket = await smartBucket.getBucketByName('my-bucket');
|
||||
if (bucket) {
|
||||
// use bucket
|
||||
}
|
||||
|
||||
// After (v4.0):
|
||||
if (await smartBucket.bucketExists('my-bucket')) {
|
||||
const bucket = await smartBucket.getBucketByName('my-bucket'); // guaranteed to exist
|
||||
// use bucket
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// Pattern 2: Try/Catch
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const file = await directory.getFile({ path: 'file.txt' });
|
||||
if (!file) {
|
||||
// Handle not found
|
||||
}
|
||||
|
||||
// After (v4.0):
|
||||
try {
|
||||
const file = await directory.getFile({ path: 'file.txt' });
|
||||
// use file
|
||||
} catch (error) {
|
||||
// Handle not found
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// Pattern 3: Remove *Strict calls
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const file = await directory.getFileStrict({ path: 'file.txt' });
|
||||
|
||||
// After (v4.0):
|
||||
const file = await directory.getFile({ path: 'file.txt' }); // already strict
|
||||
|
||||
// ============================================
|
||||
// Pattern 4: Write Operations
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const file = await bucket.fastPutStrict({ path: 'file.txt', contents: 'data' });
|
||||
|
||||
// After (v4.0):
|
||||
const file = await bucket.fastPut({ path: 'file.txt', contents: 'data' }); // already strict
|
||||
```
|
||||
|
||||
## 2025-08-18 - 3.3.10 - fix(helpers)
|
||||
Normalize and robustly parse S3 endpoint configuration; use normalized descriptor in SmartBucket and update dev tooling
|
||||
|
||||
- Add normalizeS3Descriptor to ts/helpers.ts: robust endpoint parsing, coercion of useSsl/port, sanitization, warnings for dropped URL parts, and canonical endpoint URL output.
|
||||
- Update SmartBucket (ts/classes.smartbucket.ts) to use the normalized endpoint, region, credentials and forcePathStyle from normalizeS3Descriptor.
|
||||
- Adjust dev tooling: bump @git.zone/tsbuild -> ^2.6.7, @git.zone/tstest -> ^2.3.4, @push.rocks/qenv -> ^6.1.3 and update test script to run tstest with --verbose --logfile --timeout 60.
|
||||
- Add .claude/settings.local.json containing local assistant/CI permission settings (local config only).
|
||||
|
||||
## 2025-08-15 - 3.3.9 - fix(docs)
|
||||
Revise README with detailed usage examples and add local Claude settings
|
||||
|
||||
- Revamped README: reorganized content, added emojis and clearer headings for install, getting started, bucket/file/directory operations, streaming, metadata, trash/recovery, locking, and advanced configuration.
|
||||
- Added many concrete code examples for SmartBucket, Bucket, Directory, File, streaming (node/web), RxJS replay subjects, metadata handling, trash workflow, file locking, magic-bytes detection, JSON operations, and cleaning bucket contents.
|
||||
- Included testing instructions (pnpm test) and a Best Practices section with recommendations for strict mode, streaming, metadata, trash usage, and locking.
|
||||
- Added .claude/settings.local.json to include local Claude configuration and tool permissions.
|
||||
- No source code or public API changes; documentation and local tooling config only.
|
||||
|
||||
## 2025-08-15 - 3.3.8 - fix(tests)
|
||||
Update tests to use @git.zone/tstest, upgrade dependencies, remove GitLab CI and add local CI/workspace config
|
||||
|
||||
- Tests: replace imports from @push.rocks/tapbundle with @git.zone/tstest/tapbundle and switch tap.start() to export default tap.start()
|
||||
- Dependencies: bump @aws-sdk/client-s3 and several @push.rocks packages; upgrade @tsclass/tsclass to a newer major
|
||||
- DevDependencies: upgrade @git.zone/tsbuild, @git.zone/tstest, @push.rocks/qenv, and @push.rocks/tapbundle
|
||||
- CI/config: remove .gitlab-ci.yml, add .claude/settings.local.json
|
||||
- Workspace: add pnpm-workspace.yaml and packageManager field in package.json
|
||||
|
||||
## 2024-12-02 - 3.3.7 - fix(package)
|
||||
Update author field in package.json
|
||||
|
||||
- Corrected the author field from 'Lossless GmbH' to 'Task Venture Capital GmbH' in the package.json file.
|
||||
|
||||
## 2024-12-02 - 3.3.6 - fix(package)
|
||||
Fix license field in package.json to reflect MIT licensing
|
||||
|
||||
|
||||
@@ -1,8 +1,5 @@
|
||||
{
|
||||
"npmci": {
|
||||
"npmGlobalTools": []
|
||||
},
|
||||
"gitzone": {
|
||||
"@git.zone/cli": {
|
||||
"projectType": "npm",
|
||||
"module": {
|
||||
"githost": "code.foss.global",
|
||||
@@ -33,9 +30,19 @@
|
||||
"data management",
|
||||
"streaming"
|
||||
]
|
||||
},
|
||||
"release": {
|
||||
"registries": [
|
||||
"https://verdaccio.lossless.digital",
|
||||
"https://registry.npmjs.org"
|
||||
],
|
||||
"accessLevel": "public"
|
||||
}
|
||||
},
|
||||
"tsdoc": {
|
||||
"@git.zone/tsdoc": {
|
||||
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||
},
|
||||
"@ship.zone/szci": {
|
||||
"npmGlobalTools": []
|
||||
}
|
||||
}
|
||||
4
package-lock.json
generated
4
package-lock.json
generated
@@ -1,12 +1,12 @@
|
||||
{
|
||||
"name": "@push.rocks/smartbucket",
|
||||
"version": "3.3.6",
|
||||
"version": "3.3.10",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {
|
||||
"": {
|
||||
"name": "@push.rocks/smartbucket",
|
||||
"version": "3.3.6",
|
||||
"version": "3.3.10",
|
||||
"license": "UNLICENSED",
|
||||
"dependencies": {
|
||||
"@push.rocks/smartpath": "^5.0.18",
|
||||
|
||||
34
package.json
34
package.json
@@ -1,33 +1,34 @@
|
||||
{
|
||||
"name": "@push.rocks/smartbucket",
|
||||
"version": "3.3.6",
|
||||
"version": "4.3.1",
|
||||
"description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
|
||||
"main": "dist_ts/index.js",
|
||||
"typings": "dist_ts/index.d.ts",
|
||||
"type": "module",
|
||||
"author": "Lossless GmbH",
|
||||
"author": "Task Venture Capital GmbH",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "(tstest test/)",
|
||||
"build": "(tsbuild --web --allowimplicitany)"
|
||||
"test": "(tstest test/ --verbose --logfile --timeout 120)",
|
||||
"build": "(tsbuild tsfolders --allowimplicitany)"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@git.zone/tsbuild": "^2.1.84",
|
||||
"@git.zone/tsrun": "^1.2.49",
|
||||
"@git.zone/tstest": "^1.0.90",
|
||||
"@push.rocks/qenv": "^6.1.0",
|
||||
"@push.rocks/tapbundle": "^5.5.3"
|
||||
"@git.zone/tsbuild": "^4.1.2",
|
||||
"@git.zone/tsrun": "^2.0.1",
|
||||
"@git.zone/tstest": "^3.1.6",
|
||||
"@push.rocks/qenv": "^6.1.3",
|
||||
"@push.rocks/tapbundle": "^6.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.699.0",
|
||||
"@aws-sdk/client-s3": "^3.975.0",
|
||||
"@push.rocks/smartmime": "^2.0.4",
|
||||
"@push.rocks/smartpath": "^5.0.18",
|
||||
"@push.rocks/smartpromise": "^4.0.4",
|
||||
"@push.rocks/smartrx": "^3.0.7",
|
||||
"@push.rocks/smartpath": "^6.0.0",
|
||||
"@push.rocks/smartpromise": "^4.2.3",
|
||||
"@push.rocks/smartrx": "^3.0.10",
|
||||
"@push.rocks/smartstream": "^3.2.5",
|
||||
"@push.rocks/smartstring": "^4.0.15",
|
||||
"@push.rocks/smartstring": "^4.1.0",
|
||||
"@push.rocks/smartunique": "^3.0.9",
|
||||
"@tsclass/tsclass": "^4.1.2"
|
||||
"@tsclass/tsclass": "^9.3.0",
|
||||
"minimatch": "^10.1.1"
|
||||
},
|
||||
"private": false,
|
||||
"files": [
|
||||
@@ -71,5 +72,6 @@
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://code.foss.global/push.rocks/smartbucket.git"
|
||||
}
|
||||
},
|
||||
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
|
||||
}
|
||||
|
||||
8443
pnpm-lock.yaml
generated
8443
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
@@ -1,3 +1,5 @@
|
||||
* The project uses the official s3 client, not the minio client.
|
||||
* notice the difference between *Strict methods and the normal methods.
|
||||
* **All methods throw by default** (strict mode): - Put operations: `fastPut`, `fastPutStream` throw when file exists and overwrite is false - Get operations: `getBucketByName`, `getFile`, `getSubDirectoryByName` throw when not found
|
||||
* **Use exists() methods to check before getting**: `bucketExists`, `fileExists`, `directoryExists`, `fastExists`
|
||||
* **No *Strict methods**: All removed (fastPutStrict, getBucketByNameStrict, getFileStrict, getSubDirectoryByNameStrict)
|
||||
* metadata is handled though the MetaData class. Important!
|
||||
|
||||
298
test/test.listing.node+deno.ts
Normal file
298
test/test.listing.node+deno.ts
Normal file
@@ -0,0 +1,298 @@
|
||||
// test.listing.node+deno.ts - Tests for memory-efficient listing methods
|
||||
|
||||
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||
import * as smartbucket from '../ts/index.js';
|
||||
|
||||
// Get test configuration
|
||||
import * as qenv from '@push.rocks/qenv';
|
||||
const testQenv = new qenv.Qenv('./', './.nogit/');
|
||||
|
||||
// Test bucket reference
|
||||
let testBucket: smartbucket.Bucket;
|
||||
let testSmartbucket: smartbucket.SmartBucket;
|
||||
|
||||
// Setup: Create test bucket and populate with test data
|
||||
tap.test('should create valid smartbucket and bucket', async () => {
|
||||
testSmartbucket = new smartbucket.SmartBucket({
|
||||
accessKey: await testQenv.getEnvVarOnDemand('S3_ACCESSKEY'),
|
||||
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRETKEY'),
|
||||
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
|
||||
port: parseInt(await testQenv.getEnvVarOnDemand('S3_PORT')),
|
||||
useSsl: false,
|
||||
});
|
||||
|
||||
testBucket = await smartbucket.Bucket.getBucketByName(
|
||||
testSmartbucket,
|
||||
await testQenv.getEnvVarOnDemand('S3_BUCKET')
|
||||
);
|
||||
expect(testBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||
});
|
||||
|
||||
tap.test('should clean bucket and create test data for listing tests', async () => {
|
||||
// Clean bucket first
|
||||
await testBucket.cleanAllContents();
|
||||
|
||||
// Create test structure:
|
||||
// npm/packages/foo/index.json
|
||||
// npm/packages/foo/1.0.0.tgz
|
||||
// npm/packages/bar/index.json
|
||||
// npm/packages/bar/2.0.0.tgz
|
||||
// oci/blobs/sha256-abc.tar
|
||||
// oci/blobs/sha256-def.tar
|
||||
// oci/manifests/latest.json
|
||||
// docs/readme.md
|
||||
// docs/api.md
|
||||
|
||||
const testFiles = [
|
||||
'npm/packages/foo/index.json',
|
||||
'npm/packages/foo/1.0.0.tgz',
|
||||
'npm/packages/bar/index.json',
|
||||
'npm/packages/bar/2.0.0.tgz',
|
||||
'oci/blobs/sha256-abc.tar',
|
||||
'oci/blobs/sha256-def.tar',
|
||||
'oci/manifests/latest.json',
|
||||
'docs/readme.md',
|
||||
'docs/api.md',
|
||||
];
|
||||
|
||||
for (const filePath of testFiles) {
|
||||
await testBucket.fastPut({
|
||||
path: filePath,
|
||||
contents: `test content for ${filePath}`,
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
// ==========================
|
||||
// Async Generator Tests
|
||||
// ==========================
|
||||
|
||||
tap.test('listAllObjects should iterate all objects with prefix', async () => {
|
||||
const keys: string[] = [];
|
||||
for await (const key of testBucket.listAllObjects('npm/')) {
|
||||
keys.push(key);
|
||||
}
|
||||
|
||||
expect(keys.length).toEqual(4);
|
||||
expect(keys).toContain('npm/packages/foo/index.json');
|
||||
expect(keys).toContain('npm/packages/bar/2.0.0.tgz');
|
||||
});
|
||||
|
||||
tap.test('listAllObjects should support early termination', async () => {
|
||||
let count = 0;
|
||||
for await (const key of testBucket.listAllObjects('')) {
|
||||
count++;
|
||||
if (count >= 3) break; // Early exit
|
||||
}
|
||||
|
||||
expect(count).toEqual(3);
|
||||
});
|
||||
|
||||
tap.test('listAllObjects without prefix should list all objects', async () => {
|
||||
const keys: string[] = [];
|
||||
for await (const key of testBucket.listAllObjects()) {
|
||||
keys.push(key);
|
||||
}
|
||||
|
||||
expect(keys.length).toBeGreaterThanOrEqual(9);
|
||||
});
|
||||
|
||||
// ==========================
|
||||
// Observable Tests
|
||||
// ==========================
|
||||
|
||||
tap.test('listAllObjectsObservable should emit all objects', async () => {
|
||||
const keys: string[] = [];
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
testBucket.listAllObjectsObservable('oci/')
|
||||
.subscribe({
|
||||
next: (key) => keys.push(key),
|
||||
error: (err) => reject(err),
|
||||
complete: () => resolve(),
|
||||
});
|
||||
});
|
||||
|
||||
expect(keys.length).toEqual(3);
|
||||
expect(keys).toContain('oci/blobs/sha256-abc.tar');
|
||||
expect(keys).toContain('oci/manifests/latest.json');
|
||||
});
|
||||
|
||||
tap.test('listAllObjectsObservable should support RxJS operators', async () => {
|
||||
const jsonFiles: string[] = [];
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
testBucket.listAllObjectsObservable('npm/')
|
||||
.subscribe({
|
||||
next: (key: string) => {
|
||||
if (key.endsWith('.json')) {
|
||||
jsonFiles.push(key);
|
||||
}
|
||||
},
|
||||
error: (err: any) => reject(err),
|
||||
complete: () => resolve(),
|
||||
});
|
||||
});
|
||||
|
||||
expect(jsonFiles.length).toEqual(2);
|
||||
expect(jsonFiles.every((k) => k.endsWith('.json'))).toBeTrue();
|
||||
});
|
||||
|
||||
// ==========================
|
||||
// Cursor Tests
|
||||
// ==========================
|
||||
|
||||
tap.test('createCursor should allow manual pagination', async () => {
|
||||
const cursor = testBucket.createCursor('npm/', { pageSize: 2 });
|
||||
|
||||
// First page
|
||||
const page1 = await cursor.next();
|
||||
expect(page1.keys.length).toEqual(2);
|
||||
expect(page1.done).toBeFalse();
|
||||
|
||||
// Second page
|
||||
const page2 = await cursor.next();
|
||||
expect(page2.keys.length).toEqual(2);
|
||||
expect(page2.done).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('cursor.hasMore() should accurately track state', async () => {
|
||||
const cursor = testBucket.createCursor('docs/', { pageSize: 10 });
|
||||
|
||||
expect(cursor.hasMore()).toBeTrue();
|
||||
|
||||
await cursor.next(); // Should get all docs files
|
||||
|
||||
expect(cursor.hasMore()).toBeFalse();
|
||||
});
|
||||
|
||||
tap.test('cursor.reset() should allow re-iteration', async () => {
|
||||
const cursor = testBucket.createCursor('docs/');
|
||||
|
||||
const firstRun = await cursor.next();
|
||||
expect(firstRun.keys.length).toBeGreaterThan(0);
|
||||
|
||||
cursor.reset();
|
||||
expect(cursor.hasMore()).toBeTrue();
|
||||
|
||||
const secondRun = await cursor.next();
|
||||
expect(secondRun.keys).toEqual(firstRun.keys);
|
||||
});
|
||||
|
||||
tap.test('cursor should support save/restore with token', async () => {
|
||||
const cursor1 = testBucket.createCursor('npm/', { pageSize: 2 });
|
||||
|
||||
await cursor1.next(); // Advance cursor
|
||||
const token = cursor1.getToken();
|
||||
expect(token).toBeDefined();
|
||||
|
||||
// Create new cursor and restore state
|
||||
const cursor2 = testBucket.createCursor('npm/', { pageSize: 2 });
|
||||
cursor2.setToken(token);
|
||||
|
||||
const page = await cursor2.next();
|
||||
expect(page.keys.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
// ==========================
|
||||
// findByGlob Tests
|
||||
// ==========================
|
||||
|
||||
tap.test('findByGlob should match simple patterns', async () => {
|
||||
const matches: string[] = [];
|
||||
for await (const key of testBucket.findByGlob('**/*.json')) {
|
||||
matches.push(key);
|
||||
}
|
||||
|
||||
expect(matches.length).toEqual(3); // foo/index.json, bar/index.json, latest.json
|
||||
expect(matches.every((k) => k.endsWith('.json'))).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('findByGlob should match specific path patterns', async () => {
|
||||
const matches: string[] = [];
|
||||
for await (const key of testBucket.findByGlob('npm/packages/*/index.json')) {
|
||||
matches.push(key);
|
||||
}
|
||||
|
||||
expect(matches.length).toEqual(2);
|
||||
expect(matches).toContain('npm/packages/foo/index.json');
|
||||
expect(matches).toContain('npm/packages/bar/index.json');
|
||||
});
|
||||
|
||||
tap.test('findByGlob should match wildcard patterns', async () => {
|
||||
const matches: string[] = [];
|
||||
for await (const key of testBucket.findByGlob('oci/blobs/*')) {
|
||||
matches.push(key);
|
||||
}
|
||||
|
||||
expect(matches.length).toEqual(2);
|
||||
expect(matches.every((k) => k.startsWith('oci/blobs/'))).toBeTrue();
|
||||
});
|
||||
|
||||
// ==========================
|
||||
// listAllObjectsArray Tests
|
||||
// ==========================
|
||||
|
||||
tap.test('listAllObjectsArray should collect all keys into array', async () => {
|
||||
const keys = await testBucket.listAllObjectsArray('docs/');
|
||||
|
||||
expect(Array.isArray(keys)).toBeTrue();
|
||||
expect(keys.length).toEqual(2);
|
||||
expect(keys).toContain('docs/readme.md');
|
||||
expect(keys).toContain('docs/api.md');
|
||||
});
|
||||
|
||||
tap.test('listAllObjectsArray without prefix should return all objects', async () => {
|
||||
const keys = await testBucket.listAllObjectsArray();
|
||||
|
||||
expect(keys.length).toBeGreaterThanOrEqual(9);
|
||||
});
|
||||
|
||||
// ==========================
|
||||
// Performance/Edge Case Tests
|
||||
// ==========================
|
||||
|
||||
tap.test('should handle empty prefix results gracefully', async () => {
|
||||
const keys: string[] = [];
|
||||
for await (const key of testBucket.listAllObjects('nonexistent/')) {
|
||||
keys.push(key);
|
||||
}
|
||||
|
||||
expect(keys.length).toEqual(0);
|
||||
});
|
||||
|
||||
tap.test('cursor should handle empty results', async () => {
|
||||
const cursor = testBucket.createCursor('nonexistent/');
|
||||
const result = await cursor.next();
|
||||
|
||||
expect(result.keys.length).toEqual(0);
|
||||
expect(result.done).toBeTrue();
|
||||
expect(cursor.hasMore()).toBeFalse();
|
||||
});
|
||||
|
||||
tap.test('observable should complete immediately on empty results', async () => {
|
||||
let completed = false;
|
||||
let count = 0;
|
||||
|
||||
await new Promise<void>((resolve, reject) => {
|
||||
testBucket.listAllObjectsObservable('nonexistent/')
|
||||
.subscribe({
|
||||
next: () => count++,
|
||||
error: (err) => reject(err),
|
||||
complete: () => {
|
||||
completed = true;
|
||||
resolve();
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
expect(count).toEqual(0);
|
||||
expect(completed).toBeTrue();
|
||||
});
|
||||
|
||||
// Cleanup
|
||||
tap.test('should clean up test data', async () => {
|
||||
await testBucket.cleanAllContents();
|
||||
});
|
||||
|
||||
export default tap.start();
|
||||
76
test/test.local.node+deno.ts
Normal file
76
test/test.local.node+deno.ts
Normal file
@@ -0,0 +1,76 @@
|
||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||
|
||||
import * as plugins from '../ts/plugins.js';
|
||||
import * as smartbucket from '../ts/index.js';
|
||||
|
||||
class FakeS3Client {
|
||||
private callIndex = 0;
|
||||
|
||||
constructor(private readonly pages: Array<Partial<plugins.s3.ListObjectsV2Output>>) {}
|
||||
|
||||
public async send(_command: any) {
|
||||
const page = this.pages[this.callIndex] || { Contents: [], CommonPrefixes: [], IsTruncated: false };
|
||||
this.callIndex += 1;
|
||||
return page;
|
||||
}
|
||||
}
|
||||
|
||||
tap.test('MetaData.hasMetaData should return false when metadata file does not exist', async () => {
|
||||
const fakeFile = {
|
||||
name: 'file.txt',
|
||||
parentDirectoryRef: {
|
||||
async getFile() {
|
||||
throw new Error(`File not found at path 'file.txt.metadata'`);
|
||||
},
|
||||
},
|
||||
} as unknown as smartbucket.File;
|
||||
|
||||
const hasMetaData = await smartbucket.MetaData.hasMetaData({ file: fakeFile });
|
||||
expect(hasMetaData).toBeFalse();
|
||||
});
|
||||
|
||||
tap.test('getSubDirectoryByName should create correct parent chain for new nested directories', async () => {
|
||||
const fakeSmartbucket = { s3Client: new FakeS3Client([{ Contents: [], CommonPrefixes: [] }]) } as unknown as smartbucket.SmartBucket;
|
||||
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
|
||||
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
|
||||
|
||||
const nestedDirectory = await baseDirectory.getSubDirectoryByName('level1/level2', { getEmptyDirectory: true });
|
||||
|
||||
expect(nestedDirectory.name).toEqual('level2');
|
||||
expect(nestedDirectory.parentDirectoryRef.name).toEqual('level1');
|
||||
expect(nestedDirectory.getBasePath()).toEqual('level1/level2/');
|
||||
});
|
||||
|
||||
tap.test('listFiles should aggregate results across paginated ListObjectsV2 responses', async () => {
|
||||
const firstPage = {
|
||||
Contents: Array.from({ length: 1000 }, (_, index) => ({ Key: `file-${index}` })),
|
||||
IsTruncated: true,
|
||||
NextContinuationToken: 'token-1',
|
||||
};
|
||||
const secondPage = {
|
||||
Contents: Array.from({ length: 200 }, (_, index) => ({ Key: `file-${1000 + index}` })),
|
||||
IsTruncated: false,
|
||||
};
|
||||
const fakeSmartbucket = { s3Client: new FakeS3Client([firstPage, secondPage]) } as unknown as smartbucket.SmartBucket;
|
||||
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
|
||||
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
|
||||
|
||||
const files = await baseDirectory.listFiles();
|
||||
expect(files.length).toEqual(1200);
|
||||
});
|
||||
|
||||
tap.test('listDirectories should aggregate CommonPrefixes across pagination', async () => {
|
||||
const fakeSmartbucket = {
|
||||
s3Client: new FakeS3Client([
|
||||
{ CommonPrefixes: [{ Prefix: 'dirA/' }], IsTruncated: true, NextContinuationToken: 'token-1' },
|
||||
{ CommonPrefixes: [{ Prefix: 'dirB/' }], IsTruncated: false },
|
||||
]),
|
||||
} as unknown as smartbucket.SmartBucket;
|
||||
const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
|
||||
const baseDirectory = new smartbucket.Directory(bucket, null as any, '');
|
||||
|
||||
const directories = await baseDirectory.listDirectories();
|
||||
expect(directories.map((d) => d.name)).toEqual(['dirA', 'dirB']);
|
||||
});
|
||||
|
||||
export default tap.start();
|
||||
7
test/test.metadata.node+deno.ts
Normal file
7
test/test.metadata.node+deno.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||
|
||||
tap.test('test metadata functionality', async () => {
|
||||
|
||||
})
|
||||
|
||||
export default tap.start();
|
||||
@@ -1,7 +0,0 @@
|
||||
import { tap, expect } from '@push.rocks/tapbundle';
|
||||
|
||||
tap.test('test metadata functionality', async () => {
|
||||
|
||||
})
|
||||
|
||||
tap.start();
|
||||
@@ -1,4 +1,4 @@
|
||||
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||
import { Qenv } from '@push.rocks/qenv';
|
||||
|
||||
import * as smartbucket from '../ts/index.js';
|
||||
@@ -12,13 +12,16 @@ let baseDirectory: smartbucket.Directory;
|
||||
tap.test('should create a valid smartbucket', async () => {
|
||||
testSmartbucket = new smartbucket.SmartBucket({
|
||||
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
|
||||
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
|
||||
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_SECRETKEY'),
|
||||
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||
port: parseInt(await testQenv.getEnvVarOnDemandStrict('S3_PORT')),
|
||||
useSsl: false,
|
||||
});
|
||||
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
|
||||
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
|
||||
const bucketName = await testQenv.getEnvVarOnDemandStrict('S3_BUCKET');
|
||||
myBucket = await testSmartbucket.getBucketByName(bucketName);
|
||||
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
|
||||
expect(myBucket.name).toEqual(bucketName);
|
||||
});
|
||||
|
||||
tap.test('should clean all contents', async () => {
|
||||
@@ -126,4 +129,4 @@ tap.test('clean up directory style tests', async () => {
|
||||
await myBucket.fastRemove({ path: 'file1.txt' });
|
||||
});
|
||||
|
||||
tap.start();
|
||||
export default tap.start();
|
||||
@@ -1,4 +1,4 @@
|
||||
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||
import { jestExpect } from '@push.rocks/tapbundle/node';
|
||||
import { Qenv } from '@push.rocks/qenv';
|
||||
|
||||
@@ -13,13 +13,15 @@ let baseDirectory: smartbucket.Directory;
|
||||
tap.test('should create a valid smartbucket', async () => {
|
||||
testSmartbucket = new smartbucket.SmartBucket({
|
||||
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
|
||||
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
|
||||
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_SECRETKEY'),
|
||||
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||
port: parseInt(await testQenv.getEnvVarOnDemandStrict('S3_PORT')),
|
||||
useSsl: false,
|
||||
});
|
||||
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
|
||||
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
|
||||
const bucketName = await testQenv.getEnvVarOnDemandStrict('S3_BUCKET');
|
||||
myBucket = await testSmartbucket.getBucketByName(bucketName);
|
||||
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
|
||||
});
|
||||
|
||||
tap.test('should clean all contents', async () => {
|
||||
@@ -30,7 +32,7 @@ tap.test('should clean all contents', async () => {
|
||||
|
||||
tap.test('should delete a file into the normally', async () => {
|
||||
const path = 'trashtest/trashme.txt';
|
||||
const file = await myBucket.fastPutStrict({
|
||||
const file = await myBucket.fastPut({
|
||||
path,
|
||||
contents: 'I\'m in the trash test content!',
|
||||
});
|
||||
@@ -44,7 +46,7 @@ tap.test('should delete a file into the normally', async () => {
|
||||
|
||||
tap.test('should put a file into the trash', async () => {
|
||||
const path = 'trashtest/trashme.txt';
|
||||
const file = await myBucket.fastPutStrict({
|
||||
const file = await myBucket.fastPut({
|
||||
path,
|
||||
contents: 'I\'m in the trash test content!',
|
||||
});
|
||||
@@ -76,7 +78,7 @@ tap.test('should put a file into the trash', async () => {
|
||||
|
||||
tap.test('should restore a file from trash', async () => {
|
||||
const baseDirectory = await myBucket.getBaseDirectory();
|
||||
const file = await baseDirectory.getFileStrict({
|
||||
const file = await baseDirectory.getFile({
|
||||
path: 'trashtest/trashme.txt',
|
||||
getFromTrash: true
|
||||
});
|
||||
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
|
||||
name: '@push.rocks/smartbucket',
|
||||
version: '3.3.6',
|
||||
version: '4.3.1',
|
||||
description: 'A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.'
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import { SmartBucket } from './classes.smartbucket.js';
|
||||
import { Directory } from './classes.directory.js';
|
||||
import { File } from './classes.file.js';
|
||||
import { Trash } from './classes.trash.js';
|
||||
import { ListCursor, type IListCursorOptions } from './classes.listcursor.js';
|
||||
|
||||
/**
|
||||
* The bucket class exposes the basic functionality of a bucket.
|
||||
@@ -14,7 +15,7 @@ import { Trash } from './classes.trash.js';
|
||||
* operate in S3 basic fashion on blobs of data.
|
||||
*/
|
||||
export class Bucket {
|
||||
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
|
||||
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string): Promise<Bucket> {
|
||||
const command = new plugins.s3.ListBucketsCommand({});
|
||||
const buckets = await smartbucketRef.s3Client.send(command);
|
||||
const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);
|
||||
@@ -24,20 +25,19 @@ export class Bucket {
|
||||
console.log(`Taking this as base for new Bucket instance`);
|
||||
return new this(smartbucketRef, bucketNameArg);
|
||||
} else {
|
||||
console.log(`did not find bucket by name: ${bucketNameArg}`);
|
||||
return null;
|
||||
throw new Error(`Bucket '${bucketNameArg}' not found.`);
|
||||
}
|
||||
}
|
||||
|
||||
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
||||
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
|
||||
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
|
||||
await smartbucketRef.s3Client.send(command);
|
||||
return new Bucket(smartbucketRef, bucketName);
|
||||
}
|
||||
|
||||
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
||||
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
|
||||
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
|
||||
await smartbucketRef.s3Client.send(command);
|
||||
}
|
||||
|
||||
public smartbucketRef: SmartBucket;
|
||||
@@ -71,7 +71,7 @@ export class Bucket {
|
||||
}
|
||||
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
|
||||
const baseDirectory = await this.getBaseDirectory();
|
||||
return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
|
||||
return await baseDirectory.getSubDirectoryByName(checkPath, {
|
||||
getEmptyDirectory: true,
|
||||
});
|
||||
}
|
||||
@@ -88,15 +88,16 @@ export class Bucket {
|
||||
contents: string | Buffer;
|
||||
overwrite?: boolean;
|
||||
}
|
||||
): Promise<File | null> {
|
||||
): Promise<File> {
|
||||
try {
|
||||
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
|
||||
const exists = await this.fastExists({ path: reducedPath });
|
||||
|
||||
if (exists && !optionsArg.overwrite) {
|
||||
const errorText = `Object already exists at path '${reducedPath}' in bucket '${this.name}'.`;
|
||||
console.error(errorText);
|
||||
return null;
|
||||
throw new Error(
|
||||
`Object already exists at path '${reducedPath}' in bucket '${this.name}'. ` +
|
||||
`Set overwrite:true to replace it.`
|
||||
);
|
||||
} else if (exists && optionsArg.overwrite) {
|
||||
console.log(
|
||||
`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
|
||||
@@ -129,13 +130,6 @@ export class Bucket {
|
||||
}
|
||||
}
|
||||
|
||||
public async fastPutStrict(...args: Parameters<Bucket['fastPut']>) {
|
||||
const file = await this.fastPut(...args);
|
||||
if (!file) {
|
||||
throw new Error(`File not stored at path '${args[0].path}'`);
|
||||
}
|
||||
return file;
|
||||
}
|
||||
|
||||
/**
|
||||
* get file
|
||||
@@ -259,10 +253,10 @@ export class Bucket {
|
||||
const exists = await this.fastExists({ path: optionsArg.path });
|
||||
|
||||
if (exists && !optionsArg.overwrite) {
|
||||
console.error(
|
||||
`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`
|
||||
throw new Error(
|
||||
`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'. ` +
|
||||
`Set overwrite:true to replace it.`
|
||||
);
|
||||
return;
|
||||
} else if (exists && optionsArg.overwrite) {
|
||||
console.log(
|
||||
`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
|
||||
@@ -460,7 +454,7 @@ export class Bucket {
|
||||
Range: `bytes=0-${optionsArg.length - 1}`,
|
||||
});
|
||||
const response = await this.smartbucketRef.s3Client.send(command);
|
||||
const chunks = [];
|
||||
const chunks: Buffer[] = [];
|
||||
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||
|
||||
for await (const chunk of stream) {
|
||||
@@ -476,6 +470,145 @@ export class Bucket {
|
||||
}
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// Memory-Efficient Listing Methods (Phase 1)
|
||||
// ==========================================
|
||||
|
||||
/**
|
||||
* List all objects with a given prefix using async generator (memory-efficient streaming)
|
||||
* @param prefix - Optional prefix to filter objects (default: '' for all objects)
|
||||
* @yields Object keys one at a time
|
||||
* @example
|
||||
* ```ts
|
||||
* for await (const key of bucket.listAllObjects('npm/')) {
|
||||
* console.log(key);
|
||||
* if (shouldStop) break; // Early exit supported
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
public async *listAllObjects(prefix: string = ''): AsyncIterableIterator<string> {
|
||||
let continuationToken: string | undefined;
|
||||
|
||||
do {
|
||||
const command = new plugins.s3.ListObjectsV2Command({
|
||||
Bucket: this.name,
|
||||
Prefix: prefix,
|
||||
ContinuationToken: continuationToken,
|
||||
});
|
||||
|
||||
const response = await this.smartbucketRef.s3Client.send(command);
|
||||
|
||||
for (const obj of response.Contents || []) {
|
||||
if (obj.Key) yield obj.Key;
|
||||
}
|
||||
|
||||
continuationToken = response.NextContinuationToken;
|
||||
} while (continuationToken);
|
||||
}
|
||||
|
||||
/**
|
||||
* List all objects as an RxJS Observable (for complex reactive pipelines)
|
||||
* @param prefix - Optional prefix to filter objects (default: '' for all objects)
|
||||
* @returns Observable that emits object keys
|
||||
* @example
|
||||
* ```ts
|
||||
* bucket.listAllObjectsObservable('npm/')
|
||||
* .pipe(
|
||||
* filter(key => key.endsWith('.json')),
|
||||
* take(100)
|
||||
* )
|
||||
* .subscribe(key => console.log(key));
|
||||
* ```
|
||||
*/
|
||||
public listAllObjectsObservable(prefix: string = ''): plugins.smartrx.rxjs.Observable<string> {
|
||||
return new plugins.smartrx.rxjs.Observable<string>((subscriber) => {
|
||||
const fetchPage = async (token?: string) => {
|
||||
try {
|
||||
const command = new plugins.s3.ListObjectsV2Command({
|
||||
Bucket: this.name,
|
||||
Prefix: prefix,
|
||||
ContinuationToken: token,
|
||||
});
|
||||
|
||||
const response = await this.smartbucketRef.s3Client.send(command);
|
||||
|
||||
for (const obj of response.Contents || []) {
|
||||
if (obj.Key) subscriber.next(obj.Key);
|
||||
}
|
||||
|
||||
if (response.NextContinuationToken) {
|
||||
await fetchPage(response.NextContinuationToken);
|
||||
} else {
|
||||
subscriber.complete();
|
||||
}
|
||||
} catch (error) {
|
||||
subscriber.error(error);
|
||||
}
|
||||
};
|
||||
|
||||
fetchPage();
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a cursor for manual pagination control
|
||||
* @param prefix - Optional prefix to filter objects (default: '' for all objects)
|
||||
* @param options - Cursor options (pageSize, etc.)
|
||||
* @returns ListCursor instance
|
||||
* @example
|
||||
* ```ts
|
||||
* const cursor = bucket.createCursor('npm/', { pageSize: 500 });
|
||||
* while (cursor.hasMore()) {
|
||||
* const { keys, done } = await cursor.next();
|
||||
* console.log(`Processing ${keys.length} keys...`);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
public createCursor(prefix: string = '', options?: IListCursorOptions): ListCursor {
|
||||
return new ListCursor(this, prefix, options);
|
||||
}
|
||||
|
||||
// ==========================================
|
||||
// High-Level Listing Helpers (Phase 2)
|
||||
// ==========================================
|
||||
|
||||
/**
|
||||
* Find objects matching a glob pattern (memory-efficient)
|
||||
* @param pattern - Glob pattern (e.g., "**\/*.json", "npm/packages/*\/index.json")
|
||||
* @yields Matching object keys
|
||||
* @example
|
||||
* ```ts
|
||||
* for await (const key of bucket.findByGlob('npm/packages/*\/index.json')) {
|
||||
* console.log('Found package index:', key);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
public async *findByGlob(pattern: string): AsyncIterableIterator<string> {
|
||||
const matcher = new plugins.Minimatch(pattern);
|
||||
for await (const key of this.listAllObjects('')) {
|
||||
if (matcher.match(key)) yield key;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* List all objects and collect into an array (convenience method)
|
||||
* WARNING: Loads entire result set into memory. Use listAllObjects() generator for large buckets.
|
||||
* @param prefix - Optional prefix to filter objects (default: '' for all objects)
|
||||
* @returns Array of all object keys
|
||||
* @example
|
||||
* ```ts
|
||||
* const allKeys = await bucket.listAllObjectsArray('npm/');
|
||||
* console.log(`Found ${allKeys.length} objects`);
|
||||
* ```
|
||||
*/
|
||||
public async listAllObjectsArray(prefix: string = ''): Promise<string[]> {
|
||||
const keys: string[] = [];
|
||||
for await (const key of this.listAllObjects(prefix)) {
|
||||
keys.push(key);
|
||||
}
|
||||
return keys;
|
||||
}
|
||||
|
||||
public async cleanAllContents(): Promise<void> {
|
||||
try {
|
||||
// Define the command type explicitly
|
||||
|
||||
@@ -69,7 +69,7 @@ export class Directory {
|
||||
path: string;
|
||||
createWithContents?: string | Buffer;
|
||||
getFromTrash?: boolean;
|
||||
}): Promise<File | null> {
|
||||
}): Promise<File> {
|
||||
const pathDescriptor = {
|
||||
directory: this,
|
||||
path: optionsArg.path,
|
||||
@@ -83,7 +83,7 @@ export class Directory {
|
||||
return trashedFile;
|
||||
}
|
||||
if (!exists && !optionsArg.createWithContents) {
|
||||
return null;
|
||||
throw new Error(`File not found at path '${optionsArg.path}'`);
|
||||
}
|
||||
if (!exists && optionsArg.createWithContents) {
|
||||
await File.create({
|
||||
@@ -98,32 +98,66 @@ export class Directory {
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* gets a file strictly
|
||||
* @param args
|
||||
* @returns
|
||||
* Check if a file exists in this directory
|
||||
*/
|
||||
public async getFileStrict(...args: Parameters<Directory['getFile']>) {
|
||||
const file = await this.getFile(...args);
|
||||
if (!file) {
|
||||
throw new Error(`File not found at path '${args[0].path}'`);
|
||||
}
|
||||
return file;
|
||||
public async fileExists(optionsArg: { path: string }): Promise<boolean> {
|
||||
const pathDescriptor = {
|
||||
directory: this,
|
||||
path: optionsArg.path,
|
||||
};
|
||||
return this.bucketRef.fastExists({
|
||||
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a subdirectory exists
|
||||
*/
|
||||
public async directoryExists(dirNameArg: string): Promise<boolean> {
|
||||
const directories = await this.listDirectories();
|
||||
return directories.some(dir => dir.name === dirNameArg);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collects all ListObjectsV2 pages for a prefix.
|
||||
*/
|
||||
private async listObjectsV2AllPages(prefix: string, delimiter?: string) {
|
||||
const allContents: plugins.s3._Object[] = [];
|
||||
const allCommonPrefixes: plugins.s3.CommonPrefix[] = [];
|
||||
let continuationToken: string | undefined;
|
||||
|
||||
do {
|
||||
const command = new plugins.s3.ListObjectsV2Command({
|
||||
Bucket: this.bucketRef.name,
|
||||
Prefix: prefix,
|
||||
Delimiter: delimiter,
|
||||
ContinuationToken: continuationToken,
|
||||
});
|
||||
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||
|
||||
if (response.Contents) {
|
||||
allContents.push(...response.Contents);
|
||||
}
|
||||
if (response.CommonPrefixes) {
|
||||
allCommonPrefixes.push(...response.CommonPrefixes);
|
||||
}
|
||||
|
||||
continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
|
||||
} while (continuationToken);
|
||||
|
||||
return { contents: allContents, commonPrefixes: allCommonPrefixes };
|
||||
}
|
||||
|
||||
/**
|
||||
* lists all files
|
||||
*/
|
||||
public async listFiles(): Promise<File[]> {
|
||||
const command = new plugins.s3.ListObjectsV2Command({
|
||||
Bucket: this.bucketRef.name,
|
||||
Prefix: this.getBasePath(),
|
||||
Delimiter: '/',
|
||||
});
|
||||
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||
const { contents } = await this.listObjectsV2AllPages(this.getBasePath(), '/');
|
||||
const fileArray: File[] = [];
|
||||
|
||||
response.Contents?.forEach((item) => {
|
||||
contents.forEach((item) => {
|
||||
if (item.Key && !item.Key.endsWith('/')) {
|
||||
const subtractedPath = item.Key.replace(this.getBasePath(), '');
|
||||
if (!subtractedPath.includes('/')) {
|
||||
@@ -145,16 +179,11 @@ export class Directory {
|
||||
*/
|
||||
public async listDirectories(): Promise<Directory[]> {
|
||||
try {
|
||||
const command = new plugins.s3.ListObjectsV2Command({
|
||||
Bucket: this.bucketRef.name,
|
||||
Prefix: this.getBasePath(),
|
||||
Delimiter: '/',
|
||||
});
|
||||
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||
const { commonPrefixes } = await this.listObjectsV2AllPages(this.getBasePath(), '/');
|
||||
const directoryArray: Directory[] = [];
|
||||
|
||||
if (response.CommonPrefixes) {
|
||||
response.CommonPrefixes.forEach((item) => {
|
||||
if (commonPrefixes) {
|
||||
commonPrefixes.forEach((item) => {
|
||||
if (item.Prefix) {
|
||||
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
|
||||
if (subtractedPath.endsWith('/')) {
|
||||
@@ -206,7 +235,7 @@ export class Directory {
|
||||
* if the path is a file path, it will be treated as a file and the parent directory will be returned
|
||||
*/
|
||||
couldBeFilePath?: boolean;
|
||||
} = {}): Promise<Directory | null> {
|
||||
} = {}): Promise<Directory> {
|
||||
|
||||
const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");
|
||||
|
||||
@@ -226,7 +255,7 @@ export class Directory {
|
||||
return returnDirectory;
|
||||
}
|
||||
if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
|
||||
returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
|
||||
returnDirectory = new Directory(this.bucketRef, directoryArg, dirNameToSearch);
|
||||
}
|
||||
if (isFinalDirectory && optionsArg.createWithInitializerFile) {
|
||||
returnDirectory?.createEmptyFile('00init.txt');
|
||||
@@ -253,16 +282,12 @@ export class Directory {
|
||||
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
|
||||
}
|
||||
|
||||
return wantedDirectory || null;
|
||||
if (!wantedDirectory) {
|
||||
throw new Error(`Directory not found at path '${dirNameArg}'`);
|
||||
}
|
||||
return wantedDirectory;
|
||||
}
|
||||
|
||||
public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
|
||||
const directory = await this.getSubDirectoryByName(...args);
|
||||
if (!directory) {
|
||||
throw new Error(`Directory not found at path '${args[0]}'`);
|
||||
}
|
||||
return directory;
|
||||
}
|
||||
|
||||
/**
|
||||
* moves the directory
|
||||
@@ -360,7 +385,7 @@ export class Directory {
|
||||
*/
|
||||
mode?: 'permanent' | 'trash';
|
||||
}) {
|
||||
const file = await this.getFileStrict({
|
||||
const file = await this.getFile({
|
||||
path: optionsArg.path,
|
||||
});
|
||||
await file.delete({
|
||||
|
||||
@@ -245,7 +245,7 @@ export class File {
|
||||
|
||||
// lets update references of this
|
||||
const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
|
||||
this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
|
||||
this.parentDirectoryRef = await baseDirectory.getSubDirectoryByName(
|
||||
await helpers.reducePathDescriptorToPath(pathDescriptorArg),
|
||||
{
|
||||
couldBeFilePath: true,
|
||||
|
||||
89
ts/classes.listcursor.ts
Normal file
89
ts/classes.listcursor.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
// classes.listcursor.ts
|
||||
|
||||
import * as plugins from './plugins.js';
|
||||
import type { Bucket } from './classes.bucket.js';
|
||||
|
||||
export interface IListCursorOptions {
|
||||
pageSize?: number;
|
||||
}
|
||||
|
||||
export interface IListCursorResult {
|
||||
keys: string[];
|
||||
done: boolean;
|
||||
}
|
||||
|
||||
/**
|
||||
* ListCursor provides explicit pagination control for listing objects in a bucket.
|
||||
* Useful for UI pagination, resumable operations, and manual batch processing.
|
||||
*/
|
||||
export class ListCursor {
|
||||
private continuationToken?: string;
|
||||
private exhausted = false;
|
||||
private pageSize: number;
|
||||
|
||||
constructor(
|
||||
private bucket: Bucket,
|
||||
private prefix: string,
|
||||
options: IListCursorOptions = {}
|
||||
) {
|
||||
this.pageSize = options.pageSize || 1000;
|
||||
}
|
||||
|
||||
/**
|
||||
* Fetch the next page of object keys
|
||||
* @returns Object with keys array and done flag
|
||||
*/
|
||||
public async next(): Promise<IListCursorResult> {
|
||||
if (this.exhausted) {
|
||||
return { keys: [], done: true };
|
||||
}
|
||||
|
||||
const command = new plugins.s3.ListObjectsV2Command({
|
||||
Bucket: this.bucket.name,
|
||||
Prefix: this.prefix,
|
||||
MaxKeys: this.pageSize,
|
||||
ContinuationToken: this.continuationToken,
|
||||
});
|
||||
|
||||
const response = await this.bucket.smartbucketRef.s3Client.send(command);
|
||||
|
||||
const keys = (response.Contents || [])
|
||||
.map((obj) => obj.Key)
|
||||
.filter((key): key is string => !!key);
|
||||
|
||||
this.continuationToken = response.NextContinuationToken;
|
||||
this.exhausted = !this.continuationToken;
|
||||
|
||||
return { keys, done: this.exhausted };
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if there are more pages to fetch
|
||||
*/
|
||||
public hasMore(): boolean {
|
||||
return !this.exhausted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset the cursor to start from the beginning
|
||||
*/
|
||||
public reset(): void {
|
||||
this.continuationToken = undefined;
|
||||
this.exhausted = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current continuation token (for saving/restoring state)
|
||||
*/
|
||||
public getToken(): string | undefined {
|
||||
return this.continuationToken;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the continuation token (for resuming from a saved state)
|
||||
*/
|
||||
public setToken(token: string | undefined): void {
|
||||
this.continuationToken = token;
|
||||
this.exhausted = !token;
|
||||
}
|
||||
}
|
||||
@@ -4,11 +4,23 @@ import { File } from './classes.file.js';
|
||||
|
||||
export class MetaData {
|
||||
public static async hasMetaData(optionsArg: { file: File }) {
|
||||
// lets find the existing metadata file
|
||||
const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
|
||||
path: optionsArg.file.name + '.metadata',
|
||||
});
|
||||
return !!existingFile;
|
||||
// try finding the existing metadata file; return false if it doesn't exist
|
||||
try {
|
||||
const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
|
||||
path: optionsArg.file.name + '.metadata',
|
||||
});
|
||||
return !!existingFile;
|
||||
} catch (error: any) {
|
||||
const message = error?.message || '';
|
||||
const isNotFound =
|
||||
message.includes('File not found') ||
|
||||
error?.name === 'NotFound' ||
|
||||
error?.$metadata?.httpStatusCode === 404;
|
||||
if (isNotFound) {
|
||||
return false;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// static
|
||||
@@ -17,7 +29,7 @@ export class MetaData {
|
||||
metaData.fileRef = optionsArg.file;
|
||||
|
||||
// lets find the existing metadata file
|
||||
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
|
||||
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
|
||||
path: metaData.fileRef.name + '.metadata',
|
||||
createWithContents: '{}',
|
||||
});
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
import * as plugins from './plugins.js';
|
||||
import { Bucket } from './classes.bucket.js';
|
||||
import { normalizeS3Descriptor } from './helpers.js';
|
||||
|
||||
export class SmartBucket {
|
||||
public config: plugins.tsclass.storage.IS3Descriptor;
|
||||
@@ -17,18 +18,14 @@ export class SmartBucket {
|
||||
/**
 * Creates the underlying S3 client from the given descriptor.
 *
 * The raw descriptor is passed through normalizeS3Descriptor so that
 * endpoint strings in various formats (bare host, host:port, full URL),
 * plus useSsl/port overrides, are reconciled into one canonical endpoint.
 * (A merged diff had left the old manual endpoint computation AND duplicate
 * `endpoint`/`region`/`credentials`/`forcePathStyle` keys in the options
 * object; only the normalized values are used now.)
 *
 * @param configArg - S3 connection descriptor (endpoint, credentials, region, ...)
 */
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
  this.config = configArg;

  // Use the normalizer to handle various endpoint formats
  const { normalized } = normalizeS3Descriptor(configArg);

  this.s3Client = new plugins.s3.S3Client({
    endpoint: normalized.endpointUrl,
    region: normalized.region,
    credentials: normalized.credentials,
    forcePathStyle: normalized.forcePathStyle, // Necessary for S3-compatible storage like MinIO or Wasabi
  });
}
|
||||
|
||||
@@ -45,11 +42,12 @@ export class SmartBucket {
|
||||
return Bucket.getBucketByName(this, bucketNameArg);
|
||||
}
|
||||
|
||||
public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
|
||||
const bucket = await this.getBucketByName(...args);
|
||||
if (!bucket) {
|
||||
throw new Error(`Bucket ${args[0]} does not exist.`);
|
||||
}
|
||||
return bucket;
|
||||
/**
|
||||
* Check if a bucket exists
|
||||
*/
|
||||
public async bucketExists(bucketNameArg: string): Promise<boolean> {
|
||||
const command = new plugins.s3.ListBucketsCommand({});
|
||||
const buckets = await this.s3Client.send(command);
|
||||
return buckets.Buckets?.some(bucket => bucket.Name === bucketNameArg) ?? false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -21,7 +21,7 @@ export class Trash {
|
||||
const trashDir = await this.getTrashDir();
|
||||
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
|
||||
return trashDir.getFileStrict({ path: trashKey });
|
||||
return trashDir.getFile({ path: trashKey });
|
||||
}
|
||||
|
||||
public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
|
||||
|
||||
232
ts/helpers.ts
232
ts/helpers.ts
@@ -19,4 +19,236 @@ export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.I
|
||||
returnPath = returnPath.substring(1);
|
||||
}
|
||||
return returnPath;
|
||||
}
|
||||
|
||||
// S3 Descriptor Normalization

/**
 * A non-fatal issue detected while normalizing an S3 descriptor.
 * `code` is a stable machine-readable identifier (SBK_S3_*); `message`
 * carries the human-readable detail that is also emitted to the logger.
 */
export interface IS3Warning {
  code: string;
  message: string;
}

/**
 * Canonical S3 connection settings produced by normalizeS3Descriptor.
 */
export interface INormalizedS3Config {
  // Full origin URL, e.g. 'https://s3.example.com:9000' (no trailing slash).
  endpointUrl: string;
  // Hostname only (lowercased by URL parsing; IP literals preserved).
  host: string;
  protocol: 'http' | 'https';
  // Explicit port when one was configured or present in the endpoint;
  // undefined means the protocol default applies.
  port?: number;
  region: string;
  // Shaped to plug directly into the AWS SDK client options.
  credentials: {
    accessKeyId: string;
    secretAccessKey: string;
  };
  // Path-style addressing; required by MinIO/Wasabi-style S3 compatibles.
  forcePathStyle: boolean;
}
|
||||
|
||||
function coerceBooleanMaybe(value: unknown): { value: boolean | undefined; warning?: IS3Warning } {
|
||||
if (typeof value === 'boolean') return { value };
|
||||
if (typeof value === 'string') {
|
||||
const v = value.trim().toLowerCase();
|
||||
if (v === 'true' || v === '1') {
|
||||
return {
|
||||
value: true,
|
||||
warning: {
|
||||
code: 'SBK_S3_COERCED_USESSL',
|
||||
message: `Coerced useSsl='${value}' (string) to boolean true.`
|
||||
}
|
||||
};
|
||||
}
|
||||
if (v === 'false' || v === '0') {
|
||||
return {
|
||||
value: false,
|
||||
warning: {
|
||||
code: 'SBK_S3_COERCED_USESSL',
|
||||
message: `Coerced useSsl='${value}' (string) to boolean false.`
|
||||
}
|
||||
};
|
||||
}
|
||||
}
|
||||
return { value: undefined };
|
||||
}
|
||||
|
||||
function coercePortMaybe(port: unknown): { value: number | undefined; warning?: IS3Warning } {
|
||||
if (port === undefined || port === null || port === '') return { value: undefined };
|
||||
const n = typeof port === 'number' ? port : Number(String(port).trim());
|
||||
if (!Number.isFinite(n) || !Number.isInteger(n) || n <= 0 || n > 65535) {
|
||||
return {
|
||||
value: undefined,
|
||||
warning: {
|
||||
code: 'SBK_S3_INVALID_PORT',
|
||||
message: `Invalid port '${String(port)}' - expected integer in [1..65535].`
|
||||
}
|
||||
};
|
||||
}
|
||||
return { value: n };
|
||||
}
|
||||
|
||||
function sanitizeEndpointString(raw: unknown): { value: string; warnings: IS3Warning[] } {
|
||||
const warnings: IS3Warning[] = [];
|
||||
let s = String(raw ?? '').trim();
|
||||
if (s !== String(raw ?? '')) {
|
||||
warnings.push({
|
||||
code: 'SBK_S3_TRIMMED_ENDPOINT',
|
||||
message: 'Trimmed surrounding whitespace from endpoint.'
|
||||
});
|
||||
}
|
||||
return { value: s, warnings };
|
||||
}
|
||||
|
||||
function parseEndpointHostPort(
|
||||
endpoint: string,
|
||||
provisionalProtocol: 'http' | 'https'
|
||||
): {
|
||||
hadScheme: boolean;
|
||||
host: string;
|
||||
port?: number;
|
||||
extras: {
|
||||
droppedPath?: boolean;
|
||||
droppedQuery?: boolean;
|
||||
droppedCreds?: boolean
|
||||
}
|
||||
} {
|
||||
let url: URL | undefined;
|
||||
const extras: { droppedPath?: boolean; droppedQuery?: boolean; droppedCreds?: boolean } = {};
|
||||
|
||||
// Check if endpoint already has a scheme
|
||||
const hasScheme = /^https?:\/\//i.test(endpoint);
|
||||
|
||||
// Try parsing as full URL first
|
||||
try {
|
||||
if (hasScheme) {
|
||||
url = new URL(endpoint);
|
||||
} else {
|
||||
// Not a full URL; try host[:port] by attaching provisional scheme
|
||||
// Remove anything after first '/' for safety
|
||||
const cleanEndpoint = endpoint.replace(/\/.*/, '');
|
||||
url = new URL(`${provisionalProtocol}://${cleanEndpoint}`);
|
||||
}
|
||||
} catch (e) {
|
||||
throw new Error(`Unable to parse endpoint '${endpoint}'.`);
|
||||
}
|
||||
|
||||
// Check for dropped components
|
||||
if (url.username || url.password) extras.droppedCreds = true;
|
||||
if (url.pathname && url.pathname !== '/') extras.droppedPath = true;
|
||||
if (url.search) extras.droppedQuery = true;
|
||||
|
||||
const hadScheme = hasScheme;
|
||||
const host = url.hostname; // hostnames lowercased by URL; IPs preserved
|
||||
const port = url.port ? Number(url.port) : undefined;
|
||||
|
||||
return { hadScheme, host, port, extras };
|
||||
}
|
||||
|
||||
/**
 * Normalizes a raw S3 descriptor into canonical connection settings.
 *
 * Reconciles three possibly-conflicting inputs — the endpoint string (which
 * may be a bare host, host:port, or a full URL), the `useSsl` flag, and the
 * `port` field — into one canonical endpoint URL. Precedence rules:
 * a scheme embedded in the endpoint wins over `useSsl`; a port configured
 * on the descriptor wins over a port embedded in the endpoint. Every
 * coercion, conflict, or dropped URL component is collected as an
 * IS3Warning AND logged (to `logger.warn` when provided, else console).
 *
 * @param input - raw descriptor (endpoint, useSsl, port, region, credentials)
 * @param logger - optional warn-sink; defaults to console.warn
 * @returns normalized config plus all warnings emitted during normalization
 * @throws Error when the endpoint is empty or unparseable
 */
export function normalizeS3Descriptor(
  input: plugins.tsclass.storage.IS3Descriptor,
  logger?: { warn: (msg: string) => void }
): { normalized: INormalizedS3Config; warnings: IS3Warning[] } {
  const warnings: IS3Warning[] = [];
  // Record the warning and emit it immediately so misconfigurations are
  // visible even if the caller ignores the returned `warnings` array.
  const logWarn = (w: IS3Warning) => {
    warnings.push(w);
    if (logger) {
      logger.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
    } else {
      console.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
    }
  };

  // Coerce and sanitize inputs
  // (casts via `any`: descriptors from config files may carry strings where
  // booleans/numbers are expected — TODO confirm against IS3Descriptor typing)
  const { value: coercedUseSsl, warning: useSslWarn } = coerceBooleanMaybe((input as any).useSsl);
  if (useSslWarn) logWarn(useSslWarn);

  const { value: coercedPort, warning: portWarn } = coercePortMaybe((input as any).port);
  if (portWarn) logWarn(portWarn);

  const { value: endpointStr, warnings: endpointSanWarnings } = sanitizeEndpointString((input as any).endpoint);
  endpointSanWarnings.forEach(logWarn);

  if (!endpointStr) {
    throw new Error('S3 endpoint is required (got empty string). Provide hostname or URL.');
  }

  // Provisional protocol selection for parsing host:port forms
  // (only useSsl === false forces http; undefined leans https)
  const provisionalProtocol: 'http' | 'https' = coercedUseSsl === false ? 'http' : 'https';

  const { hadScheme, host, port: epPort, extras } = parseEndpointHostPort(endpointStr, provisionalProtocol);

  // Surface every URL component that was stripped during parsing.
  if (extras.droppedCreds) {
    logWarn({
      code: 'SBK_S3_DROPPED_CREDENTIALS',
      message: 'Ignored credentials in endpoint URL.'
    });
  }
  if (extras.droppedPath) {
    logWarn({
      code: 'SBK_S3_DROPPED_PATH',
      message: 'Removed path segment from endpoint URL; S3 endpoint should be host[:port] only.'
    });
  }
  if (extras.droppedQuery) {
    logWarn({
      code: 'SBK_S3_DROPPED_QUERY',
      message: 'Removed query string from endpoint URL; S3 endpoint should be host[:port] only.'
    });
  }

  // Final protocol decision
  let finalProtocol: 'http' | 'https';
  if (hadScheme) {
    // Scheme from endpoint wins; a conflicting useSsl only produces a warning.
    const schemeFromEndpoint = endpointStr.trim().toLowerCase().startsWith('http://') ? 'http' : 'https';
    finalProtocol = schemeFromEndpoint;
    if (typeof coercedUseSsl === 'boolean') {
      const expected = coercedUseSsl ? 'https' : 'http';
      if (expected !== finalProtocol) {
        logWarn({
          code: 'SBK_S3_SCHEME_CONFLICT',
          message: `useSsl=${String(coercedUseSsl)} conflicts with endpoint scheme '${finalProtocol}'; using endpoint scheme.`
        });
      }
    }
  } else {
    // No scheme in the endpoint: useSsl decides, defaulting to https.
    if (typeof coercedUseSsl === 'boolean') {
      finalProtocol = coercedUseSsl ? 'https' : 'http';
    } else {
      finalProtocol = 'https';
      logWarn({
        code: 'SBK_S3_GUESSED_PROTOCOL',
        message: "No scheme in endpoint and useSsl not provided; defaulting to 'https'."
      });
    }
  }

  // Final port decision: config port wins over endpoint port on conflict.
  let finalPort: number | undefined = undefined;
  if (coercedPort !== undefined && epPort !== undefined && coercedPort !== epPort) {
    logWarn({
      code: 'SBK_S3_PORT_CONFLICT',
      message: `Port in config (${coercedPort}) conflicts with endpoint port (${epPort}); using config port.`
    });
    finalPort = coercedPort;
  } else {
    finalPort = (coercedPort !== undefined) ? coercedPort : epPort;
  }

  // Build canonical endpoint URL (origin only, no trailing slash)
  const url = new URL(`${finalProtocol}://${host}`);
  if (finalPort !== undefined) url.port = String(finalPort);
  const endpointUrl = url.origin;

  const region = input.region || 'us-east-1';

  return {
    normalized: {
      endpointUrl,
      host,
      protocol: finalProtocol,
      port: finalPort,
      region,
      credentials: {
        accessKeyId: input.accessKey,
        secretAccessKey: input.accessSecret,
      },
      forcePathStyle: true,
    },
    warnings,
  };
}
|
||||
@@ -2,3 +2,6 @@ export * from './classes.smartbucket.js';
|
||||
export * from './classes.bucket.js';
|
||||
export * from './classes.directory.js';
|
||||
export * from './classes.file.js';
|
||||
export * from './classes.listcursor.js';
|
||||
export * from './classes.metadata.js';
|
||||
export * from './classes.trash.js';
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
// plugins.ts
|
||||
|
||||
// node native
|
||||
import * as path from 'path';
|
||||
import * as stream from 'stream';
|
||||
import * as path from 'node:path';
|
||||
import * as stream from 'node:stream';
|
||||
|
||||
export { path, stream };
|
||||
|
||||
@@ -26,7 +26,9 @@ export {
|
||||
|
||||
// third party scope
|
||||
import * as s3 from '@aws-sdk/client-s3';
|
||||
import { Minimatch } from 'minimatch';
|
||||
|
||||
export {
|
||||
s3,
|
||||
Minimatch,
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user