Compare commits
158 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| bd73004bd6 | |||
| 65c7bcf12c | |||
| dd6efa4908 | |||
| 1f4b7319d3 | |||
| b8e5d9a222 | |||
| 429375a643 | |||
| e147a077f3 | |||
| 5889396134 | |||
| 0c631383e1 | |||
| d852d8c85b | |||
| fa4c44ae04 | |||
| 708b0b63b1 | |||
| 8554554642 | |||
| a04aabf78b | |||
| 47cf2cc2cb | |||
| ef20e15d20 | |||
| 39a4bd6ab7 | |||
| c2a30654c5 | |||
| 8085033de4 | |||
| 75dd1d43a9 | |||
| 8ba7cdc873 | |||
| ed8db4536b | |||
| 96e3eadb31 | |||
| e9426b9cc9 | |||
| 9801e15c32 | |||
| cbfdd8e123 | |||
| 138c38ee30 | |||
| a1e449cf94 | |||
| aa9a2e9220 | |||
| 154854dc21 | |||
| 8e9041fbbf | |||
| 16a82ac50a | |||
| 0b396f19cf | |||
| 6ab77ece6e | |||
| b7a1f2087c | |||
| b0d41fa9a0 | |||
| 34082c38a7 | |||
| 8d160cefb0 | |||
| cec9c07b7c | |||
| 383a5204f4 | |||
| c7f0c97341 | |||
| e7f60465ff | |||
| 7db4d24817 | |||
| dc599585b8 | |||
| a22e32cd32 | |||
| 4647181807 | |||
| 99c3935d0c | |||
| 05523dc7a1 | |||
| dc99cfa229 | |||
| 23f8dc55d0 | |||
| ffaf0fc97a | |||
| 2a0425ff54 | |||
| 9adcdee0a0 | |||
| 786f8d4365 | |||
| 67244ba5cf | |||
| a9bb31c2a2 | |||
| bd8b05920f | |||
| 535d9f8520 | |||
| 8401fe1c0c | |||
| 08c3f674bf | |||
| df0a439def | |||
| 7245b49c31 | |||
| 4b70edb947 | |||
| 9629a04da6 | |||
| 963463d40d | |||
| ce58b99fc7 | |||
| 591c99736d | |||
| 559e3da47b | |||
| a7ac870e05 | |||
| d48c5e229a | |||
| b9c384dd08 | |||
| 91c04b2364 | |||
| b5dcc131e2 | |||
| cb0ab2c9db | |||
| 2a17ee542e | |||
| 95e9d2f0ff | |||
| 1a71c76da3 | |||
| e924511147 | |||
| 645ebbdd4d | |||
| 168148b2c9 | |||
| 1293fc4ca6 | |||
| b040120813 | |||
| 5c2d92c041 | |||
| eaf2e7e6bb | |||
| 1e1f65119c | |||
| c70ee820d7 | |||
| 2a15362ced | |||
| 9d5cdadd89 | |||
| a92fae2617 | |||
| 2cacfcf990 | |||
| 72d1e1e5a2 | |||
| a0be96bf23 | |||
| dab74572b8 | |||
| b871e23052 | |||
| caa69ae6ba | |||
| f1ee2f096c | |||
| 32e574197b | |||
| f13db18b00 | |||
| 6e1ff8ed22 | |||
| 150bf944de | |||
| f3a0ff1f63 | |||
| 8bc939173a | |||
| 410c16a717 | |||
| f809631451 | |||
| 226bf990b9 | |||
| f3e2a8a4f2 | |||
| 0430a35873 | |||
| a074a9558d | |||
| e8cc232fa1 | |||
| 89f48cea21 | |||
| 784e54f021 | |||
| 95065de1b5 | |||
| ea9d0f58e9 | |||
| 9592ab863a | |||
| b60ece389c | |||
| 121c71102a | |||
| ec8f320317 | |||
| cca3ade103 | |||
| caedf37288 | |||
| 9255875d83 | |||
| 346269d399 | |||
| 4bb6e2ef51 | |||
| 0ec7e1d6c6 | |||
| bac986ac85 | |||
| 476ff5bbce | |||
| 178c360b89 | |||
| 191e0b8e05 | |||
| 7d6bbd289d | |||
| 3f5461dfe0 | |||
| 78ec895a39 | |||
| 0b14904027 | |||
| a8ce2f75b7 | |||
| c383b75336 | |||
| d5d3105a98 | |||
| 6b69edb3ef | |||
| 77e69171e2 | |||
| 365eea59bd | |||
| 1a64835510 | |||
| b61a86f029 | |||
| b9d082f07f | |||
| 4819dd0bc5 | |||
| 7250793f95 | |||
| 8fb5e89714 | |||
| 9dbd19d1a9 | |||
| 43d29947c5 | |||
| d715ed7bb8 | |||
| 9c2af1931b | |||
| c58f465189 | |||
| 5506db9612 | |||
| 0fcfba2973 | |||
| cba7c11eea | |||
| 51c6b29b58 | |||
| 1089c8f3ec | |||
| 789ff96cf0 | |||
| 1b49699663 | |||
| aa209e87c1 | |||
| 9ad70a9942 | |||
| bc41089925 |
4
.gitignore
vendored
4
.gitignore
vendored
@@ -15,8 +15,6 @@ node_modules/
|
||||
|
||||
# builds
|
||||
dist/
|
||||
dist_web/
|
||||
dist_serve/
|
||||
dist_ts_web/
|
||||
dist_*/
|
||||
|
||||
# custom
|
||||
118
.gitlab-ci.yml
118
.gitlab-ci.yml
@@ -1,118 +0,0 @@
|
||||
# gitzone ci_default
|
||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
||||
|
||||
cache:
|
||||
paths:
|
||||
- .npmci_cache/
|
||||
key: "$CI_BUILD_STAGE"
|
||||
|
||||
stages:
|
||||
- security
|
||||
- test
|
||||
- release
|
||||
- metadata
|
||||
|
||||
# ====================
|
||||
# security stage
|
||||
# ====================
|
||||
mirror:
|
||||
stage: security
|
||||
script:
|
||||
- npmci git mirror
|
||||
tags:
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
snyk:
|
||||
stage: security
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci command npm install -g snyk
|
||||
- npmci command npm install --ignore-scripts
|
||||
- npmci command snyk test
|
||||
tags:
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
# ====================
|
||||
# test stage
|
||||
# ====================
|
||||
|
||||
testStable:
|
||||
stage: test
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci node install stable
|
||||
- npmci npm install
|
||||
- npmci npm test
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
- priv
|
||||
|
||||
testBuild:
|
||||
stage: test
|
||||
script:
|
||||
- npmci npm prepare
|
||||
- npmci node install stable
|
||||
- npmci npm install
|
||||
- npmci command npm run build
|
||||
coverage: /\d+.?\d+?\%\s*coverage/
|
||||
tags:
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
release:
|
||||
stage: release
|
||||
script:
|
||||
- npmci node install stable
|
||||
- npmci npm publish
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
# ====================
|
||||
# metadata stage
|
||||
# ====================
|
||||
codequality:
|
||||
stage: metadata
|
||||
allow_failure: true
|
||||
script:
|
||||
- npmci command npm install -g tslint typescript
|
||||
- npmci npm prepare
|
||||
- npmci npm install
|
||||
- npmci command "tslint -c tslint.json ./ts/**/*.ts"
|
||||
tags:
|
||||
- docker
|
||||
- priv
|
||||
|
||||
trigger:
|
||||
stage: metadata
|
||||
script:
|
||||
- npmci trigger
|
||||
only:
|
||||
- tags
|
||||
tags:
|
||||
- docker
|
||||
- notpriv
|
||||
|
||||
pages:
|
||||
stage: metadata
|
||||
script:
|
||||
- npmci node install lts
|
||||
- npmci command npm install -g @gitzone/tsdoc
|
||||
- npmci npm prepare
|
||||
- npmci npm install
|
||||
- npmci command tsdoc
|
||||
tags:
|
||||
- docker
|
||||
- notpriv
|
||||
only:
|
||||
- tags
|
||||
artifacts:
|
||||
expire_in: 1 week
|
||||
paths:
|
||||
- public
|
||||
allow_failure: true
|
||||
24
.vscode/launch.json
vendored
24
.vscode/launch.json
vendored
@@ -2,28 +2,10 @@
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"name": "current file",
|
||||
"type": "node",
|
||||
"command": "npm test",
|
||||
"name": "Run npm test",
|
||||
"request": "launch",
|
||||
"args": [
|
||||
"${relativeFile}"
|
||||
],
|
||||
"runtimeArgs": ["-r", "@gitzone/tsrun"],
|
||||
"cwd": "${workspaceRoot}",
|
||||
"protocol": "inspector",
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
},
|
||||
{
|
||||
"name": "test.ts",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"args": [
|
||||
"test/test.ts"
|
||||
],
|
||||
"runtimeArgs": ["-r", "@gitzone/tsrun"],
|
||||
"cwd": "${workspaceRoot}",
|
||||
"protocol": "inspector",
|
||||
"internalConsoleOptions": "openOnSessionStart"
|
||||
"type": "node-terminal"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
8
.vscode/settings.json
vendored
8
.vscode/settings.json
vendored
@@ -11,7 +11,13 @@
|
||||
},
|
||||
"gitzone": {
|
||||
"type": "object",
|
||||
"description": "settings for gitzone"
|
||||
"description": "settings for gitzone",
|
||||
"properties": {
|
||||
"projectType": {
|
||||
"type": "string",
|
||||
"enum": ["website", "element", "service", "npm", "wcc"]
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
351
changelog.md
Normal file
351
changelog.md
Normal file
@@ -0,0 +1,351 @@
|
||||
# Changelog
|
||||
|
||||
## 2025-11-20 - 4.3.0 - feat(listing)
|
||||
Add memory-efficient listing APIs: async generator, RxJS observable, and cursor pagination; export ListCursor and Minimatch; add minimatch dependency; bump to 4.2.0
|
||||
|
||||
- Added memory-efficient listing methods on Bucket: listAllObjects (async generator), listAllObjectsObservable (RxJS Observable), createCursor (returns ListCursor) and listAllObjectsArray (convenience array collector).
|
||||
- New ListCursor class (ts/classes.listcursor.ts) providing page-based iteration: next(), hasMore(), reset(), getToken()/setToken().
|
||||
- Added glob matching helper findByGlob(pattern) using minimatch (exported via plugins.Minimatch).
|
||||
- Exported ListCursor from ts/index.ts and exported Minimatch via ts/plugins.ts.
|
||||
- Added minimatch dependency in package.json and bumped package version to 4.2.0; increased test timeout to 120s.
|
||||
- Updated tests to read S3_SECRETKEY, S3_PORT and to assert bucket name from env (test/test.node+deno.ts, test/test.trash.node+deno.ts).
|
||||
- No breaking changes: new APIs are additive and existing behavior preserved.
|
||||
|
||||
## 2025-11-20 - 4.2.0 - feat(listing)
|
||||
Add memory-efficient listing with async generators, RxJS observables, and cursor pagination for huge buckets
|
||||
|
||||
**New Memory-Efficient Listing Methods:**
|
||||
|
||||
**Async Generator (Recommended for most use cases):**
|
||||
- `Bucket.listAllObjects(prefix?)` - Stream object keys one at a time using `for await...of`
|
||||
- `Bucket.findByGlob(pattern)` - Find objects matching glob patterns (e.g., `**/*.json`, `npm/packages/*/index.json`)
|
||||
- Memory efficient, supports early termination, composable
|
||||
|
||||
**RxJS Observable (For complex reactive pipelines):**
|
||||
- `Bucket.listAllObjectsObservable(prefix?)` - Emit keys as Observable for use with RxJS operators (filter, map, take, etc.)
|
||||
- Perfect for complex data transformations and reactive architectures
|
||||
|
||||
**Cursor Pattern (For manual pagination control):**
|
||||
- `Bucket.createCursor(prefix?, options?)` - Create cursor for explicit page-by-page iteration
|
||||
- `ListCursor.next()` - Fetch next page of results
|
||||
- `ListCursor.hasMore()` - Check if more results available
|
||||
- `ListCursor.reset()` - Reset to beginning
|
||||
- `ListCursor.getToken()` / `ListCursor.setToken()` - Save/restore pagination state
|
||||
- Ideal for UI pagination and resumable operations
|
||||
|
||||
**Convenience Methods:**
|
||||
- `Bucket.listAllObjectsArray(prefix?)` - Collect all keys into array (WARNING: loads all into memory)
|
||||
|
||||
**Benefits:**
|
||||
- ✅ Memory-efficient streaming for buckets with millions of objects
|
||||
- ✅ Three patterns for different use cases (generators, observables, cursors)
|
||||
- ✅ Support for early termination and incremental processing
|
||||
- ✅ Glob pattern matching with minimatch
|
||||
- ✅ Full TypeScript support with proper types
|
||||
- ✅ Zero breaking changes - all new methods
|
||||
|
||||
**Dependencies:**
|
||||
- Added `minimatch` for glob pattern support
|
||||
|
||||
**Files Changed:**
|
||||
- `ts/classes.bucket.ts` - Added all listing methods
|
||||
- `ts/classes.listcursor.ts` - NEW: Cursor implementation
|
||||
- `ts/plugins.ts` - Export Minimatch
|
||||
- `ts/index.ts` - Export ListCursor
|
||||
- `test/test.listing.node+deno.ts` - NEW: Comprehensive listing tests
|
||||
- `package.json` - Added minimatch dependency
|
||||
|
||||
## 2025-11-20 - 4.1.0 - feat(core)
|
||||
Add S3 endpoint normalization, directory pagination, improved metadata checks, trash support, and related tests
|
||||
|
||||
- Add normalizeS3Descriptor helper to sanitize and normalize various S3 endpoint formats and emit warnings for mismatches (helpers.ts).
|
||||
- Use normalized endpoint and credentials when constructing S3 client in SmartBucket (classes.smartbucket.ts).
|
||||
- Implement paginated listing helper listObjectsV2AllPages in Directory and use it for listFiles and listDirectories to aggregate Contents and CommonPrefixes across pages (classes.directory.ts).
|
||||
- Improve MetaData.hasMetaData to catch NotFound errors and return false instead of throwing (classes.metadata.ts).
|
||||
- Export metadata and trash modules from index (ts/index.ts) and add a Trash class with utilities for trashed files and key encoding (classes.trash.ts).
|
||||
- Enhance Bucket operations: fastCopy now preserves or replaces native metadata correctly, cleanAllContents supports paginated deletion, and improved fastExists error handling (classes.bucket.ts).
|
||||
- Fix Directory.getSubDirectoryByName to construct new Directory instances with the correct parent directory reference.
|
||||
- Add tests covering metadata absence and pagination behavior (test/test.local.node+deno.ts).
|
||||
|
||||
## 2025-11-20 - 4.0.1 - fix(plugins)
|
||||
Use explicit node: imports for native path and stream modules in ts/plugins.ts
|
||||
|
||||
- Replaced imports of 'path' and 'stream' with 'node:path' and 'node:stream' in ts/plugins.ts.
|
||||
- Ensures correct ESM resolution of Node built-ins when package.json type is 'module' and avoids accidental conflicts with userland packages.
|
||||
|
||||
## 2025-11-20 - 4.0.0 - BREAKING CHANGE(core)
|
||||
Make API strict-by-default: remove *Strict variants, throw on not-found/exists conflicts, add explicit exists() methods, update docs/tests and bump deps
|
||||
|
||||
- Breaking: Core API methods are strict by default and now throw errors instead of returning null when targets are missing or already exist (e.g. getBucketByName, getFile, getSubDirectoryByName, fastPut, fastPutStream).
|
||||
- Removed *Strict variants: fastPutStrict, getBucketByNameStrict, getFileStrict, getSubDirectoryByNameStrict — use the base methods which are now strict.
|
||||
- Added explicit existence checks: bucketExists (SmartBucket), fileExists (Directory/fileExists), directoryExists (Directory.directoryExists), and fastExists (Bucket.fastExists) to allow non-throwing checks before operations.
|
||||
- Return type updates: fastPut now returns Promise<File> (no null), getBucketByName/getFile/getSubDirectoryByName now return the respective objects or throw.
|
||||
- Improved error messages to guide callers (e.g. suggest setting overwrite:true on fastPut when object exists).
|
||||
- Updated README, changelog and tests to reflect the new strict semantics and usage patterns.
|
||||
- Developer/runtime dependency bumps: @git.zone/tsbuild, @git.zone/tsrun, @git.zone/tstest, @aws-sdk/client-s3, @push.rocks/smartstring, @tsclass/tsclass (version bumps recorded in package.json).
|
||||
- Major version bump to 4.0.0 to reflect breaking API changes.
|
||||
|
||||
## 2025-11-20 - 4.0.0 - BREAKING: Strict by default + exists methods
|
||||
Complete API overhaul: all methods throw by default, removed all *Strict variants, added dedicated exists methods
|
||||
|
||||
**Breaking Changes:**
|
||||
|
||||
**Putters (Write Operations):**
|
||||
- `fastPut`: Return type `Promise<File | null>` → `Promise<File>`, throws when file exists and overwrite is false
|
||||
- `fastPutStream`: Now throws when file exists and overwrite is false (previously returned silently)
|
||||
- `fastPutStrict`: **Removed** - use `fastPut` directly
|
||||
|
||||
**Getters (Read Operations):**
|
||||
- `getBucketByName`: Return type `Promise<Bucket | null>` → `Promise<Bucket>`, throws when bucket not found
|
||||
- `getBucketByNameStrict`: **Removed** - use `getBucketByName` directly
|
||||
- `getFile`: Return type `Promise<File | null>` → `Promise<File>`, throws when file not found
|
||||
- `getFileStrict`: **Removed** - use `getFile` directly
|
||||
- `getSubDirectoryByName`: Return type `Promise<Directory | null>` → `Promise<Directory>`, throws when directory not found
|
||||
- `getSubDirectoryByNameStrict`: **Removed** - use `getSubDirectoryByName` directly
|
||||
|
||||
**New Methods (Existence Checks):**
|
||||
- `bucket.fastExists({ path })` - ✅ Already existed
|
||||
- `directory.fileExists({ path })` - **NEW** - Check if file exists
|
||||
- `directory.directoryExists(name)` - **NEW** - Check if subdirectory exists
|
||||
- `smartBucket.bucketExists(name)` - **NEW** - Check if bucket exists
|
||||
|
||||
**Benefits:**
|
||||
- ✅ **Simpler API**: Removed 4 redundant *Strict methods
|
||||
- ✅ **Type-safe**: No nullable returns - `Promise<T>` not `Promise<T | null>`
|
||||
- ✅ **Fail-fast**: Errors throw immediately with precise stack traces
|
||||
- ✅ **Consistent**: All methods behave the same way
|
||||
- ✅ **Explicit**: Use exists() to check, then get() to retrieve
|
||||
- ✅ **Better debugging**: Error location is always precise
|
||||
|
||||
**Migration Guide:**
|
||||
|
||||
```typescript
|
||||
// ============================================
|
||||
// Pattern 1: Check then Get (Recommended)
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const bucket = await smartBucket.getBucketByName('my-bucket');
|
||||
if (bucket) {
|
||||
// use bucket
|
||||
}
|
||||
|
||||
// After (v4.0):
|
||||
if (await smartBucket.bucketExists('my-bucket')) {
|
||||
const bucket = await smartBucket.getBucketByName('my-bucket'); // guaranteed to exist
|
||||
// use bucket
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// Pattern 2: Try/Catch
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const file = await directory.getFile({ path: 'file.txt' });
|
||||
if (!file) {
|
||||
// Handle not found
|
||||
}
|
||||
|
||||
// After (v4.0):
|
||||
try {
|
||||
const file = await directory.getFile({ path: 'file.txt' });
|
||||
// use file
|
||||
} catch (error) {
|
||||
// Handle not found
|
||||
}
|
||||
|
||||
// ============================================
|
||||
// Pattern 3: Remove *Strict calls
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const file = await directory.getFileStrict({ path: 'file.txt' });
|
||||
|
||||
// After (v4.0):
|
||||
const file = await directory.getFile({ path: 'file.txt' }); // already strict
|
||||
|
||||
// ============================================
|
||||
// Pattern 4: Write Operations
|
||||
// ============================================
|
||||
|
||||
// Before (v3.x):
|
||||
const file = await bucket.fastPutStrict({ path: 'file.txt', contents: 'data' });
|
||||
|
||||
// After (v4.0):
|
||||
const file = await bucket.fastPut({ path: 'file.txt', contents: 'data' }); // already strict
|
||||
```
|
||||
|
||||
## 2025-08-18 - 3.3.10 - fix(helpers)
|
||||
Normalize and robustly parse S3 endpoint configuration; use normalized descriptor in SmartBucket and update dev tooling
|
||||
|
||||
- Add normalizeS3Descriptor to ts/helpers.ts: robust endpoint parsing, coercion of useSsl/port, sanitization, warnings for dropped URL parts, and canonical endpoint URL output.
|
||||
- Update SmartBucket (ts/classes.smartbucket.ts) to use the normalized endpoint, region, credentials and forcePathStyle from normalizeS3Descriptor.
|
||||
- Adjust dev tooling: bump @git.zone/tsbuild -> ^2.6.7, @git.zone/tstest -> ^2.3.4, @push.rocks/qenv -> ^6.1.3 and update test script to run tstest with --verbose --logfile --timeout 60.
|
||||
- Add .claude/settings.local.json containing local assistant/CI permission settings (local config only).
|
||||
|
||||
## 2025-08-15 - 3.3.9 - fix(docs)
|
||||
Revise README with detailed usage examples and add local Claude settings
|
||||
|
||||
- Revamped README: reorganized content, added emojis and clearer headings for install, getting started, bucket/file/directory operations, streaming, metadata, trash/recovery, locking, and advanced configuration.
|
||||
- Added many concrete code examples for SmartBucket, Bucket, Directory, File, streaming (node/web), RxJS replay subjects, metadata handling, trash workflow, file locking, magic-bytes detection, JSON operations, and cleaning bucket contents.
|
||||
- Included testing instructions (pnpm test) and a Best Practices section with recommendations for strict mode, streaming, metadata, trash usage, and locking.
|
||||
- Added .claude/settings.local.json to include local Claude configuration and tool permissions.
|
||||
- No source code or public API changes; documentation and local tooling config only.
|
||||
|
||||
## 2025-08-15 - 3.3.8 - fix(tests)
|
||||
Update tests to use @git.zone/tstest, upgrade dependencies, remove GitLab CI and add local CI/workspace config
|
||||
|
||||
- Tests: replace imports from @push.rocks/tapbundle with @git.zone/tstest/tapbundle and switch tap.start() to export default tap.start()
|
||||
- Dependencies: bump @aws-sdk/client-s3 and several @push.rocks packages; upgrade @tsclass/tsclass to a newer major
|
||||
- DevDependencies: upgrade @git.zone/tsbuild, @git.zone/tstest, @push.rocks/qenv, and @push.rocks/tapbundle
|
||||
- CI/config: remove .gitlab-ci.yml, add .claude/settings.local.json
|
||||
- Workspace: add pnpm-workspace.yaml and packageManager field in package.json
|
||||
|
||||
## 2024-12-02 - 3.3.7 - fix(package)
|
||||
Update author field in package.json
|
||||
|
||||
- Corrected the author field from 'Lossless GmbH' to 'Task Venture Capital GmbH' in the package.json file.
|
||||
|
||||
## 2024-12-02 - 3.3.6 - fix(package)
|
||||
Fix license field in package.json to reflect MIT licensing
|
||||
|
||||
|
||||
## 2024-11-25 - 3.3.5 - fix(test)
|
||||
Refactor trash test to improve metadata validation
|
||||
|
||||
- Added new checks in trash tests to ensure metadata files are correctly moved to trash.
|
||||
- Validated the presence and integrity of metadata within trashed files.
|
||||
|
||||
## 2024-11-25 - 3.3.4 - fix(core)
|
||||
Minor refactoring and cleanup of TypeScript source files for improved readability and maintainability.
|
||||
|
||||
|
||||
## 2024-11-24 - 3.3.3 - fix(documentation)
|
||||
Improved documentation accuracy and consistency
|
||||
|
||||
- Updated the project description to reflect the cloud-agnostic nature and advanced capabilities
|
||||
- Enhanced the README with detailed explanations and code examples for advanced features like trash management
|
||||
- Clarified the handling and importance of metadata using the MetaData utility
|
||||
|
||||
## 2024-11-24 - 3.3.2 - fix(documentation)
|
||||
Updated keywords and description for clarity and consistency.
|
||||
|
||||
- Modified keywords and description in package.json and npmextra.json.
|
||||
- Enhanced readme.md file structure and examples
|
||||
|
||||
## 2024-11-24 - 3.3.1 - fix(File)
|
||||
Fixed issue with file restore metadata operations.
|
||||
|
||||
- Corrected the order of operations in the file restore function to ensure custom metadata is appropriately deleted after moving the file.
|
||||
|
||||
## 2024-11-24 - 3.3.0 - feat(core)
|
||||
Enhanced directory handling and file restoration from trash
|
||||
|
||||
- Refined getSubDirectoryByName to handle file paths treated as directories.
|
||||
- Introduced file restoration function from trash to original or specified paths.
|
||||
|
||||
## 2024-11-24 - 3.2.2 - fix(core)
|
||||
Refactor Bucket class for improved error handling
|
||||
|
||||
- Ensured safe access using non-null assertions when finding a bucket.
|
||||
- Enhanced fastPut method by adding fastPutStrict for safer operations.
|
||||
- Added explicit error handling and type checking in fastExists method.
|
||||
|
||||
## 2024-11-24 - 3.2.1 - fix(metadata)
|
||||
Fix metadata handling for deleted files
|
||||
|
||||
- Ensured metadata is correctly stored and managed when files are deleted into the trash.
|
||||
|
||||
## 2024-11-24 - 3.2.0 - feat(bucket)
|
||||
Enhanced SmartBucket with trash management and metadata handling
|
||||
|
||||
- Added functionality to move files to a trash directory.
|
||||
- Introduced methods to handle file metadata more robustly.
|
||||
- Implemented a method to clean all contents from a bucket.
|
||||
- Enhanced directory retrieval to handle non-existent directories with options.
|
||||
- Improved handling of file paths and metadata within the storage system.
|
||||
|
||||
## 2024-11-18 - 3.1.0 - feat(file)
|
||||
Added functionality to retrieve magic bytes from files and detect file types using magic bytes.
|
||||
|
||||
- Introduced method `getMagicBytes` in `File` and `Bucket` classes to retrieve a specific number of bytes from a file.
|
||||
- Enhanced file type detection by utilizing magic bytes in `MetaData` class.
|
||||
- Updated dependencies for better performance and compatibility.
|
||||
|
||||
## 2024-11-18 - 3.0.24 - fix(metadata)
|
||||
Fix metadata handling to address type assertion and data retrieval.
|
||||
|
||||
- Fixed type assertion issues in `MetaData` class properties with type non-null assertions.
|
||||
- Corrected the handling of JSON data retrieval in `MetaData.storeCustomMetaData` function.
|
||||
|
||||
## 2024-10-16 - 3.0.23 - fix(dependencies)
|
||||
Update package dependencies for improved functionality and security.
|
||||
|
||||
- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
|
||||
- Updated @push.rocks/smartstream to version ^3.2.4.
|
||||
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.
|
||||
|
||||
## 2024-07-28 - 3.0.22 - fix(dependencies)
|
||||
Update dependencies and improve bucket retrieval logging
|
||||
|
||||
- Updated @aws-sdk/client-s3 to ^3.620.0
|
||||
- Updated @git.zone/tsbuild to ^2.1.84
|
||||
- Updated @git.zone/tsrun to ^1.2.49
|
||||
- Updated @push.rocks/smartpromise to ^4.0.4
|
||||
- Updated @tsclass/tsclass to ^4.1.2
|
||||
- Added a log for when a bucket is not found by name in getBucketByName method
|
||||
|
||||
## 2024-07-04 - 3.0.21 - fix(test)
|
||||
Update endpoint configuration in tests to use environment variable
|
||||
|
||||
- Modified `qenv.yml` to include `S3_ENDPOINT` as a required environment variable.
|
||||
- Updated test files to fetch `S3_ENDPOINT` from environment instead of hardcoding.
|
||||
|
||||
## 2024-06-19 - 3.0.20 - Fix and Stability Updates
|
||||
Improved overall stability and consistency.
|
||||
|
||||
## 2024-06-18 - 3.0.18 - Delete Functions Consistency
|
||||
Ensured more consistency between delete methods and trash behavior.
|
||||
|
||||
## 2024-06-17 - 3.0.17 to 3.0.16 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-06-11 - 3.0.15 to 3.0.14 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-06-10 - 3.0.13 - Trash Feature Completion
|
||||
Finished work on trash feature.
|
||||
|
||||
## 2024-06-09 - 3.0.12 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-06-08 - 3.0.11 to 3.0.10 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-06-03 - 3.0.10 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-05-29 - 3.0.9 - Update Description
|
||||
Updated project description.
|
||||
|
||||
## 2024-05-27 - 3.0.8 to 3.0.6 - Pathing and Core Updates
|
||||
Routine updates and fixes performed.
|
||||
- S3 paths' pathing differences now correctly handled with a reducePath method.
|
||||
|
||||
## 2024-05-21 - 3.0.5 to 3.0.4 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-05-17 - 3.0.3 to 3.0.2 - Fix and Update
|
||||
Routine updates and fixes performed.
|
||||
|
||||
## 2024-05-17 - 3.0.0 - Major Release
|
||||
Introduced breaking changes in core and significant improvements.
|
||||
|
||||
## 2024-05-05 - 2.0.5 - Breaking Changes
|
||||
Introduced breaking changes in core functionality.
|
||||
|
||||
## 2024-04-14 - 2.0.4 - TSConfig Update
|
||||
Updated TypeScript configuration.
|
||||
|
||||
## 2024-01-01 - 2.0.2 - Organization Scheme Update
|
||||
Switched to the new organizational scheme.
|
||||
19
license
Normal file
19
license
Normal file
@@ -0,0 +1,19 @@
|
||||
Copyright (c) 2014 Task Venture Capital GmbH (hello@task.vc)
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
@@ -3,13 +3,39 @@
|
||||
"npmGlobalTools": []
|
||||
},
|
||||
"gitzone": {
|
||||
"projectType": "npm",
|
||||
"module": {
|
||||
"githost": "gitlab.com",
|
||||
"gitscope": "pushrocks",
|
||||
"githost": "code.foss.global",
|
||||
"gitscope": "push.rocks",
|
||||
"gitrepo": "smartbucket",
|
||||
"shortDescription": "simple cloud independent object storage",
|
||||
"npmPackagename": "@pushrocks/smartbucket",
|
||||
"license": "MIT"
|
||||
"description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
|
||||
"npmPackagename": "@push.rocks/smartbucket",
|
||||
"license": "MIT",
|
||||
"keywords": [
|
||||
"TypeScript",
|
||||
"cloud agnostic",
|
||||
"object storage",
|
||||
"bucket management",
|
||||
"file operations",
|
||||
"directory management",
|
||||
"data streaming",
|
||||
"S3",
|
||||
"multi-cloud",
|
||||
"file locking",
|
||||
"metadata management",
|
||||
"buffer handling",
|
||||
"access control",
|
||||
"environment configuration",
|
||||
"unified storage",
|
||||
"bucket policies",
|
||||
"trash management",
|
||||
"file transfer",
|
||||
"data management",
|
||||
"streaming"
|
||||
]
|
||||
}
|
||||
},
|
||||
"tsdoc": {
|
||||
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||
}
|
||||
}
|
||||
11840
package-lock.json
generated
11840
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
82
package.json
82
package.json
@@ -1,41 +1,77 @@
|
||||
{
|
||||
"name": "@pushrocks/smartbucket",
|
||||
"version": "1.0.20",
|
||||
"description": "simple cloud independent object storage",
|
||||
"main": "dist/index.js",
|
||||
"typings": "dist/index.d.ts",
|
||||
"author": "Lossless GmbH",
|
||||
"license": "UNLICENSED",
|
||||
"name": "@push.rocks/smartbucket",
|
||||
"version": "4.3.0",
|
||||
"description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
|
||||
"main": "dist_ts/index.js",
|
||||
"typings": "dist_ts/index.d.ts",
|
||||
"type": "module",
|
||||
"author": "Task Venture Capital GmbH",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"test": "(tstest test/)",
|
||||
"format": "(gitzone format)",
|
||||
"build": "(tsbuild)"
|
||||
"test": "(tstest test/ --verbose --logfile --timeout 120)",
|
||||
"build": "(tsbuild --web --allowimplicitany)"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@gitzone/tsbuild": "^2.1.17",
|
||||
"@gitzone/tstest": "^1.0.28",
|
||||
"@pushrocks/tapbundle": "^3.0.13",
|
||||
"tslint": "^5.20.0",
|
||||
"tslint-config-prettier": "^1.18.0"
|
||||
"@git.zone/tsbuild": "^3.1.0",
|
||||
"@git.zone/tsrun": "^2.0.0",
|
||||
"@git.zone/tstest": "^3.0.1",
|
||||
"@push.rocks/qenv": "^6.1.3",
|
||||
"@push.rocks/tapbundle": "^6.0.3"
|
||||
},
|
||||
"dependencies": {
|
||||
"@pushrocks/qenv": "^4.0.6",
|
||||
"@pushrocks/smartpath": "^4.0.1",
|
||||
"@pushrocks/smartpromise": "^3.0.6",
|
||||
"@pushrocks/streamfunction": "^1.0.24",
|
||||
"@types/minio": "^7.0.3",
|
||||
"minio": "^7.0.12"
|
||||
"@aws-sdk/client-s3": "^3.936.0",
|
||||
"@push.rocks/smartmime": "^2.0.4",
|
||||
"@push.rocks/smartpath": "^6.0.0",
|
||||
"@push.rocks/smartpromise": "^4.2.3",
|
||||
"@push.rocks/smartrx": "^3.0.10",
|
||||
"@push.rocks/smartstream": "^3.2.5",
|
||||
"@push.rocks/smartstring": "^4.1.0",
|
||||
"@push.rocks/smartunique": "^3.0.9",
|
||||
"@tsclass/tsclass": "^9.3.0",
|
||||
"minimatch": "^10.1.1"
|
||||
},
|
||||
"private": false,
|
||||
"files": [
|
||||
"ts/**/*",
|
||||
"ts_web/**/*",
|
||||
"dist/**/*",
|
||||
"dist_web/**/*",
|
||||
"dist_*/**/*",
|
||||
"dist_ts/**/*",
|
||||
"dist_ts_web/**/*",
|
||||
"assets/**/*",
|
||||
"cli.js",
|
||||
"npmextra.json",
|
||||
"readme.md"
|
||||
]
|
||||
],
|
||||
"browserslist": [
|
||||
"last 1 chrome versions"
|
||||
],
|
||||
"keywords": [
|
||||
"TypeScript",
|
||||
"cloud agnostic",
|
||||
"object storage",
|
||||
"bucket management",
|
||||
"file operations",
|
||||
"directory management",
|
||||
"data streaming",
|
||||
"S3",
|
||||
"multi-cloud",
|
||||
"file locking",
|
||||
"metadata management",
|
||||
"buffer handling",
|
||||
"access control",
|
||||
"environment configuration",
|
||||
"unified storage",
|
||||
"bucket policies",
|
||||
"trash management",
|
||||
"file transfer",
|
||||
"data management",
|
||||
"streaming"
|
||||
],
|
||||
"homepage": "https://code.foss.global/push.rocks/smartbucket",
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "https://code.foss.global/push.rocks/smartbucket.git"
|
||||
},
|
||||
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
|
||||
}
|
||||
|
||||
11446
pnpm-lock.yaml
generated
Normal file
11446
pnpm-lock.yaml
generated
Normal file
File diff suppressed because it is too large
Load Diff
4
pnpm-workspace.yaml
Normal file
4
pnpm-workspace.yaml
Normal file
@@ -0,0 +1,4 @@
|
||||
onlyBuiltDependencies:
|
||||
- esbuild
|
||||
- mongodb-memory-server
|
||||
- puppeteer
|
||||
5
readme.hints.md
Normal file
5
readme.hints.md
Normal file
@@ -0,0 +1,5 @@
|
||||
* The project uses the official s3 client, not the minio client.
|
||||
* **All methods throw by default** (strict mode): - Put operations: `fastPut`, `fastPutStream` throw when file exists and overwrite is false - Get operations: `getBucketByName`, `getFile`, `getSubDirectoryByName` throw when not found
|
||||
* **Use exists() methods to check before getting**: `bucketExists`, `fileExists`, `directoryExists`, `fastExists`
|
||||
* **No *Strict methods**: All removed (fastPutStrict, getBucketByNameStrict, getFileStrict, getSubDirectoryByNameStrict)
|
||||
* metadata is handled though the MetaData class. Important!
|
||||
894
readme.md
894
readme.md
@@ -1,26 +1,880 @@
|
||||
# @pushrocks/smartbucket
|
||||
simple cloud independent object storage
|
||||
# @push.rocks/smartbucket 🪣
|
||||
|
||||
## Availabililty and Links
|
||||
* [npmjs.org (npm package)](https://www.npmjs.com/package/@pushrocks/smartbucket)
|
||||
* [gitlab.com (source)](https://gitlab.com/pushrocks/smartbucket)
|
||||
* [github.com (source mirror)](https://github.com/pushrocks/smartbucket)
|
||||
* [docs (typedoc)](https://pushrocks.gitlab.io/smartbucket/)
|
||||
> A powerful, cloud-agnostic TypeScript library for object storage that makes S3 feel like a modern filesystem. Built for developers who demand simplicity, type-safety, and advanced features like metadata management, file locking, intelligent trash handling, and memory-efficient streaming.
|
||||
|
||||
## Status for master
|
||||
[](https://gitlab.com/pushrocks/smartbucket/commits/master)
|
||||
[](https://gitlab.com/pushrocks/smartbucket/commits/master)
|
||||
[](https://www.npmjs.com/package/@pushrocks/smartbucket)
|
||||
[](https://snyk.io/test/npm/@pushrocks/smartbucket)
|
||||
[](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
||||
[](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
||||
[](https://prettier.io/)
|
||||
## Why SmartBucket? 🎯
|
||||
|
||||
## Usage
|
||||
- **🌍 Cloud Agnostic** - Write once, run on AWS S3, MinIO, DigitalOcean Spaces, Backblaze B2, Wasabi, or any S3-compatible storage
|
||||
- **🚀 Modern TypeScript** - First-class TypeScript support with complete type definitions and async/await patterns
|
||||
- **💾 Memory Efficient** - Handle millions of files with async generators, RxJS observables, and cursor pagination
|
||||
- **🗑️ Smart Trash System** - Recover accidentally deleted files with built-in trash and restore functionality
|
||||
- **🔒 File Locking** - Prevent concurrent modifications with built-in locking mechanisms
|
||||
- **🏷️ Rich Metadata** - Attach custom metadata to any file for powerful organization and search
|
||||
- **🌊 Streaming Support** - Efficient handling of large files with Node.js and Web streams
|
||||
- **📁 Directory-like API** - Intuitive filesystem-like operations on object storage
|
||||
- **⚡ Fail-Fast** - Strict-by-default API catches errors immediately with precise stack traces
|
||||
|
||||
For further information read the linked docs at the top of this readme.
|
||||
## Quick Start 🚀
|
||||
|
||||
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
|
||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
|
||||
```typescript
|
||||
import { SmartBucket } from '@push.rocks/smartbucket';
|
||||
|
||||
[](https://maintainedby.lossless.com)
|
||||
// Connect to your storage
|
||||
const storage = new SmartBucket({
|
||||
accessKey: 'your-access-key',
|
||||
accessSecret: 'your-secret-key',
|
||||
endpoint: 's3.amazonaws.com',
|
||||
port: 443,
|
||||
useSsl: true
|
||||
});
|
||||
|
||||
// Get or create a bucket
|
||||
const bucket = await storage.getBucketByName('my-app-data');
|
||||
|
||||
// Upload a file
|
||||
await bucket.fastPut({
|
||||
path: 'users/profile.json',
|
||||
contents: JSON.stringify({ name: 'Alice', role: 'admin' })
|
||||
});
|
||||
|
||||
// Download it back
|
||||
const data = await bucket.fastGet({ path: 'users/profile.json' });
|
||||
console.log('📄', JSON.parse(data.toString()));
|
||||
|
||||
// List files efficiently (even with millions of objects!)
|
||||
for await (const key of bucket.listAllObjects('users/')) {
|
||||
console.log('🔍 Found:', key);
|
||||
}
|
||||
```
|
||||
|
||||
## Install 📦
|
||||
|
||||
```bash
|
||||
# Using pnpm (recommended)
|
||||
pnpm add @push.rocks/smartbucket
|
||||
|
||||
# Using npm
|
||||
npm install @push.rocks/smartbucket --save
|
||||
```
|
||||
|
||||
## Usage 🚀
|
||||
|
||||
### Table of Contents
|
||||
|
||||
1. [🏁 Getting Started](#-getting-started)
|
||||
2. [🗂️ Working with Buckets](#️-working-with-buckets)
|
||||
3. [📁 File Operations](#-file-operations)
|
||||
4. [📋 Memory-Efficient Listing](#-memory-efficient-listing)
|
||||
5. [📂 Directory Management](#-directory-management)
|
||||
6. [🌊 Streaming Operations](#-streaming-operations)
|
||||
7. [🔒 File Locking](#-file-locking)
|
||||
8. [🏷️ Metadata Management](#️-metadata-management)
|
||||
9. [🗑️ Trash & Recovery](#️-trash--recovery)
|
||||
10. [⚡ Advanced Features](#-advanced-features)
|
||||
11. [☁️ Cloud Provider Support](#️-cloud-provider-support)
|
||||
|
||||
### 🏁 Getting Started
|
||||
|
||||
First, set up your storage connection:
|
||||
|
||||
```typescript
|
||||
import { SmartBucket } from '@push.rocks/smartbucket';
|
||||
|
||||
// Initialize with your cloud storage credentials
|
||||
const smartBucket = new SmartBucket({
|
||||
accessKey: 'your-access-key',
|
||||
accessSecret: 'your-secret-key',
|
||||
endpoint: 's3.amazonaws.com', // Or your provider's endpoint
|
||||
port: 443,
|
||||
useSsl: true,
|
||||
region: 'us-east-1' // Optional, defaults to 'us-east-1'
|
||||
});
|
||||
```
|
||||
|
||||
**For MinIO or self-hosted S3:**
|
||||
```typescript
|
||||
const smartBucket = new SmartBucket({
|
||||
accessKey: 'minioadmin',
|
||||
accessSecret: 'minioadmin',
|
||||
endpoint: 'localhost',
|
||||
port: 9000,
|
||||
useSsl: false // MinIO often runs without SSL locally
|
||||
});
|
||||
```
|
||||
|
||||
### 🗂️ Working with Buckets
|
||||
|
||||
#### Creating Buckets
|
||||
|
||||
```typescript
|
||||
// Create a new bucket
|
||||
const myBucket = await smartBucket.createBucket('my-awesome-bucket');
|
||||
console.log(`✅ Bucket created: ${myBucket.name}`);
|
||||
```
|
||||
|
||||
#### Getting Existing Buckets
|
||||
|
||||
```typescript
|
||||
// Get a bucket reference (throws if not found - strict by default!)
|
||||
const existingBucket = await smartBucket.getBucketByName('existing-bucket');
|
||||
|
||||
// Check first, then get (non-throwing approach)
|
||||
if (await smartBucket.bucketExists('maybe-exists')) {
|
||||
const bucket = await smartBucket.getBucketByName('maybe-exists');
|
||||
console.log('✅ Found bucket:', bucket.name);
|
||||
}
|
||||
```
|
||||
|
||||
#### Removing Buckets
|
||||
|
||||
```typescript
|
||||
// Delete a bucket (must be empty)
|
||||
await smartBucket.removeBucket('old-bucket');
|
||||
console.log('🗑️ Bucket removed');
|
||||
```
|
||||
|
||||
### 📁 File Operations
|
||||
|
||||
#### Upload Files
|
||||
|
||||
```typescript
|
||||
const bucket = await smartBucket.getBucketByName('my-bucket');
|
||||
|
||||
// Simple file upload (returns File object)
|
||||
const file = await bucket.fastPut({
|
||||
path: 'documents/report.pdf',
|
||||
contents: Buffer.from('Your file content here')
|
||||
});
|
||||
console.log('✅ Uploaded:', file.path);
|
||||
|
||||
// Upload with string content
|
||||
await bucket.fastPut({
|
||||
path: 'notes/todo.txt',
|
||||
contents: 'Buy milk\nCall mom\nRule the world'
|
||||
});
|
||||
|
||||
// Upload with overwrite control
|
||||
const uploadedFile = await bucket.fastPut({
|
||||
path: 'images/logo.png',
|
||||
contents: imageBuffer,
|
||||
overwrite: true // Set to true to replace existing files
|
||||
});
|
||||
|
||||
// Error handling: fastPut throws if file exists and overwrite is false
|
||||
try {
|
||||
await bucket.fastPut({
|
||||
path: 'existing-file.txt',
|
||||
contents: 'new content'
|
||||
});
|
||||
} catch (error) {
|
||||
console.error('❌ Upload failed:', error.message);
|
||||
// Error: Object already exists at path 'existing-file.txt' in bucket 'my-bucket'.
|
||||
// Set overwrite:true to replace it.
|
||||
}
|
||||
```
|
||||
|
||||
#### Download Files
|
||||
|
||||
```typescript
|
||||
// Get file as Buffer
|
||||
const fileContent = await bucket.fastGet({
|
||||
path: 'documents/report.pdf'
|
||||
});
|
||||
console.log(`📄 File size: ${fileContent.length} bytes`);
|
||||
|
||||
// Get file as string
|
||||
const textContent = fileContent.toString('utf-8');
|
||||
|
||||
// Parse JSON files directly
|
||||
const jsonData = JSON.parse(fileContent.toString());
|
||||
```
|
||||
|
||||
#### Check File Existence
|
||||
|
||||
```typescript
|
||||
const exists = await bucket.fastExists({
|
||||
path: 'documents/report.pdf'
|
||||
});
|
||||
console.log(`File exists: ${exists ? '✅' : '❌'}`);
|
||||
```
|
||||
|
||||
#### Delete Files
|
||||
|
||||
```typescript
|
||||
// Permanent deletion
|
||||
await bucket.fastRemove({
|
||||
path: 'old-file.txt'
|
||||
});
|
||||
console.log('🗑️ File deleted permanently');
|
||||
```
|
||||
|
||||
#### Copy & Move Files
|
||||
|
||||
```typescript
|
||||
// Copy file within bucket
|
||||
await bucket.fastCopy({
|
||||
sourcePath: 'original/file.txt',
|
||||
destinationPath: 'backup/file-copy.txt'
|
||||
});
|
||||
console.log('📋 File copied');
|
||||
|
||||
// Move file (copy + delete original)
|
||||
await bucket.fastMove({
|
||||
sourcePath: 'temp/draft.txt',
|
||||
destinationPath: 'final/document.txt'
|
||||
});
|
||||
console.log('📦 File moved');
|
||||
```
|
||||
|
||||
### 📋 Memory-Efficient Listing
|
||||
|
||||
SmartBucket provides three powerful patterns for listing objects, optimized for handling **millions of files** efficiently:
|
||||
|
||||
#### Async Generators (Recommended) ⭐
|
||||
|
||||
Memory-efficient streaming using native JavaScript async iteration:
|
||||
|
||||
```typescript
|
||||
// List all objects with prefix - streams one at a time!
|
||||
for await (const key of bucket.listAllObjects('documents/')) {
|
||||
console.log(`📄 Found: ${key}`);
|
||||
|
||||
// Process each file individually (memory efficient!)
|
||||
const content = await bucket.fastGet({ path: key });
|
||||
processFile(content);
|
||||
|
||||
// Early termination support
|
||||
if (shouldStop()) break;
|
||||
}
|
||||
|
||||
// List all objects (no prefix)
|
||||
const allKeys: string[] = [];
|
||||
for await (const key of bucket.listAllObjects()) {
|
||||
allKeys.push(key);
|
||||
}
|
||||
|
||||
// Find objects matching glob patterns
|
||||
for await (const key of bucket.findByGlob('**/*.json')) {
|
||||
console.log(`📦 JSON file: ${key}`);
|
||||
}
|
||||
|
||||
// Complex glob patterns
|
||||
for await (const key of bucket.findByGlob('npm/packages/*/index.json')) {
|
||||
// Matches: npm/packages/foo/index.json, npm/packages/bar/index.json
|
||||
console.log(`📦 Package index: ${key}`);
|
||||
}
|
||||
|
||||
// More glob examples
|
||||
for await (const key of bucket.findByGlob('logs/**/*.log')) {
|
||||
console.log('📋 Log file:', key);
|
||||
}
|
||||
|
||||
for await (const key of bucket.findByGlob('images/*.{jpg,png,gif}')) {
|
||||
console.log('🖼️ Image:', key);
|
||||
}
|
||||
```
|
||||
|
||||
**Why use async generators?**
|
||||
- ✅ Processes one item at a time (constant memory usage)
|
||||
- ✅ Supports early termination with `break`
|
||||
- ✅ Native JavaScript - no dependencies
|
||||
- ✅ Perfect for large buckets with millions of objects
|
||||
- ✅ Works seamlessly with `for await...of` loops
|
||||
|
||||
#### RxJS Observables
|
||||
|
||||
Perfect for reactive pipelines and complex data transformations:
|
||||
|
||||
```typescript
|
||||
import { filter, take, map } from 'rxjs/operators';
|
||||
|
||||
// Stream keys as Observable with powerful operators
|
||||
bucket.listAllObjectsObservable('logs/')
|
||||
.pipe(
|
||||
filter(key => key.endsWith('.log')),
|
||||
take(100),
|
||||
map(key => ({ key, timestamp: Date.now() }))
|
||||
)
|
||||
.subscribe({
|
||||
next: (item) => console.log(`📋 Log file: ${item.key}`),
|
||||
error: (err) => console.error('❌ Error:', err),
|
||||
complete: () => console.log('✅ Listing complete')
|
||||
});
|
||||
|
||||
// Simple subscription without operators
|
||||
bucket.listAllObjectsObservable('data/')
|
||||
.subscribe({
|
||||
next: (key) => processKey(key),
|
||||
complete: () => console.log('✅ Done')
|
||||
});
|
||||
|
||||
// Combine with other observables
|
||||
import { merge } from 'rxjs';
|
||||
|
||||
const logs$ = bucket.listAllObjectsObservable('logs/');
|
||||
const backups$ = bucket.listAllObjectsObservable('backups/');
|
||||
|
||||
merge(logs$, backups$)
|
||||
.pipe(filter(key => key.includes('2024')))
|
||||
.subscribe(key => console.log('📅 2024 file:', key));
|
||||
```
|
||||
|
||||
**Why use observables?**
|
||||
- ✅ Rich operator ecosystem (filter, map, debounce, etc.)
|
||||
- ✅ Composable with other RxJS streams
|
||||
- ✅ Perfect for reactive architectures
|
||||
- ✅ Great for complex transformations
|
||||
|
||||
#### Cursor Pattern
|
||||
|
||||
Explicit pagination control for UI and resumable operations:
|
||||
|
||||
```typescript
|
||||
// Create cursor with custom page size
|
||||
const cursor = bucket.createCursor('uploads/', { pageSize: 100 });
|
||||
|
||||
// Fetch pages manually
|
||||
while (cursor.hasMore()) {
|
||||
const page = await cursor.next();
|
||||
console.log(`📄 Page has ${page.keys.length} items`);
|
||||
|
||||
for (const key of page.keys) {
|
||||
console.log(` - ${key}`);
|
||||
}
|
||||
|
||||
if (page.done) {
|
||||
console.log('✅ Reached end');
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Save and restore cursor state (perfect for resumable operations!)
|
||||
const token = cursor.getToken();
|
||||
// Store token in database or session...
|
||||
|
||||
// ... later, in a different request ...
|
||||
const newCursor = bucket.createCursor('uploads/', { pageSize: 100 });
|
||||
newCursor.setToken(token); // Resume from saved position!
|
||||
const nextPage = await cursor.next();
|
||||
|
||||
// Reset cursor to start over
|
||||
cursor.reset();
|
||||
const firstPage = await cursor.next(); // Back to the beginning
|
||||
```
|
||||
|
||||
**Why use cursors?**
|
||||
- ✅ Perfect for UI pagination (prev/next buttons)
|
||||
- ✅ Save/restore state for resumable operations
|
||||
- ✅ Explicit control over page fetching
|
||||
- ✅ Great for implementing "Load More" buttons
|
||||
|
||||
#### Convenience Methods
|
||||
|
||||
```typescript
|
||||
// Collect all keys into array (⚠️ WARNING: loads everything into memory!)
|
||||
const allKeys = await bucket.listAllObjectsArray('images/');
|
||||
console.log(`📦 Found ${allKeys.length} images`);
|
||||
|
||||
// Only use for small result sets
|
||||
const smallList = await bucket.listAllObjectsArray('config/');
|
||||
if (smallList.length < 100) {
|
||||
// Safe to process in memory
|
||||
smallList.forEach(key => console.log(key));
|
||||
}
|
||||
```
|
||||
|
||||
**Performance Comparison:**
|
||||
|
||||
| Method | Memory Usage | Best For | Supports Early Exit |
|
||||
|--------|-------------|----------|-------------------|
|
||||
| **Async Generator** | O(1) - constant | Most use cases, large datasets | ✅ Yes |
|
||||
| **Observable** | O(1) - constant | Reactive pipelines, RxJS apps | ✅ Yes |
|
||||
| **Cursor** | O(pageSize) | UI pagination, resumable ops | ✅ Yes |
|
||||
| **Array** | O(n) - grows with results | Small datasets (<10k items) | ❌ No |
|
||||
|
||||
### 📂 Directory Management
|
||||
|
||||
SmartBucket provides powerful directory-like operations for organizing your files:
|
||||
|
||||
```typescript
|
||||
// Get base directory
|
||||
const baseDir = await bucket.getBaseDirectory();
|
||||
|
||||
// List directories and files
|
||||
const directories = await baseDir.listDirectories();
|
||||
const files = await baseDir.listFiles();
|
||||
|
||||
console.log(`📁 Found ${directories.length} directories`);
|
||||
console.log(`📄 Found ${files.length} files`);
|
||||
|
||||
// Navigate subdirectories
|
||||
const subDir = await baseDir.getSubDirectoryByName('projects/2024');
|
||||
|
||||
// Create nested file
|
||||
await subDir.fastPut({
|
||||
path: 'report.pdf',
|
||||
contents: reportBuffer
|
||||
});
|
||||
|
||||
// Get directory tree structure
|
||||
const tree = await subDir.getTreeArray();
|
||||
console.log('🌳 Directory tree:', tree);
|
||||
|
||||
// Get directory path
|
||||
console.log('📂 Base path:', subDir.getBasePath()); // "projects/2024/"
|
||||
|
||||
// Create empty file as placeholder
|
||||
await subDir.createEmptyFile('placeholder.txt');
|
||||
```
|
||||
|
||||
### 🌊 Streaming Operations
|
||||
|
||||
Handle large files efficiently with streaming support:
|
||||
|
||||
#### Download Streams
|
||||
|
||||
```typescript
|
||||
// Node.js stream (for file I/O, HTTP responses, etc.)
|
||||
const nodeStream = await bucket.fastGetStream(
|
||||
{ path: 'large-video.mp4' },
|
||||
'nodestream'
|
||||
);
|
||||
|
||||
// Pipe to file
|
||||
import * as fs from 'node:fs';
|
||||
nodeStream.pipe(fs.createWriteStream('local-video.mp4'));
|
||||
|
||||
// Pipe to HTTP response
|
||||
app.get('/download', async (req, res) => {
|
||||
const stream = await bucket.fastGetStream(
|
||||
{ path: 'file.pdf' },
|
||||
'nodestream'
|
||||
);
|
||||
res.setHeader('Content-Type', 'application/pdf');
|
||||
stream.pipe(res);
|
||||
});
|
||||
|
||||
// Web stream (for modern browser/Deno environments)
|
||||
const webStream = await bucket.fastGetStream(
|
||||
{ path: 'large-file.zip' },
|
||||
'webstream'
|
||||
);
|
||||
```
|
||||
|
||||
#### Upload Streams
|
||||
|
||||
```typescript
|
||||
import * as fs from 'node:fs';
|
||||
|
||||
// Stream upload from file
|
||||
const readStream = fs.createReadStream('big-data.csv');
|
||||
await bucket.fastPutStream({
|
||||
path: 'uploads/big-data.csv',
|
||||
stream: readStream,
|
||||
metadata: {
|
||||
contentType: 'text/csv',
|
||||
userMetadata: {
|
||||
uploadedBy: 'data-team',
|
||||
version: '2.0'
|
||||
}
|
||||
}
|
||||
});
|
||||
console.log('✅ Large file uploaded via stream');
|
||||
```
|
||||
|
||||
#### Reactive Streams with RxJS
|
||||
|
||||
```typescript
|
||||
// Get file as ReplaySubject for reactive programming
|
||||
const replaySubject = await bucket.fastGetReplaySubject({
|
||||
path: 'data/sensor-readings.json',
|
||||
chunkSize: 1024
|
||||
});
|
||||
|
||||
// Multiple subscribers can consume the same data
|
||||
replaySubject.subscribe({
|
||||
next: (chunk) => processChunk(chunk),
|
||||
complete: () => console.log('✅ Stream complete')
|
||||
});
|
||||
|
||||
replaySubject.subscribe({
|
||||
next: (chunk) => logChunk(chunk)
|
||||
});
|
||||
```
|
||||
|
||||
### 🔒 File Locking
|
||||
|
||||
Prevent concurrent modifications with built-in file locking:
|
||||
|
||||
```typescript
|
||||
const file = await bucket.getBaseDirectory()
|
||||
.getFile({ path: 'important-config.json' });
|
||||
|
||||
// Lock file for 10 minutes
|
||||
await file.lock({ timeoutMillis: 600000 });
|
||||
console.log('🔒 File locked');
|
||||
|
||||
// Try to modify locked file (will throw error)
|
||||
try {
|
||||
await file.delete();
|
||||
} catch (error) {
|
||||
console.log('❌ Cannot delete locked file');
|
||||
}
|
||||
|
||||
// Check lock status
|
||||
const isLocked = await file.isLocked();
|
||||
console.log(`Lock status: ${isLocked ? '🔒 Locked' : '🔓 Unlocked'}`);
|
||||
|
||||
// Unlock when done
|
||||
await file.unlock();
|
||||
console.log('🔓 File unlocked');
|
||||
```
|
||||
|
||||
**Lock use cases:**
|
||||
- 🔄 Prevent concurrent writes during critical updates
|
||||
- 🔐 Protect configuration files during deployment
|
||||
- 🚦 Coordinate distributed workers
|
||||
- 🛡️ Ensure data consistency
|
||||
|
||||
### 🏷️ Metadata Management
|
||||
|
||||
Attach and manage rich metadata for your files:
|
||||
|
||||
```typescript
|
||||
const file = await bucket.getBaseDirectory()
|
||||
.getFile({ path: 'document.pdf' });
|
||||
|
||||
// Get metadata handler
|
||||
const metadata = await file.getMetaData();
|
||||
|
||||
// Set custom metadata
|
||||
await metadata.setCustomMetaData({
|
||||
key: 'author',
|
||||
value: 'John Doe'
|
||||
});
|
||||
|
||||
await metadata.setCustomMetaData({
|
||||
key: 'department',
|
||||
value: 'Engineering'
|
||||
});
|
||||
|
||||
await metadata.setCustomMetaData({
|
||||
key: 'version',
|
||||
value: '1.0.0'
|
||||
});
|
||||
|
||||
// Retrieve metadata
|
||||
const author = await metadata.getCustomMetaData({ key: 'author' });
|
||||
console.log(`📝 Author: ${author}`);
|
||||
|
||||
// Get all metadata
|
||||
const allMeta = await metadata.getAllCustomMetaData();
|
||||
console.log('📋 All metadata:', allMeta);
|
||||
// { author: 'John Doe', department: 'Engineering', version: '1.0.0' }
|
||||
|
||||
// Check if metadata exists
|
||||
const hasMetadata = await metadata.hasMetaData();
|
||||
console.log(`Has metadata: ${hasMetadata ? '✅' : '❌'}`);
|
||||
```
|
||||
|
||||
**Metadata use cases:**
|
||||
- 👤 Track file ownership and authorship
|
||||
- 🏷️ Add tags and categories for search
|
||||
- 📊 Store processing status or workflow state
|
||||
- 🔍 Enable rich querying and filtering
|
||||
- 📝 Maintain audit trails
|
||||
|
||||
### 🗑️ Trash & Recovery

SmartBucket includes an intelligent trash system for safe file deletion and recovery:

```typescript
const file = await bucket.getBaseDirectory()
  .getFile({ path: 'important-data.xlsx' });

// Option 1: move to trash instead of permanent deletion (recoverable)
await file.delete({ mode: 'trash' });
console.log('🗑️ File moved to trash (can be restored!)');

// Option 2: permanent deletion (use with caution, cannot be recovered!)
await file.delete({ mode: 'permanent' });
console.log('💀 File permanently deleted (cannot be recovered)');

// Access the trash
const trash = await bucket.getTrash();
const trashDir = await trash.getTrashDir();
const trashedFiles = await trashDir.listFiles();
console.log(`📦 ${trashedFiles.length} files in trash`);

// Restore from trash
const trashedFile = await bucket.getBaseDirectory()
  .getFile({
    path: 'important-data.xlsx',
    getFromTrash: true
  });

await trashedFile.restore({ useOriginalPath: true });
console.log('♻️ File restored to original location');

// Or restore to a different location
await trashedFile.restore({
  useOriginalPath: false,
  restorePath: 'recovered/important-data.xlsx'
});
console.log('♻️ File restored to new location');

// Empty the trash permanently
await trash.emptyTrash();
console.log('🧹 Trash emptied');
```

**Trash features:**
- ♻️ Recover accidentally deleted files
- 🏷️ Preserves original path in metadata
- ⏰ Tracks deletion timestamp
- 🔍 List and inspect trashed files
- 🧹 Bulk empty trash operation

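If you want trash-first deletion to be the default in your own code, a thin wrapper can make the recoverable mode the path of least resistance. A minimal sketch over the `delete()` API shown above (`safeDelete` is a hypothetical helper; `File` is the type exported by the library):

```typescript
import type { File } from '@push.rocks/smartbucket';

// Default to the recoverable trash mode; opt in to permanent deletion explicitly
async function safeDelete(file: File, permanent = false): Promise<void> {
  await file.delete({ mode: permanent ? 'permanent' : 'trash' });
}

await safeDelete(file);       // moved to trash, restorable
await safeDelete(file, true); // gone for good
```
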
### ⚡ Advanced Features

#### File Statistics

```typescript
// Get detailed file statistics (fastStat returns the raw S3 HeadObject output)
const stats = await bucket.fastStat({ path: 'document.pdf' });
console.log(`📊 Size: ${stats.ContentLength} bytes`);
console.log(`📅 Last modified: ${stats.LastModified}`);
console.log(`🏷️ ETag: ${stats.ETag}`);
console.log(`🗂️ Storage class: ${stats.StorageClass}`);
```

#### Magic Bytes Detection

Detect file types by examining the first bytes (useful for validation):

```typescript
// Read first bytes for file type detection
const magicBytes = await bucket.getMagicBytes({
  path: 'mystery-file',
  length: 16
});
console.log(`🔮 Magic bytes: ${magicBytes.toString('hex')}`);

// Or from a File object
const file = await bucket.getBaseDirectory()
  .getFile({ path: 'image.jpg' });
const magic = await file.getMagicBytes({ length: 4 });

// Check file signatures
if (magic[0] === 0xFF && magic[1] === 0xD8) {
  console.log('📸 This is a JPEG image');
} else if (magic[0] === 0x89 && magic[1] === 0x50) {
  console.log('🖼️ This is a PNG image');
}
```

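If you check more than a couple of formats, a small signature table keeps the detection readable. A sketch over the `getMagicBytes()` call above (the signature list is illustrative, not exhaustive):

```typescript
const signatures: Array<{ name: string; bytes: number[] }> = [
  { name: 'JPEG', bytes: [0xff, 0xd8] },
  { name: 'PNG', bytes: [0x89, 0x50, 0x4e, 0x47] },
  { name: 'PDF', bytes: [0x25, 0x50, 0x44, 0x46] }, // '%PDF'
  { name: 'GZIP', bytes: [0x1f, 0x8b] },
];

const magicHeader = await bucket.getMagicBytes({ path: 'mystery-file', length: 8 });
const detected = signatures.find((sig) =>
  sig.bytes.every((byte, i) => magicHeader[i] === byte)
);
console.log(detected ? `🔮 Detected: ${detected.name}` : '🔮 Unknown format');
```
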
#### JSON Data Operations

```typescript
const file = await bucket.getBaseDirectory()
  .getFile({ path: 'config.json' });

// Read JSON data
const config = await file.getJsonData();
console.log('⚙️ Config loaded:', config);

// Update JSON data
config.version = '2.0';
config.updated = new Date().toISOString();
config.features.push('newFeature');

await file.writeJsonData(config);
console.log('💾 Config updated');
```

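Note that `getJsonData()` followed by `writeJsonData()` is a read-modify-write cycle, so concurrent writers can clobber each other on shared paths. One option is to guard the cycle with the file locking shown earlier; a minimal sketch (assuming the `lock`/`unlock` API above; the counter update is illustrative):

```typescript
await file.lock({ timeoutMillis: 30000 });
try {
  const data = await file.getJsonData();
  data.counter = (data.counter ?? 0) + 1; // illustrative update
  await file.writeJsonData(data);
} finally {
  await file.unlock();
}
```
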
#### Directory & File Type Detection

```typescript
// Check if a path is a directory
const isDir = await bucket.isDirectory({ path: 'uploads/' });

// Check if a path is a file
const isFile = await bucket.isFile({ path: 'uploads/document.pdf' });

console.log(`Is directory: ${isDir ? '📁' : '❌'}`);
console.log(`Is file: ${isFile ? '📄' : '❌'}`);
```

#### Clean Bucket Contents

```typescript
// Remove all files and directories (⚠️ use with caution!)
await bucket.cleanAllContents();
console.log('🧹 Bucket cleaned');
```

### ☁️ Cloud Provider Support

SmartBucket works seamlessly with all major S3-compatible providers:

| Provider | Status | Notes |
|----------|--------|-------|
| **AWS S3** | ✅ Full support | Native S3 API |
| **MinIO** | ✅ Full support | Self-hosted, perfect for development |
| **DigitalOcean Spaces** | ✅ Full support | Cost-effective S3-compatible |
| **Backblaze B2** | ✅ Full support | Very affordable storage |
| **Wasabi** | ✅ Full support | High-performance hot storage |
| **Google Cloud Storage** | ✅ Full support | Via S3-compatible API |
| **Cloudflare R2** | ✅ Full support | Zero egress fees |
| **Any S3-compatible** | ✅ Full support | Works with any S3-compatible provider |

The library automatically handles provider quirks and optimizes operations for each platform while maintaining a consistent API.

**Configuration examples:**

```typescript
// AWS S3
const awsStorage = new SmartBucket({
  accessKey: process.env.AWS_ACCESS_KEY_ID,
  accessSecret: process.env.AWS_SECRET_ACCESS_KEY,
  endpoint: 's3.amazonaws.com',
  region: 'us-east-1',
  useSsl: true
});

// MinIO (local development)
const minioStorage = new SmartBucket({
  accessKey: 'minioadmin',
  accessSecret: 'minioadmin',
  endpoint: 'localhost',
  port: 9000,
  useSsl: false
});

// DigitalOcean Spaces
const doStorage = new SmartBucket({
  accessKey: process.env.DO_SPACES_KEY,
  accessSecret: process.env.DO_SPACES_SECRET,
  endpoint: 'nyc3.digitaloceanspaces.com',
  region: 'nyc3',
  useSsl: true
});

// Backblaze B2
const b2Storage = new SmartBucket({
  accessKey: process.env.B2_KEY_ID,
  accessSecret: process.env.B2_APPLICATION_KEY,
  endpoint: 's3.us-west-002.backblazeb2.com',
  region: 'us-west-002',
  useSsl: true
});
```

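Other providers from the table follow the same pattern. Cloudflare R2, for example, uses an account-scoped endpoint (the endpoint below is illustrative; substitute your own account ID, which is deliberately left as a placeholder):

```typescript
// Cloudflare R2 (endpoint format: <accountid>.r2.cloudflarestorage.com)
const r2Storage = new SmartBucket({
  accessKey: process.env.R2_ACCESS_KEY_ID,
  accessSecret: process.env.R2_SECRET_ACCESS_KEY,
  endpoint: '<accountid>.r2.cloudflarestorage.com',
  region: 'auto',
  useSsl: true
});
```
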
### 🔧 Advanced Configuration

```typescript
// Environment-based configuration with @push.rocks/qenv
import { Qenv } from '@push.rocks/qenv';

const qenv = new Qenv('./', './.nogit/');

const smartBucket = new SmartBucket({
  accessKey: await qenv.getEnvVarOnDemandStrict('S3_ACCESS_KEY'),
  accessSecret: await qenv.getEnvVarOnDemandStrict('S3_SECRET'),
  endpoint: await qenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
  port: parseInt(await qenv.getEnvVarOnDemandStrict('S3_PORT')),
  useSsl: await qenv.getEnvVarOnDemandStrict('S3_USE_SSL') === 'true',
  region: await qenv.getEnvVarOnDemandStrict('S3_REGION')
});
```

### 🧪 Testing

SmartBucket is thoroughly tested with 82 comprehensive tests covering all features:

```bash
# Run all tests
pnpm test

# Run a specific test file
pnpm tstest test/test.listing.node+deno.ts --verbose

# Run tests with a log file
pnpm test --logfile
```

### 🛡️ Error Handling Best Practices

SmartBucket uses a **strict-by-default** approach: methods throw errors instead of returning null:

```typescript
// ✅ Good: Check existence first
if (await bucket.fastExists({ path: 'file.txt' })) {
  const content = await bucket.fastGet({ path: 'file.txt' });
  process(content);
}

// ✅ Good: Try/catch for expected failures
try {
  const file = await bucket.fastGet({ path: 'might-not-exist.txt' });
  process(file);
} catch (error) {
  console.log('File not found, using default');
  useDefault();
}

// ✅ Good: Explicit overwrite control
try {
  await bucket.fastPut({
    path: 'existing-file.txt',
    contents: 'new data',
    overwrite: false // Explicitly fail if the file exists
  });
} catch (error) {
  console.log('File already exists');
}

// ❌ Bad: Assuming a file exists without checking
const content = await bucket.fastGet({ path: 'file.txt' }); // May throw!
```

### 💡 Best Practices

1. **Always use strict mode** for critical operations to catch errors early
2. **Check existence first** with `fastExists()`, `bucketExists()`, etc. before operations
3. **Implement proper error handling** for network and permission issues
4. **Use streaming** for large files (>100MB) to optimize memory usage
5. **Leverage metadata** for organizing and searching files
6. **Enable trash mode** for important data to prevent accidental loss
7. **Lock files** during critical operations to prevent race conditions
8. **Use async generators** for listing large buckets to avoid memory issues
9. **Set explicit overwrite flags** to prevent accidental file overwrites (see the sketch after this list)
10. **Clean up resources** properly when done

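A minimal sketch combining several of these practices (existence checks and explicit overwrite control), built from the APIs shown earlier (`Bucket` is the type exported by the library; `replaceConfig` is a hypothetical helper):

```typescript
import type { Bucket } from '@push.rocks/smartbucket';

async function replaceConfig(bucket: Bucket, contents: string): Promise<void> {
  const path = 'config/app.json';
  const exists = await bucket.fastExists({ path }); // practice 2: check first
  await bucket.fastPut({
    path,
    contents,
    overwrite: exists, // practice 9: overwrite only when we know we are replacing
  });
}
```
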
### 📊 Performance Tips

- **Listing**: Use async generators or cursors for buckets with >10,000 objects (see the sketch below)
- **Uploads**: Use streams for files >100MB
- **Downloads**: Use streams for files you'll process incrementally
- **Metadata**: Cache metadata when reading frequently
- **Locking**: Keep lock durations as short as possible
- **Glob patterns**: Be specific to reduce objects scanned

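For the listing tip in particular, the cursor API keeps memory flat regardless of bucket size. A minimal sketch using `createCursor()` (the page size and `handleKey` worker are illustrative):

```typescript
const cursor = bucket.createCursor('logs/', { pageSize: 1000 });
while (cursor.hasMore()) {
  const { keys } = await cursor.next();
  for (const key of keys) {
    await handleKey(key); // hypothetical per-object work
  }
}
```
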
## License and Legal Information

This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.

**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.

### Trademarks

This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.

### Company Information

Task Venture Capital GmbH
Registered at the District Court Bremen, HRB 35230 HB, Germany

For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.

By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.

0  test/helpers/prepare.ts  Normal file

298  test/test.listing.node+deno.ts  Normal file
@@ -0,0 +1,298 @@

// test.listing.node+deno.ts - Tests for memory-efficient listing methods

import { tap, expect } from '@git.zone/tstest/tapbundle';
import * as smartbucket from '../ts/index.js';

// Get test configuration
import * as qenv from '@push.rocks/qenv';
const testQenv = new qenv.Qenv('./', './.nogit/');

// Test bucket reference
let testBucket: smartbucket.Bucket;
let testSmartbucket: smartbucket.SmartBucket;

// Setup: Create test bucket and populate with test data
tap.test('should create valid smartbucket and bucket', async () => {
  testSmartbucket = new smartbucket.SmartBucket({
    accessKey: await testQenv.getEnvVarOnDemand('S3_ACCESSKEY'),
    accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRETKEY'),
    endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
    port: parseInt(await testQenv.getEnvVarOnDemand('S3_PORT')),
    useSsl: false,
  });

  testBucket = await smartbucket.Bucket.getBucketByName(
    testSmartbucket,
    await testQenv.getEnvVarOnDemand('S3_BUCKET')
  );
  expect(testBucket).toBeInstanceOf(smartbucket.Bucket);
});

tap.test('should clean bucket and create test data for listing tests', async () => {
  // Clean bucket first
  await testBucket.cleanAllContents();

  // Create test structure:
  // npm/packages/foo/index.json
  // npm/packages/foo/1.0.0.tgz
  // npm/packages/bar/index.json
  // npm/packages/bar/2.0.0.tgz
  // oci/blobs/sha256-abc.tar
  // oci/blobs/sha256-def.tar
  // oci/manifests/latest.json
  // docs/readme.md
  // docs/api.md

  const testFiles = [
    'npm/packages/foo/index.json',
    'npm/packages/foo/1.0.0.tgz',
    'npm/packages/bar/index.json',
    'npm/packages/bar/2.0.0.tgz',
    'oci/blobs/sha256-abc.tar',
    'oci/blobs/sha256-def.tar',
    'oci/manifests/latest.json',
    'docs/readme.md',
    'docs/api.md',
  ];

  for (const filePath of testFiles) {
    await testBucket.fastPut({
      path: filePath,
      contents: `test content for ${filePath}`,
    });
  }
});

// ==========================
// Async Generator Tests
// ==========================

tap.test('listAllObjects should iterate all objects with prefix', async () => {
  const keys: string[] = [];
  for await (const key of testBucket.listAllObjects('npm/')) {
    keys.push(key);
  }

  expect(keys.length).toEqual(4);
  expect(keys).toContain('npm/packages/foo/index.json');
  expect(keys).toContain('npm/packages/bar/2.0.0.tgz');
});

tap.test('listAllObjects should support early termination', async () => {
  let count = 0;
  for await (const key of testBucket.listAllObjects('')) {
    count++;
    if (count >= 3) break; // Early exit
  }

  expect(count).toEqual(3);
});

tap.test('listAllObjects without prefix should list all objects', async () => {
  const keys: string[] = [];
  for await (const key of testBucket.listAllObjects()) {
    keys.push(key);
  }

  expect(keys.length).toBeGreaterThanOrEqual(9);
});

// ==========================
// Observable Tests
// ==========================

tap.test('listAllObjectsObservable should emit all objects', async () => {
  const keys: string[] = [];

  await new Promise<void>((resolve, reject) => {
    testBucket.listAllObjectsObservable('oci/')
      .subscribe({
        next: (key) => keys.push(key),
        error: (err) => reject(err),
        complete: () => resolve(),
      });
  });

  expect(keys.length).toEqual(3);
  expect(keys).toContain('oci/blobs/sha256-abc.tar');
  expect(keys).toContain('oci/manifests/latest.json');
});

tap.test('listAllObjectsObservable should support RxJS operators', async () => {
  const jsonFiles: string[] = [];

  await new Promise<void>((resolve, reject) => {
    testBucket.listAllObjectsObservable('npm/')
      .subscribe({
        next: (key: string) => {
          if (key.endsWith('.json')) {
            jsonFiles.push(key);
          }
        },
        error: (err: any) => reject(err),
        complete: () => resolve(),
      });
  });

  expect(jsonFiles.length).toEqual(2);
  expect(jsonFiles.every((k) => k.endsWith('.json'))).toBeTrue();
});

// ==========================
// Cursor Tests
// ==========================

tap.test('createCursor should allow manual pagination', async () => {
  const cursor = testBucket.createCursor('npm/', { pageSize: 2 });

  // First page
  const page1 = await cursor.next();
  expect(page1.keys.length).toEqual(2);
  expect(page1.done).toBeFalse();

  // Second page
  const page2 = await cursor.next();
  expect(page2.keys.length).toEqual(2);
  expect(page2.done).toBeTrue();
});

tap.test('cursor.hasMore() should accurately track state', async () => {
  const cursor = testBucket.createCursor('docs/', { pageSize: 10 });

  expect(cursor.hasMore()).toBeTrue();

  await cursor.next(); // Should get all docs files

  expect(cursor.hasMore()).toBeFalse();
});

tap.test('cursor.reset() should allow re-iteration', async () => {
  const cursor = testBucket.createCursor('docs/');

  const firstRun = await cursor.next();
  expect(firstRun.keys.length).toBeGreaterThan(0);

  cursor.reset();
  expect(cursor.hasMore()).toBeTrue();

  const secondRun = await cursor.next();
  expect(secondRun.keys).toEqual(firstRun.keys);
});

tap.test('cursor should support save/restore with token', async () => {
  const cursor1 = testBucket.createCursor('npm/', { pageSize: 2 });

  await cursor1.next(); // Advance cursor
  const token = cursor1.getToken();
  expect(token).toBeDefined();

  // Create new cursor and restore state
  const cursor2 = testBucket.createCursor('npm/', { pageSize: 2 });
  cursor2.setToken(token);

  const page = await cursor2.next();
  expect(page.keys.length).toBeGreaterThan(0);
});

// ==========================
// findByGlob Tests
// ==========================

tap.test('findByGlob should match simple patterns', async () => {
  const matches: string[] = [];
  for await (const key of testBucket.findByGlob('**/*.json')) {
    matches.push(key);
  }

  expect(matches.length).toEqual(3); // foo/index.json, bar/index.json, latest.json
  expect(matches.every((k) => k.endsWith('.json'))).toBeTrue();
});

tap.test('findByGlob should match specific path patterns', async () => {
  const matches: string[] = [];
  for await (const key of testBucket.findByGlob('npm/packages/*/index.json')) {
    matches.push(key);
  }

  expect(matches.length).toEqual(2);
  expect(matches).toContain('npm/packages/foo/index.json');
  expect(matches).toContain('npm/packages/bar/index.json');
});

tap.test('findByGlob should match wildcard patterns', async () => {
  const matches: string[] = [];
  for await (const key of testBucket.findByGlob('oci/blobs/*')) {
    matches.push(key);
  }

  expect(matches.length).toEqual(2);
  expect(matches.every((k) => k.startsWith('oci/blobs/'))).toBeTrue();
});

// ==========================
// listAllObjectsArray Tests
// ==========================

tap.test('listAllObjectsArray should collect all keys into array', async () => {
  const keys = await testBucket.listAllObjectsArray('docs/');

  expect(Array.isArray(keys)).toBeTrue();
  expect(keys.length).toEqual(2);
  expect(keys).toContain('docs/readme.md');
  expect(keys).toContain('docs/api.md');
});

tap.test('listAllObjectsArray without prefix should return all objects', async () => {
  const keys = await testBucket.listAllObjectsArray();

  expect(keys.length).toBeGreaterThanOrEqual(9);
});

// ==========================
// Performance/Edge Case Tests
// ==========================

tap.test('should handle empty prefix results gracefully', async () => {
  const keys: string[] = [];
  for await (const key of testBucket.listAllObjects('nonexistent/')) {
    keys.push(key);
  }

  expect(keys.length).toEqual(0);
});

tap.test('cursor should handle empty results', async () => {
  const cursor = testBucket.createCursor('nonexistent/');
  const result = await cursor.next();

  expect(result.keys.length).toEqual(0);
  expect(result.done).toBeTrue();
  expect(cursor.hasMore()).toBeFalse();
});

tap.test('observable should complete immediately on empty results', async () => {
  let completed = false;
  let count = 0;

  await new Promise<void>((resolve, reject) => {
    testBucket.listAllObjectsObservable('nonexistent/')
      .subscribe({
        next: () => count++,
        error: (err) => reject(err),
        complete: () => {
          completed = true;
          resolve();
        },
      });
  });

  expect(count).toEqual(0);
  expect(completed).toBeTrue();
});

// Cleanup
tap.test('should clean up test data', async () => {
  await testBucket.cleanAllContents();
});

export default tap.start();

76  test/test.local.node+deno.ts  Normal file
@@ -0,0 +1,76 @@

import { expect, tap } from '@git.zone/tstest/tapbundle';

import * as plugins from '../ts/plugins.js';
import * as smartbucket from '../ts/index.js';

// Minimal in-memory stand-in for the S3 client: returns the queued result pages in order.
class FakeS3Client {
  private callIndex = 0;

  constructor(private readonly pages: Array<Partial<plugins.s3.ListObjectsV2Output>>) {}

  public async send(_command: any) {
    const page = this.pages[this.callIndex] || { Contents: [], CommonPrefixes: [], IsTruncated: false };
    this.callIndex += 1;
    return page;
  }
}

tap.test('MetaData.hasMetaData should return false when metadata file does not exist', async () => {
  const fakeFile = {
    name: 'file.txt',
    parentDirectoryRef: {
      async getFile() {
        throw new Error(`File not found at path 'file.txt.metadata'`);
      },
    },
  } as unknown as smartbucket.File;

  const hasMetaData = await smartbucket.MetaData.hasMetaData({ file: fakeFile });
  expect(hasMetaData).toBeFalse();
});

tap.test('getSubDirectoryByName should create correct parent chain for new nested directories', async () => {
  const fakeSmartbucket = { s3Client: new FakeS3Client([{ Contents: [], CommonPrefixes: [] }]) } as unknown as smartbucket.SmartBucket;
  const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
  const baseDirectory = new smartbucket.Directory(bucket, null as any, '');

  const nestedDirectory = await baseDirectory.getSubDirectoryByName('level1/level2', { getEmptyDirectory: true });

  expect(nestedDirectory.name).toEqual('level2');
  expect(nestedDirectory.parentDirectoryRef.name).toEqual('level1');
  expect(nestedDirectory.getBasePath()).toEqual('level1/level2/');
});

tap.test('listFiles should aggregate results across paginated ListObjectsV2 responses', async () => {
  const firstPage = {
    Contents: Array.from({ length: 1000 }, (_, index) => ({ Key: `file-${index}` })),
    IsTruncated: true,
    NextContinuationToken: 'token-1',
  };
  const secondPage = {
    Contents: Array.from({ length: 200 }, (_, index) => ({ Key: `file-${1000 + index}` })),
    IsTruncated: false,
  };
  const fakeSmartbucket = { s3Client: new FakeS3Client([firstPage, secondPage]) } as unknown as smartbucket.SmartBucket;
  const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
  const baseDirectory = new smartbucket.Directory(bucket, null as any, '');

  const files = await baseDirectory.listFiles();
  expect(files.length).toEqual(1200);
});

tap.test('listDirectories should aggregate CommonPrefixes across pagination', async () => {
  const fakeSmartbucket = {
    s3Client: new FakeS3Client([
      { CommonPrefixes: [{ Prefix: 'dirA/' }], IsTruncated: true, NextContinuationToken: 'token-1' },
      { CommonPrefixes: [{ Prefix: 'dirB/' }], IsTruncated: false },
    ]),
  } as unknown as smartbucket.SmartBucket;
  const bucket = new smartbucket.Bucket(fakeSmartbucket, 'test-bucket');
  const baseDirectory = new smartbucket.Directory(bucket, null as any, '');

  const directories = await baseDirectory.listDirectories();
  expect(directories.map((d) => d.name)).toEqual(['dirA', 'dirB']);
});

export default tap.start();

7  test/test.metadata.node+deno.ts  Normal file
@@ -0,0 +1,7 @@

import { tap, expect } from '@git.zone/tstest/tapbundle';

tap.test('test metadata functionality', async () => {
});

export default tap.start();

132  test/test.node+deno.ts  Normal file
@@ -0,0 +1,132 @@

import { expect, tap } from '@git.zone/tstest/tapbundle';
import { Qenv } from '@push.rocks/qenv';

import * as smartbucket from '../ts/index.js';

const testQenv = new Qenv('./', './.nogit/');

let testSmartbucket: smartbucket.SmartBucket;
let myBucket: smartbucket.Bucket;
let baseDirectory: smartbucket.Directory;

tap.test('should create a valid smartbucket', async () => {
  testSmartbucket = new smartbucket.SmartBucket({
    accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
    accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_SECRETKEY'),
    endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
    port: parseInt(await testQenv.getEnvVarOnDemandStrict('S3_PORT')),
    useSsl: false,
  });
  expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
  const bucketName = await testQenv.getEnvVarOnDemandStrict('S3_BUCKET');
  myBucket = await testSmartbucket.getBucketByName(bucketName);
  expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
  expect(myBucket.name).toEqual(bucketName);
});

tap.test('should clean all contents', async () => {
  await myBucket.cleanAllContents();
  expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
  expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
});

tap.skip.test('should create testbucket', async () => {
  // await testSmartbucket.createBucket('testzone2');
});

tap.skip.test('should remove testbucket', async () => {
  // await testSmartbucket.removeBucket('testzone2');
});

// Fast operations
tap.test('should store data in bucket fast', async () => {
  await myBucket.fastPut({
    path: 'hithere/socool.txt',
    contents: 'hi there!',
  });
});

tap.test('should get data in bucket', async () => {
  const fileString = await myBucket.fastGet({
    path: 'hithere/socool.txt',
  });
  const fileStringStream = await myBucket.fastGetStream(
    {
      path: 'hithere/socool.txt',
    },
    'nodestream'
  );
  console.log(fileString);
});

tap.test('should delete data in bucket', async () => {
  await myBucket.fastRemove({
    path: 'hithere/socool.txt',
  });
});

// fs operations

tap.test('prepare for directory style tests', async () => {
  await myBucket.fastPut({
    path: 'dir1/file1.txt',
    contents: 'dir1/file1.txt content',
  });
  await myBucket.fastPut({
    path: 'dir1/file2.txt',
    contents: 'dir1/file2.txt content',
  });
  await myBucket.fastPut({
    path: 'dir2/file1.txt',
    contents: 'dir2/file1.txt content',
  });
  await myBucket.fastPut({
    path: 'dir3/file1.txt',
    contents: 'dir3/file1.txt content',
  });
  await myBucket.fastPut({
    path: 'dir3/dir4/file1.txt',
    contents: 'dir3/dir4/file1.txt content',
  });
  await myBucket.fastPut({
    path: '/file1.txt',
    contents: 'file1 content',
  });
});

tap.test('should get base directory', async () => {
  baseDirectory = await myBucket.getBaseDirectory();
  const directories = await baseDirectory.listDirectories();
  console.log('Found the following directories:');
  console.log(directories);
  expect(directories.length).toEqual(3);
  const files = await baseDirectory.listFiles();
  console.log('Found the following files:');
  console.log(files);
  expect(files.length).toEqual(1);
});

tap.test('should correctly build paths for sub directories', async () => {
  const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
  expect(dir4).toBeInstanceOf(smartbucket.Directory);
  const dir4BasePath = dir4?.getBasePath();
  console.log(dir4BasePath);
  expect(dir4BasePath).toEqual('dir3/dir4/');
});

tap.test('clean up directory style tests', async () => {
  await myBucket.fastRemove({
    path: 'dir1/file1.txt',
  });
  await myBucket.fastRemove({
    path: 'dir1/file2.txt',
  });
  await myBucket.fastRemove({
    path: 'dir2/file1.txt',
  });
  await myBucket.fastRemove({ path: 'dir3/file1.txt' });
  await myBucket.fastRemove({ path: 'dir3/dir4/file1.txt' });
  await myBucket.fastRemove({ path: 'file1.txt' });
});

export default tap.start();

94  test/test.trash.node+deno.ts  Normal file
@@ -0,0 +1,94 @@

import { expect, tap } from '@git.zone/tstest/tapbundle';
import { jestExpect } from '@push.rocks/tapbundle/node';
import { Qenv } from '@push.rocks/qenv';

import * as smartbucket from '../ts/index.js';

const testQenv = new Qenv('./', './.nogit/');

let testSmartbucket: smartbucket.SmartBucket;
let myBucket: smartbucket.Bucket;
let baseDirectory: smartbucket.Directory;

tap.test('should create a valid smartbucket', async () => {
  testSmartbucket = new smartbucket.SmartBucket({
    accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
    accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_SECRETKEY'),
    endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
    port: parseInt(await testQenv.getEnvVarOnDemandStrict('S3_PORT')),
    useSsl: false,
  });
  expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
  const bucketName = await testQenv.getEnvVarOnDemandStrict('S3_BUCKET');
  myBucket = await testSmartbucket.getBucketByName(bucketName);
  expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
});

tap.test('should clean all contents', async () => {
  await myBucket.cleanAllContents();
  expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
  expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
});

tap.test('should delete a file normally', async () => {
  const path = 'trashtest/trashme.txt';
  const file = await myBucket.fastPut({
    path,
    contents: 'I\'m in the trash test content!',
  });
  const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
  console.log(fileMetadata.toString());
  expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
  await file.delete({ mode: 'permanent' });
  expect((await (await myBucket.getBaseDirectory()).listFiles()).length).toEqual(0);
  expect((await (await myBucket.getBaseDirectory()).listDirectories()).length).toEqual(0);
});

tap.test('should put a file into the trash', async () => {
  const path = 'trashtest/trashme.txt';
  const file = await myBucket.fastPut({
    path,
    contents: 'I\'m in the trash test content!',
  });
  const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
  console.log(fileMetadata.toString());
  expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
  await file.delete({ mode: 'trash' });

  const getTrashContents = async () => {
    const trash = await myBucket.getTrash();
    const trashDir = await trash.getTrashDir();
    return await trashDir.listFiles();
  };

  const trashedFiles = await getTrashContents();
  expect(trashedFiles.length).toEqual(2);

  const trashedMetaFile = trashedFiles.find(file => file.name.endsWith('.metadata'));
  expect(trashedMetaFile).toBeDefined();
  expect(trashedMetaFile).toBeInstanceOf(smartbucket.File);

  jestExpect(await trashedMetaFile!.getJsonData()).toEqual({
    custom_recycle: {
      deletedAt: jestExpect.any(Number),
      originalPath: "trashtest/trashme.txt",
    },
  });
});

tap.test('should restore a file from trash', async () => {
  const baseDirectory = await myBucket.getBaseDirectory();
  const file = await baseDirectory.getFile({
    path: 'trashtest/trashme.txt',
    getFromTrash: true
  });
  const trashFileMeta = await file.getMetaData();
  const data = await trashFileMeta.getCustomMetaData({
    key: 'recycle'
  });
  expect(file).toBeInstanceOf(smartbucket.File);
  await file.restore();
});

export default tap.start();

78  test/test.ts
@@ -1,78 +0,0 @@

import { expect, tap } from '@pushrocks/tapbundle';
import { Qenv } from '@pushrocks/qenv';

import * as smartbucket from '../ts/index';

const testQenv = new Qenv('./', './.nogit/');

let testSmartbucket: smartbucket.SmartBucket;
let myBucket: smartbucket.Bucket;
let baseDirectory: smartbucket.Directory;

tap.test('should create a valid smartbucket', async () => {
  testSmartbucket = new smartbucket.SmartBucket({
    accessKey: testQenv.getEnvVarOnDemand('S3_KEY'),
    accessSecret: testQenv.getEnvVarOnDemand('S3_SECRET'),
    endpoint: 'fra1.digitaloceanspaces.com'
  });
});

tap.skip.test('should create testbucket', async () => {
  await testSmartbucket.createBucket('testzone');
});

tap.skip.test('should remove testbucket', async () => {
  await testSmartbucket.removeBucket('testzone');
});

tap.test('should get a bucket', async () => {
  myBucket = await testSmartbucket.getBucketByName('testzone');
  expect(myBucket).to.be.instanceOf(smartbucket.Bucket);
  expect(myBucket.name).to.equal('testzone');
});

// Fast operations
tap.test('should store data in bucket fast', async () => {
  await myBucket.fastStore('hithere/socool.txt', 'hi there!');
});

tap.test('should get data in bucket', async () => {
  const fileString = await myBucket.fastGet('hithere/socool.txt');
  console.log(fileString);
});

tap.test('should delete data in bucket', async () => {
  await myBucket.fastRemove('hithere/socool.txt');
});

// fs operations

tap.test('prepare for directory style tests', async () => {
  await myBucket.fastStore('dir1/file1.txt', 'dir1/file1.txt content');
  await myBucket.fastStore('dir1/file2.txt', 'dir1/file2.txt content');
  await myBucket.fastStore('dir2/file1.txt', 'dir2/file1.txt content');
  await myBucket.fastStore('dir3/file1.txt', 'dir3/file1.txt content');
  await myBucket.fastStore('file1.txt', 'file1 content');
});

tap.test('should get base directory', async () => {
  baseDirectory = await myBucket.getBaseDirectory();
  const directories = await baseDirectory.listDirectories();
  console.log('Found the following directories:');
  console.log(directories);
  expect(directories.length).to.equal(3);
  const files = await baseDirectory.listFiles();
  console.log('Found the following files:');
  console.log(files);
  expect(files.length).to.equal(1);
});

tap.test('clean up directory style tests', async () => {
  await myBucket.fastRemove('dir1/file1.txt');
  await myBucket.fastRemove('dir1/file2.txt');
  await myBucket.fastRemove('dir2/file1.txt');
  await myBucket.fastRemove('dir3/file1.txt');
  await myBucket.fastRemove('file1.txt');
});

tap.start();

8  ts/00_commitinfo_data.ts  Normal file
@@ -0,0 +1,8 @@

/**
 * autocreated commitinfo by @push.rocks/commitinfo
 */
export const commitinfo = {
  name: '@push.rocks/smartbucket',
  version: '4.3.0',
  description: 'A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.'
}

659  ts/classes.bucket.ts  Normal file
@@ -0,0 +1,659 @@

// classes.bucket.ts

import * as plugins from './plugins.js';
import * as helpers from './helpers.js';
import * as interfaces from './interfaces.js';
import { SmartBucket } from './classes.smartbucket.js';
import { Directory } from './classes.directory.js';
import { File } from './classes.file.js';
import { Trash } from './classes.trash.js';
import { ListCursor, type IListCursorOptions } from './classes.listcursor.js';

/**
 * The bucket class exposes the basic functionality of a bucket.
 * The functions of the bucket alone are enough to
 * operate in S3 basic fashion on blobs of data.
 */
export class Bucket {
  public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string): Promise<Bucket> {
    const command = new plugins.s3.ListBucketsCommand({});
    const buckets = await smartbucketRef.s3Client.send(command);
    const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);

    if (foundBucket) {
      console.log(`bucket with name ${bucketNameArg} exists.`);
      console.log(`Taking this as base for new Bucket instance`);
      return new this(smartbucketRef, bucketNameArg);
    } else {
      throw new Error(`Bucket '${bucketNameArg}' not found.`);
    }
  }

  public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
    const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
    await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
    return new Bucket(smartbucketRef, bucketName);
  }

  public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
    const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
    await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
  }

  public smartbucketRef: SmartBucket;
  public name: string;

  constructor(smartbucketRef: SmartBucket, bucketName: string) {
    this.smartbucketRef = smartbucketRef;
    this.name = bucketName;
  }

  /**
   * gets the base directory of the bucket
   */
  public async getBaseDirectory(): Promise<Directory> {
    return new Directory(this, null!, '');
  }

  /**
   * gets the trash directory
   */
  public async getTrash(): Promise<Trash> {
    const trash = new Trash(this);
    return trash;
  }

  public async getDirectoryFromPath(
    pathDescriptorArg: interfaces.IPathDecriptor
  ): Promise<Directory> {
    if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
      return this.getBaseDirectory();
    }
    const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
    const baseDirectory = await this.getBaseDirectory();
    return await baseDirectory.getSubDirectoryByName(checkPath, {
      getEmptyDirectory: true,
    });
  }

  // ===============
  // Fast Operations
  // ===============

  /**
   * store file
   */
  public async fastPut(
    optionsArg: interfaces.IPathDecriptor & {
      contents: string | Buffer;
      overwrite?: boolean;
    }
  ): Promise<File> {
    try {
      const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
      const exists = await this.fastExists({ path: reducedPath });

      if (exists && !optionsArg.overwrite) {
        throw new Error(
          `Object already exists at path '${reducedPath}' in bucket '${this.name}'. ` +
            `Set overwrite:true to replace it.`
        );
      } else if (exists && optionsArg.overwrite) {
        console.log(
          `Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
        );
      } else {
        console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
      }

      const command = new plugins.s3.PutObjectCommand({
        Bucket: this.name,
        Key: reducedPath,
        Body: optionsArg.contents,
      });
      await this.smartbucketRef.s3Client.send(command);

      console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
      const parsedPath = plugins.path.parse(reducedPath);
      return new File({
        directoryRefArg: await this.getDirectoryFromPath({
          path: parsedPath.dir,
        }),
        fileName: parsedPath.base,
      });
    } catch (error) {
      console.error(
        `Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
        error
      );
      throw error;
    }
  }

  /**
   * get file
   */
  public async fastGet(optionsArg: { path: string }): Promise<Buffer> {
    const done = plugins.smartpromise.defer();
    let completeFile: Buffer;
    const replaySubject = await this.fastGetReplaySubject(optionsArg);
    const subscription = replaySubject.subscribe({
      next: (chunk) => {
        if (completeFile) {
          completeFile = Buffer.concat([completeFile, chunk]);
        } else {
          completeFile = chunk;
        }
      },
      complete: () => {
        done.resolve();
        subscription.unsubscribe();
      },
      error: (err) => {
        console.log(err);
      },
    });
    await done.promise;
    return completeFile!;
  }

  /**
   * good when time to first byte is important
   * and multiple subscribers are expected
   * @param optionsArg
   * @returns
   */
  public async fastGetReplaySubject(optionsArg: {
    path: string;
  }): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
    const command = new plugins.s3.GetObjectCommand({
      Bucket: this.name,
      Key: optionsArg.path,
    });
    const response = await this.smartbucketRef.s3Client.send(command);
    const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();

    // Convert the stream to a format that supports piping
    const stream = response.Body as any; // SdkStreamMixin includes readable stream
    if (typeof stream.pipe === 'function') {
      const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
        writeFunction: async (chunk) => {
          replaySubject.next(chunk);
          return;
        },
        finalFunction: async (cb) => {
          replaySubject.complete();
          return;
        },
      });

      stream.pipe(duplexStream);
    }

    return replaySubject;
  }

  public fastGetStream(
    optionsArg: {
      path: string;
    },
    typeArg: 'webstream'
  ): Promise<ReadableStream>;
  public async fastGetStream(
    optionsArg: {
      path: string;
    },
    typeArg: 'nodestream'
  ): Promise<plugins.stream.Readable>;

  public async fastGetStream(
    optionsArg: { path: string },
    typeArg: 'webstream' | 'nodestream' = 'nodestream'
  ): Promise<ReadableStream | plugins.stream.Readable> {
    const command = new plugins.s3.GetObjectCommand({
      Bucket: this.name,
      Key: optionsArg.path,
    });
    const response = await this.smartbucketRef.s3Client.send(command);
    const stream = response.Body as any; // SdkStreamMixin includes readable stream

    const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
      writeFunction: async (chunk) => {
        return chunk;
      },
      finalFunction: async (cb) => {
        return null!;
      },
    });

    if (typeof stream.pipe === 'function') {
      stream.pipe(duplexStream);
    }

    if (typeArg === 'nodestream') {
      return duplexStream;
    }
    if (typeArg === 'webstream') {
      return (await duplexStream.getWebStreams()).readable;
    }
    throw new Error('unknown typeArg');
  }

  /**
   * store file as stream
   */
  public async fastPutStream(optionsArg: {
    path: string;
    readableStream: plugins.stream.Readable | ReadableStream;
    nativeMetadata?: { [key: string]: string };
    overwrite?: boolean;
  }): Promise<void> {
    try {
      const exists = await this.fastExists({ path: optionsArg.path });

      if (exists && !optionsArg.overwrite) {
        throw new Error(
          `Object already exists at path '${optionsArg.path}' in bucket '${this.name}'. ` +
            `Set overwrite:true to replace it.`
        );
      } else if (exists && optionsArg.overwrite) {
        console.log(
          `Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
        );
      } else {
        console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
      }

      const command = new plugins.s3.PutObjectCommand({
        Bucket: this.name,
        Key: optionsArg.path,
        Body: optionsArg.readableStream,
        Metadata: optionsArg.nativeMetadata,
      });
      await this.smartbucketRef.s3Client.send(command);

      console.log(
        `Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
      );
    } catch (error) {
      console.error(
        `Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
        error
      );
      throw error;
    }
  }

  /**
   * Copy an object within this bucket or into another bucket,
   * optionally merging or replacing its native metadata.
   */
  public async fastCopy(optionsArg: {
    sourcePath: string;
    destinationPath?: string;
    targetBucket?: Bucket;
    nativeMetadata?: { [key: string]: string };
    deleteExistingNativeMetadata?: boolean;
  }): Promise<void> {
    try {
      const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;

      // Retrieve current object information to use in copy conditions
      const currentObjInfo = await this.smartbucketRef.s3Client.send(
        new plugins.s3.HeadObjectCommand({
          Bucket: this.name,
          Key: optionsArg.sourcePath,
        })
      );

      // Prepare new metadata
      const newNativeMetadata = {
        ...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
        ...optionsArg.nativeMetadata,
      };

      // Define the copy operation
      const copySource = `${this.name}/${optionsArg.sourcePath}`;
      const command = new plugins.s3.CopyObjectCommand({
        Bucket: targetBucketName,
        CopySource: copySource,
        Key: optionsArg.destinationPath || optionsArg.sourcePath,
        Metadata: newNativeMetadata,
        MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
      });
      await this.smartbucketRef.s3Client.send(command);
    } catch (err) {
      console.error('Error updating metadata:', err);
      throw err; // rethrow to allow caller to handle
    }
  }

  /**
   * Move object from one path to another within the same bucket or to another bucket
   */
  public async fastMove(optionsArg: {
    sourcePath: string;
    destinationPath: string;
    targetBucket?: Bucket;
    overwrite?: boolean;
  }): Promise<void> {
    try {
      const destinationBucket = optionsArg.targetBucket || this;
      const exists = await destinationBucket.fastExists({
        path: optionsArg.destinationPath,
      });

      if (exists && !optionsArg.overwrite) {
        console.error(
          `Object already exists at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
        );
        return;
      } else if (exists && optionsArg.overwrite) {
        console.log(
          `Overwriting existing object at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
        );
      } else {
        console.log(
          `Moving object to path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
        );
      }

      await this.fastCopy(optionsArg);
      await this.fastRemove({ path: optionsArg.sourcePath });

      console.log(
        `Object '${optionsArg.sourcePath}' has been successfully moved to '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
      );
    } catch (error) {
      console.error(
        `Error moving object from '${optionsArg.sourcePath}' to '${optionsArg.destinationPath}':`,
        error
      );
      throw error;
    }
  }

  /**
   * removeObject
   */
  public async fastRemove(optionsArg: { path: string }) {
    const command = new plugins.s3.DeleteObjectCommand({
      Bucket: this.name,
      Key: optionsArg.path,
    });
    await this.smartbucketRef.s3Client.send(command);
  }

  /**
   * check whether file exists
   * @param optionsArg
   * @returns
   */
  public async fastExists(optionsArg: { path: string }): Promise<boolean> {
    try {
      const command = new plugins.s3.HeadObjectCommand({
        Bucket: this.name,
        Key: optionsArg.path,
      });
      await this.smartbucketRef.s3Client.send(command);
      console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
      return true;
    } catch (error: any) {
      if (error?.name === 'NotFound') {
        console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
        return false;
      } else {
        console.error('Error checking object existence:', error);
        throw error; // Rethrow if it's not a NotFound error to handle unexpected issues
      }
    }
  }

  /**
   * deletes this bucket
   */
  public async delete() {
    await this.smartbucketRef.s3Client.send(
      new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
    );
  }

  public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
    const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
    const command = new plugins.s3.HeadObjectCommand({
      Bucket: this.name,
      Key: checkPath,
    });
    return this.smartbucketRef.s3Client.send(command);
  }

  public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
    const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
    const command = new plugins.s3.ListObjectsV2Command({
      Bucket: this.name,
      Prefix: checkPath,
      Delimiter: '/',
    });
    const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
    return !!CommonPrefixes && CommonPrefixes.length > 0;
  }

  public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
    const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
    const command = new plugins.s3.ListObjectsV2Command({
      Bucket: this.name,
      Prefix: checkPath,
      Delimiter: '/',
    });
    const { Contents } = await this.smartbucketRef.s3Client.send(command);
    return !!Contents && Contents.length > 0;
  }

  public async getMagicBytes(optionsArg: { path: string; length: number }): Promise<Buffer> {
    try {
      const command = new plugins.s3.GetObjectCommand({
        Bucket: this.name,
        Key: optionsArg.path,
        Range: `bytes=0-${optionsArg.length - 1}`,
      });
      const response = await this.smartbucketRef.s3Client.send(command);
      const chunks: Buffer[] = [];
      const stream = response.Body as any; // SdkStreamMixin includes readable stream

      for await (const chunk of stream) {
        chunks.push(chunk);
      }
      return Buffer.concat(chunks);
    } catch (error) {
      console.error(
        `Error retrieving magic bytes from object at path '${optionsArg.path}' in bucket '${this.name}':`,
        error
      );
      throw error;
    }
  }

  // ==========================================
  // Memory-Efficient Listing Methods (Phase 1)
  // ==========================================

  /**
   * List all objects with a given prefix using async generator (memory-efficient streaming)
   * @param prefix - Optional prefix to filter objects (default: '' for all objects)
   * @yields Object keys one at a time
   * @example
   * ```ts
   * for await (const key of bucket.listAllObjects('npm/')) {
   *   console.log(key);
   *   if (shouldStop) break; // Early exit supported
   * }
   * ```
   */
  public async *listAllObjects(prefix: string = ''): AsyncIterableIterator<string> {
    let continuationToken: string | undefined;

    do {
      const command = new plugins.s3.ListObjectsV2Command({
        Bucket: this.name,
        Prefix: prefix,
        ContinuationToken: continuationToken,
      });

      const response = await this.smartbucketRef.s3Client.send(command);

      for (const obj of response.Contents || []) {
        if (obj.Key) yield obj.Key;
      }

      continuationToken = response.NextContinuationToken;
    } while (continuationToken);
  }

  /**
   * List all objects as an RxJS Observable (for complex reactive pipelines)
   * @param prefix - Optional prefix to filter objects (default: '' for all objects)
   * @returns Observable that emits object keys
   * @example
   * ```ts
   * bucket.listAllObjectsObservable('npm/')
   *   .pipe(
   *     filter(key => key.endsWith('.json')),
   *     take(100)
   *   )
   *   .subscribe(key => console.log(key));
   * ```
   */
  public listAllObjectsObservable(prefix: string = ''): plugins.smartrx.rxjs.Observable<string> {
    return new plugins.smartrx.rxjs.Observable<string>((subscriber) => {
      const fetchPage = async (token?: string) => {
        try {
          const command = new plugins.s3.ListObjectsV2Command({
            Bucket: this.name,
            Prefix: prefix,
            ContinuationToken: token,
          });

          const response = await this.smartbucketRef.s3Client.send(command);

          for (const obj of response.Contents || []) {
            if (obj.Key) subscriber.next(obj.Key);
          }

          if (response.NextContinuationToken) {
            await fetchPage(response.NextContinuationToken);
          } else {
            subscriber.complete();
          }
        } catch (error) {
          subscriber.error(error);
        }
      };

      fetchPage();
    });
  }

  /**
   * Create a cursor for manual pagination control
   * @param prefix - Optional prefix to filter objects (default: '' for all objects)
   * @param options - Cursor options (pageSize, etc.)
   * @returns ListCursor instance
   * @example
   * ```ts
   * const cursor = bucket.createCursor('npm/', { pageSize: 500 });
   * while (cursor.hasMore()) {
   *   const { keys, done } = await cursor.next();
   *   console.log(`Processing ${keys.length} keys...`);
   * }
   * ```
   */
  public createCursor(prefix: string = '', options?: IListCursorOptions): ListCursor {
    return new ListCursor(this, prefix, options);
  }

  // ==========================================
  // High-Level Listing Helpers (Phase 2)
  // ==========================================

  /**
   * Find objects matching a glob pattern (memory-efficient)
   * @param pattern - Glob pattern (e.g., "**\/*.json", "npm/packages/*\/index.json")
   * @yields Matching object keys
   * @example
   * ```ts
   * for await (const key of bucket.findByGlob('npm/packages/*\/index.json')) {
   *   console.log('Found package index:', key);
   * }
   * ```
   */
  public async *findByGlob(pattern: string): AsyncIterableIterator<string> {
    const matcher = new plugins.Minimatch(pattern);
    for await (const key of this.listAllObjects('')) {
      if (matcher.match(key)) yield key;
    }
  }

  /**
   * List all objects and collect into an array (convenience method)
   * WARNING: Loads entire result set into memory. Use listAllObjects() generator for large buckets.
   * @param prefix - Optional prefix to filter objects (default: '' for all objects)
   * @returns Array of all object keys
   * @example
   * ```ts
   * const allKeys = await bucket.listAllObjectsArray('npm/');
   * console.log(`Found ${allKeys.length} objects`);
   * ```
   */
  public async listAllObjectsArray(prefix: string = ''): Promise<string[]> {
    const keys: string[] = [];
    for await (const key of this.listAllObjects(prefix)) {
      keys.push(key);
    }
    return keys;
  }

  public async cleanAllContents(): Promise<void> {
    try {
      // Define the command type explicitly
      const listCommandInput: plugins.s3.ListObjectsV2CommandInput = {
        Bucket: this.name,
      };

      let isTruncated = true;
      let continuationToken: string | undefined = undefined;

      while (isTruncated) {
        // Add the continuation token to the input if present
        const listCommand = new plugins.s3.ListObjectsV2Command({
          ...listCommandInput,
          ContinuationToken: continuationToken,
        });

        // Explicitly type the response
        const response: plugins.s3.ListObjectsV2Output =
          await this.smartbucketRef.s3Client.send(listCommand);

        console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);

        if (response.Contents && response.Contents.length > 0) {
          // Delete objects in batches, mapping each item to { Key: string }
          const deleteCommand = new plugins.s3.DeleteObjectsCommand({
            Bucket: this.name,
            Delete: {
              Objects: response.Contents.map((item) => ({ Key: item.Key! })),
              Quiet: true,
            },
          });

          await this.smartbucketRef.s3Client.send(deleteCommand);
        }

        // Update continuation token and truncation status
|
||||
isTruncated = response.IsTruncated || false;
|
||||
continuationToken = response.NextContinuationToken;
|
||||
}
|
||||
|
||||
console.log(`All contents in bucket '${this.name}' have been deleted.`);
|
||||
} catch (error) {
|
||||
console.error(`Error cleaning contents of bucket '${this.name}':`, error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
}
|
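Taken together, these listing APIs cover four consumption styles: streaming, glob filtering, manual pagination, and eager collection. A minimal sketch of each, assuming `bucket` is a connected `Bucket` instance (prefixes and handlers illustrative):

```ts
import type { Bucket } from './classes.bucket.js';

// Minimal sketch of the four listing styles; 'npm/' and the handlers are illustrative.
async function listingDemo(bucket: Bucket) {
  // 1. Async generator: constant memory, supports early exit.
  for await (const key of bucket.listAllObjects('npm/')) {
    if (key.endsWith('.tgz')) break;
  }

  // 2. Glob matching layered on the same stream.
  for await (const key of bucket.findByGlob('npm/packages/*/index.json')) {
    console.log('package index:', key);
  }

  // 3. Cursor: explicit pages, here 500 keys at a time.
  const cursor = bucket.createCursor('npm/', { pageSize: 500 });
  while (cursor.hasMore()) {
    const { keys } = await cursor.next();
    console.log(`processing ${keys.length} keys`);
  }

  // 4. Array helper: convenient, but loads every key into memory.
  const allKeys = await bucket.listAllObjectsArray('assets/');
  console.log(`${allKeys.length} keys total`);
}
```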
420 ts/classes.directory.ts Normal file
@@ -0,0 +1,420 @@
// classes.directory.ts

import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
import { File } from './classes.file.js';
import * as helpers from './helpers.js';

export class Directory {
  public bucketRef: Bucket;
  public parentDirectoryRef: Directory;
  public name: string;

  public tree!: string[];
  public files!: string[];
  public folders!: string[];

  constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
    this.bucketRef = bucketRefArg;
    this.parentDirectoryRef = parentDirectory;
    this.name = name;
  }

  /**
   * returns an array of parent directories
   */
  public getParentDirectories(): Directory[] {
    let parentDirectories: Directory[] = [];
    if (this.parentDirectoryRef) {
      parentDirectories.push(this.parentDirectoryRef);
      parentDirectories = parentDirectories.concat(this.parentDirectoryRef.getParentDirectories());
    }
    return parentDirectories;
  }

  /**
   * returns the directory level
   */
  public getDirectoryLevel(): number {
    return this.getParentDirectories().length;
  }

  /**
   * returns the base path of this directory within the bucket
   */
  public getBasePath(): string {
    const parentDirectories = this.getParentDirectories();
    let basePath = '';
    for (const parentDir of parentDirectories) {
      if (!parentDir.name && !basePath) {
        basePath = this.name + '/';
        continue;
      }
      if (parentDir.name && !basePath) {
        basePath = parentDir.name + '/' + this.name + '/';
        continue;
      }
      if (parentDir.name && basePath) {
        basePath = parentDir.name + '/' + basePath;
        continue;
      }
    }
    return basePath;
  }

  /**
   * gets a file by name
   */
  public async getFile(optionsArg: {
    path: string;
    createWithContents?: string | Buffer;
    getFromTrash?: boolean;
  }): Promise<File> {
    const pathDescriptor = {
      directory: this,
      path: optionsArg.path,
    };
    const exists = await this.bucketRef.fastExists({
      path: await helpers.reducePathDescriptorToPath(pathDescriptor),
    });
    if (!exists && optionsArg.getFromTrash) {
      const trash = await this.bucketRef.getTrash();
      const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
      return trashedFile;
    }
    if (!exists && !optionsArg.createWithContents) {
      throw new Error(`File not found at path '${optionsArg.path}'`);
    }
    if (!exists && optionsArg.createWithContents) {
      await File.create({
        directory: this,
        name: optionsArg.path,
        contents: optionsArg.createWithContents,
      });
    }
    return new File({
      directoryRefArg: this,
      fileName: optionsArg.path,
    });
  }

  /**
   * Check if a file exists in this directory
   */
  public async fileExists(optionsArg: { path: string }): Promise<boolean> {
    const pathDescriptor = {
      directory: this,
      path: optionsArg.path,
    };
    return this.bucketRef.fastExists({
      path: await helpers.reducePathDescriptorToPath(pathDescriptor),
    });
  }

  /**
   * Check if a subdirectory exists
   */
  public async directoryExists(dirNameArg: string): Promise<boolean> {
    const directories = await this.listDirectories();
    return directories.some(dir => dir.name === dirNameArg);
  }

  /**
   * Collects all ListObjectsV2 pages for a prefix.
   */
  private async listObjectsV2AllPages(prefix: string, delimiter?: string) {
    const allContents: plugins.s3._Object[] = [];
    const allCommonPrefixes: plugins.s3.CommonPrefix[] = [];
    let continuationToken: string | undefined;

    do {
      const command = new plugins.s3.ListObjectsV2Command({
        Bucket: this.bucketRef.name,
        Prefix: prefix,
        Delimiter: delimiter,
        ContinuationToken: continuationToken,
      });
      const response = await this.bucketRef.smartbucketRef.s3Client.send(command);

      if (response.Contents) {
        allContents.push(...response.Contents);
      }
      if (response.CommonPrefixes) {
        allCommonPrefixes.push(...response.CommonPrefixes);
      }

      continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
    } while (continuationToken);

    return { contents: allContents, commonPrefixes: allCommonPrefixes };
  }

  /**
   * lists all files
   */
  public async listFiles(): Promise<File[]> {
    const { contents } = await this.listObjectsV2AllPages(this.getBasePath(), '/');
    const fileArray: File[] = [];

    contents.forEach((item) => {
      if (item.Key && !item.Key.endsWith('/')) {
        const subtractedPath = item.Key.replace(this.getBasePath(), '');
        if (!subtractedPath.includes('/')) {
          fileArray.push(
            new File({
              directoryRefArg: this,
              fileName: subtractedPath,
            })
          );
        }
      }
    });

    return fileArray;
  }

  /**
   * lists all folders
   */
  public async listDirectories(): Promise<Directory[]> {
    try {
      const { commonPrefixes } = await this.listObjectsV2AllPages(this.getBasePath(), '/');
      const directoryArray: Directory[] = [];

      if (commonPrefixes) {
        commonPrefixes.forEach((item) => {
          if (item.Prefix) {
            const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
            if (subtractedPath.endsWith('/')) {
              const dirName = subtractedPath.slice(0, -1);
              // Ensure the directory name is not empty (which would indicate the base directory itself)
              if (dirName) {
                directoryArray.push(new Directory(this.bucketRef, this, dirName));
              }
            }
          }
        });
      }

      return directoryArray;
    } catch (error) {
      console.error('Error listing directories:', error);
      throw error;
    }
  }

  /**
   * gets an array that has all objects with a certain prefix
   */
  public async getTreeArray() {
    const command = new plugins.s3.ListObjectsV2Command({
      Bucket: this.bucketRef.name,
      Prefix: this.getBasePath(),
      Delimiter: '/',
    });
    const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
    return response.Contents;
  }

  /**
   * gets a sub directory by name
   */
  public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
    /**
     * in s3 a directory does not exist if it is empty
     * this option returns a directory even if it is empty
     */
    getEmptyDirectory?: boolean;
    /**
     * in s3 a directory does not exist if it is empty
     * this option creates a directory even if it is empty, using an initializer file
     */
    createWithInitializerFile?: boolean;
    /**
     * if the path is a file path, it will be treated as a file and the parent directory will be returned
     */
    couldBeFilePath?: boolean;
  } = {}): Promise<Directory> {
    const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");

    optionsArg = {
      getEmptyDirectory: false,
      createWithInitializerFile: false,
      ...optionsArg,
    };

    const getDirectory = async (directoryArg: Directory, dirNameToSearch: string, isFinalDirectory: boolean) => {
      const directories = await directoryArg.listDirectories();
      let returnDirectory = directories.find((directory) => {
        return directory.name === dirNameToSearch;
      });
      if (returnDirectory) {
        return returnDirectory;
      }
      if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
        returnDirectory = new Directory(this.bucketRef, directoryArg, dirNameToSearch);
      }
      if (isFinalDirectory && optionsArg.createWithInitializerFile) {
        await returnDirectory?.createEmptyFile('00init.txt');
      }
      return returnDirectory || null;
    };

    if (optionsArg.couldBeFilePath) {
      const baseDirectory = await this.bucketRef.getBaseDirectory();
      const existingFile = await baseDirectory.getFile({
        path: dirNameArg,
      });
      if (existingFile) {
        const adjustedPath = dirNameArg.substring(0, dirNameArg.lastIndexOf('/'));
        return this.getSubDirectoryByName(adjustedPath);
      }
    }

    let wantedDirectory: Directory | null = null;
    let counter = 0;
    for (const dirNameToSearch of dirNameArray) {
      counter++;
      const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
      wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
    }

    if (!wantedDirectory) {
      throw new Error(`Directory not found at path '${dirNameArg}'`);
    }
    return wantedDirectory;
  }

  /**
   * moves the directory
   */
  public async move() {
    // TODO
    throw new Error('Moving a directory is not yet implemented');
  }

  /**
   * creates an empty file within this directory
   * @param relativePathArg
   */
  public async createEmptyFile(relativePathArg: string) {
    const emptyFile = await File.create({
      directory: this,
      name: relativePathArg,
      contents: '',
    });
    return emptyFile;
  }

  // file operations
  public async fastPut(optionsArg: { path: string; contents: string | Buffer }) {
    const path = plugins.path.join(this.getBasePath(), optionsArg.path);
    await this.bucketRef.fastPut({
      path,
      contents: optionsArg.contents,
    });
  }

  public async fastGet(optionsArg: { path: string }) {
    const path = plugins.path.join(this.getBasePath(), optionsArg.path);
    const result = await this.bucketRef.fastGet({
      path,
    });
    return result;
  }

  public fastGetStream(
    optionsArg: {
      path: string;
    },
    typeArg: 'webstream'
  ): Promise<ReadableStream>;
  public async fastGetStream(
    optionsArg: {
      path: string;
    },
    typeArg: 'nodestream'
  ): Promise<plugins.stream.Readable>;

  /**
   * fastGetStream
   * @param optionsArg
   * @returns
   */
  public async fastGetStream(
    optionsArg: { path: string },
    typeArg: 'webstream' | 'nodestream'
  ): Promise<ReadableStream | plugins.stream.Readable> {
    const path = plugins.path.join(this.getBasePath(), optionsArg.path);
    const result = await this.bucketRef.fastGetStream(
      {
        path,
      },
      typeArg as any
    );
    return result;
  }

  /**
   * fast put stream
   */
  public async fastPutStream(optionsArg: {
    path: string;
    stream: plugins.stream.Readable;
  }): Promise<void> {
    const path = plugins.path.join(this.getBasePath(), optionsArg.path);
    await this.bucketRef.fastPutStream({
      path,
      readableStream: optionsArg.stream,
    });
  }

  /**
   * removes a file within the directory
   * uses the File class to make sure side effects on metadata etc. are handled correctly
   * @param optionsArg
   */
  public async fastRemove(optionsArg: {
    path: string;
    /**
     * whether the file should be placed into trash. Default is 'permanent'.
     */
    mode?: 'permanent' | 'trash';
  }) {
    const file = await this.getFile({
      path: optionsArg.path,
    });
    await file.delete({
      mode: optionsArg.mode ? optionsArg.mode : 'permanent',
    });
  }

  /**
   * deletes the directory with all its contents
   */
  public async delete(optionsArg: {
    mode?: 'permanent' | 'trash';
  }) {
    const deleteDirectory = async (directoryArg: Directory) => {
      const childDirectories = await directoryArg.listDirectories();
      if (childDirectories.length === 0) {
        console.log('Directory empty! Path complete!');
      } else {
        for (const childDir of childDirectories) {
          await deleteDirectory(childDir);
        }
      }
      const files = await directoryArg.listFiles();
      for (const file of files) {
        await file.delete({
          mode: optionsArg.mode ? optionsArg.mode : 'permanent',
        });
      }
    };
    await deleteDirectory(this);
  }
}
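A short sketch of how these Directory primitives compose, assuming `bucket` is a connected `Bucket` (paths illustrative):

```ts
import type { Bucket } from './classes.bucket.js';

// Illustrative walk-through of the Directory API.
async function directoryDemo(bucket: Bucket) {
  const base = await bucket.getBaseDirectory();

  // Resolve a nested directory, materializing it with an initializer
  // file if it does not exist yet (S3 has no empty directories).
  const logs = await base.getSubDirectoryByName('logs/2024', {
    createWithInitializerFile: true,
  });

  // Write and read a file relative to that directory.
  await logs.fastPut({ path: 'app.log', contents: 'started\n' });
  const contents = await logs.fastGet({ path: 'app.log' });
  console.log(contents.toString());

  // Enumerate direct children only (the '/' delimiter stops recursion).
  for (const file of await logs.listFiles()) console.log('file:', file.name);
  for (const dir of await logs.listDirectories()) console.log('dir:', dir.name);
}
```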
303 ts/classes.file.ts Normal file
@@ -0,0 +1,303 @@
import * as plugins from './plugins.js';
import * as helpers from './helpers.js';
import * as interfaces from './interfaces.js';
import { Directory } from './classes.directory.js';
import { MetaData } from './classes.metadata.js';

/**
 * represents a file in a directory
 */
export class File {
  // STATIC

  /**
   * creates a file in draft mode
   * you need to call .save() to store it in s3
   * @param optionsArg
   */
  public static async create(optionsArg: {
    directory: Directory;
    name: string;
    contents: Buffer | string | plugins.stream.Readable;
    /**
     * if contents are of type string, you can specify the encoding here
     */
    encoding?: 'utf8' | 'binary';
  }): Promise<File> {
    const contents =
      typeof optionsArg.contents === 'string'
        ? Buffer.from(optionsArg.contents, optionsArg.encoding)
        : optionsArg.contents;
    const file = new File({
      directoryRefArg: optionsArg.directory,
      fileName: optionsArg.name,
    });
    if (contents instanceof plugins.stream.Readable) {
      await optionsArg.directory.fastPutStream({
        path: optionsArg.name,
        stream: contents,
      });
    } else {
      await optionsArg.directory.fastPut({
        path: optionsArg.name,
        contents: contents,
      });
    }
    return file;
  }

  // INSTANCE
  public parentDirectoryRef: Directory;
  public name: string;

  /**
   * get the full path to the file
   * @returns the full path to the file
   */
  public getBasePath(): string {
    return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
  }

  constructor(optionsArg: { directoryRefArg: Directory; fileName: string }) {
    this.parentDirectoryRef = optionsArg.directoryRefArg;
    this.name = optionsArg.fileName;
  }

  public async getContentsAsString(): Promise<string> {
    const fileBuffer = await this.getContents();
    return fileBuffer.toString();
  }

  public async getContents(): Promise<Buffer> {
    const resultBuffer = await this.parentDirectoryRef.bucketRef.fastGet({
      path: this.getBasePath(),
    });
    return resultBuffer;
  }

  public async getReadStream(typeArg: 'webstream'): Promise<ReadableStream>;
  public async getReadStream(typeArg: 'nodestream'): Promise<plugins.stream.Readable>;
  public async getReadStream(
    typeArg: 'nodestream' | 'webstream'
  ): Promise<ReadableStream | plugins.stream.Readable> {
    const readStream = this.parentDirectoryRef.bucketRef.fastGetStream(
      {
        path: this.getBasePath(),
      },
      typeArg as any
    );
    return readStream;
  }

  /**
   * deletes this file
   */
  public async delete(optionsArg?: { mode: 'trash' | 'permanent' }) {
    optionsArg = {
      ...{
        mode: 'permanent',
      },
      ...optionsArg,
    };

    if (optionsArg.mode === 'permanent') {
      await this.parentDirectoryRef.bucketRef.fastRemove({
        path: this.getBasePath(),
      });
      if (!this.name.endsWith('.metadata')) {
        if (await this.hasMetaData()) {
          const metadata = await this.getMetaData();
          await metadata.metadataFile.delete(optionsArg);
        }
      }
    } else if (optionsArg.mode === 'trash') {
      const metadata = await this.getMetaData();
      await metadata.storeCustomMetaData({
        key: 'recycle',
        value: {
          deletedAt: Date.now(),
          originalPath: this.getBasePath(),
        },
      });
      const trash = await this.parentDirectoryRef.bucketRef.getTrash();
      const trashDir = await trash.getTrashDir();
      await this.move({
        directory: trashDir,
        path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
      });
    }

    await this.parentDirectoryRef.listFiles();
  }

  /**
   * restores a trashed file, by default to its original path
   */
  public async restore(optionsArg: {
    useOriginalPath?: boolean;
    toPath?: string;
    overwrite?: boolean;
  } = {}) {
    optionsArg = {
      useOriginalPath: !optionsArg.toPath,
      overwrite: false,
      ...optionsArg,
    };
    const metadata = await this.getMetaData();
    const moveToPath = optionsArg.toPath || (await metadata.getCustomMetaData({
      key: 'recycle'
    })).originalPath;
    await metadata.deleteCustomMetaData({
      key: 'recycle'
    });
    await this.move({
      path: moveToPath,
    });
  }

  /**
   * allows locking the file
   * @param optionsArg
   */
  public async lock(optionsArg?: { timeoutMillis?: number }) {
    const metadata = await this.getMetaData();
    await metadata.setLock({
      lock: 'locked',
      expires: Date.now() + (optionsArg?.timeoutMillis || 1000),
    });
  }

  /**
   * actively unlocks a file
   */
  public async unlock(optionsArg?: {
    /**
     * unlock the file even if not locked from this instance
     */
    force?: boolean;
  }) {
    const metadata = await this.getMetaData();
    await metadata.removeLock({
      force: optionsArg?.force || false,
    });
  }

  public async updateWithContents(optionsArg: {
    contents: Buffer | string | plugins.stream.Readable | ReadableStream;
    encoding?: 'utf8' | 'binary';
  }) {
    if (
      optionsArg.contents instanceof plugins.stream.Readable ||
      optionsArg.contents instanceof ReadableStream
    ) {
      await this.parentDirectoryRef.bucketRef.fastPutStream({
        path: this.getBasePath(),
        readableStream: optionsArg.contents,
        overwrite: true,
      });
    } else if (Buffer.isBuffer(optionsArg.contents)) {
      await this.parentDirectoryRef.bucketRef.fastPut({
        path: this.getBasePath(),
        contents: optionsArg.contents,
        overwrite: true,
      });
    } else if (typeof optionsArg.contents === 'string') {
      await this.parentDirectoryRef.bucketRef.fastPut({
        path: this.getBasePath(),
        contents: Buffer.from(optionsArg.contents, optionsArg.encoding),
        overwrite: true,
      });
    }
  }

  /**
   * moves the file to another directory
   */
  public async move(pathDescriptorArg: interfaces.IPathDecriptor) {
    let moveToPath: string = '';
    const isDirectory = await this.parentDirectoryRef.bucketRef.isDirectory(pathDescriptorArg);
    if (isDirectory) {
      moveToPath = await helpers.reducePathDescriptorToPath({
        ...pathDescriptorArg,
        path: plugins.path.join(pathDescriptorArg.path!, this.name),
      });
    } else {
      moveToPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
    }
    // lets move the file
    await this.parentDirectoryRef.bucketRef.fastMove({
      sourcePath: this.getBasePath(),
      destinationPath: moveToPath,
      overwrite: true,
    });
    // lets move the metadata file
    if (!this.name.endsWith('.metadata')) {
      const metadata = await this.getMetaData();
      await this.parentDirectoryRef.bucketRef.fastMove({
        sourcePath: metadata.metadataFile.getBasePath(),
        destinationPath: moveToPath + '.metadata',
        overwrite: true,
      });
    }

    // lets update references of this
    const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
    this.parentDirectoryRef = await baseDirectory.getSubDirectoryByName(
      await helpers.reducePathDescriptorToPath(pathDescriptorArg),
      {
        couldBeFilePath: true,
      }
    );
    this.name = pathDescriptorArg.path!;
  }

  public async hasMetaData(): Promise<boolean> {
    if (!this.name.endsWith('.metadata')) {
      const hasMetadataBool = MetaData.hasMetaData({
        file: this,
      });
      return hasMetadataBool;
    } else {
      return false;
    }
  }

  /**
   * gets the MetaData handle for this file
   */
  public async getMetaData() {
    if (this.name.endsWith('.metadata')) {
      throw new Error('metadata files cannot have metadata');
    }
    const metadata = await MetaData.createForFile({
      file: this,
    });
    return metadata;
  }

  /**
   * gets the contents as json
   */
  public async getJsonData() {
    const json = await this.getContentsAsString();
    const parsed = JSON.parse(json);
    return parsed;
  }

  public async writeJsonData(dataArg: any) {
    await this.updateWithContents({
      contents: JSON.stringify(dataArg),
    });
  }

  public async getMagicBytes(optionsArg: { length: number }): Promise<Buffer> {
    return this.parentDirectoryRef.bucketRef.getMagicBytes({
      path: this.getBasePath(),
      length: optionsArg.length,
    });
  }
}
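The trash flow round-trips through the metadata sidecar: deleting with mode 'trash' records the original path, and restore reads it back. A sketch, assuming `dir` already contains the file (name illustrative):

```ts
import type { Directory } from './classes.directory.js';

// Sketch of the trash/restore round trip for an existing 'report.json'.
async function trashDemo(dir: Directory) {
  const file = await dir.getFile({ path: 'report.json' });

  // Soft delete: stores { deletedAt, originalPath } in the .metadata
  // sidecar, then moves both objects into the trash directory.
  await file.delete({ mode: 'trash' });

  // Later: resolve the trashed file by its original name and put it back.
  const trashed = await dir.getFile({ path: 'report.json', getFromTrash: true });
  await trashed.restore({ useOriginalPath: true });
}
```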
89 ts/classes.listcursor.ts Normal file
@@ -0,0 +1,89 @@
// classes.listcursor.ts

import * as plugins from './plugins.js';
import type { Bucket } from './classes.bucket.js';

export interface IListCursorOptions {
  pageSize?: number;
}

export interface IListCursorResult {
  keys: string[];
  done: boolean;
}

/**
 * ListCursor provides explicit pagination control for listing objects in a bucket.
 * Useful for UI pagination, resumable operations, and manual batch processing.
 */
export class ListCursor {
  private continuationToken?: string;
  private exhausted = false;
  private pageSize: number;

  constructor(
    private bucket: Bucket,
    private prefix: string,
    options: IListCursorOptions = {}
  ) {
    this.pageSize = options.pageSize || 1000;
  }

  /**
   * Fetch the next page of object keys
   * @returns Object with keys array and done flag
   */
  public async next(): Promise<IListCursorResult> {
    if (this.exhausted) {
      return { keys: [], done: true };
    }

    const command = new plugins.s3.ListObjectsV2Command({
      Bucket: this.bucket.name,
      Prefix: this.prefix,
      MaxKeys: this.pageSize,
      ContinuationToken: this.continuationToken,
    });

    const response = await this.bucket.smartbucketRef.s3Client.send(command);

    const keys = (response.Contents || [])
      .map((obj) => obj.Key)
      .filter((key): key is string => !!key);

    this.continuationToken = response.NextContinuationToken;
    this.exhausted = !this.continuationToken;

    return { keys, done: this.exhausted };
  }

  /**
   * Check if there are more pages to fetch
   */
  public hasMore(): boolean {
    return !this.exhausted;
  }

  /**
   * Reset the cursor to start from the beginning
   */
  public reset(): void {
    this.continuationToken = undefined;
    this.exhausted = false;
  }

  /**
   * Get the current continuation token (for saving/restoring state)
   */
  public getToken(): string | undefined {
    return this.continuationToken;
  }

  /**
   * Set the continuation token (for resuming from a saved state)
   */
  public setToken(token: string | undefined): void {
    this.continuationToken = token;
    this.exhausted = !token;
  }
}
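Because the continuation token is exposed, a scan can survive a process restart. A sketch; the checkpoint helpers are hypothetical stand-ins for whatever persistence the caller uses:

```ts
import type { Bucket } from './classes.bucket.js';

// Hypothetical persistence helpers - not part of smartbucket.
declare function loadCheckpoint(): Promise<string | undefined>;
declare function saveCheckpoint(token: string | undefined): Promise<void>;
declare function processBatch(keys: string[]): Promise<void>;

async function resumableScan(bucket: Bucket) {
  const cursor = bucket.createCursor('backups/', { pageSize: 200 });

  // Resume from a previously saved token, if any.
  const saved = await loadCheckpoint();
  if (saved) cursor.setToken(saved);

  while (cursor.hasMore()) {
    const { keys } = await cursor.next();
    await processBatch(keys);
    await saveCheckpoint(cursor.getToken()); // undefined once exhausted
  }
}
```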
134 ts/classes.metadata.ts Normal file
@@ -0,0 +1,134 @@
import * as plugins from './plugins.js';

import { File } from './classes.file.js';

export class MetaData {
  public static async hasMetaData(optionsArg: { file: File }) {
    // try finding the existing metadata file; return false if it doesn't exist
    try {
      const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
        path: optionsArg.file.name + '.metadata',
      });
      return !!existingFile;
    } catch (error: any) {
      const message = error?.message || '';
      const isNotFound =
        message.includes('File not found') ||
        error?.name === 'NotFound' ||
        error?.$metadata?.httpStatusCode === 404;
      if (isNotFound) {
        return false;
      }
      throw error;
    }
  }

  // static
  public static async createForFile(optionsArg: { file: File }) {
    const metaData = new MetaData();
    metaData.fileRef = optionsArg.file;

    // lets find the existing metadata file
    metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
      path: metaData.fileRef.name + '.metadata',
      createWithContents: '{}',
    });

    return metaData;
  }

  // instance
  /**
   * the file that contains the metadata
   */
  metadataFile!: File;

  /**
   * the file that the metadata is for
   */
  fileRef!: File;

  public async getFileType(optionsArg?: {
    useFileExtension?: boolean;
    useMagicBytes?: boolean;
  }): Promise<plugins.smartmime.IFileTypeResult | undefined> {
    if ((optionsArg && optionsArg.useFileExtension) || !optionsArg) {
      const fileType = await plugins.smartmime.detectMimeType({
        path: this.fileRef.name,
      });

      return fileType;
    }
    if (optionsArg && optionsArg.useMagicBytes) {
      const fileType = await plugins.smartmime.detectMimeType({
        buffer: await this.fileRef.getMagicBytes({
          length: 100,
        }),
      });

      return fileType;
    }
    throw new Error('optionsArg.useFileExtension and optionsArg.useMagicBytes cannot both be false');
  }

  /**
   * gets the size of the fileRef
   */
  public async getSizeInBytes(): Promise<number> {
    const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
      path: this.fileRef.getBasePath(),
    });
    return stat.ContentLength!;
  }

  private prefixCustomMetaData = 'custom_';

  public async storeCustomMetaData<T = any>(optionsArg: { key: string; value: T }) {
    const data = await this.metadataFile.getJsonData();
    data[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
    await this.metadataFile.writeJsonData(data);
  }

  public async getCustomMetaData<T = any>(optionsArg: { key: string }): Promise<T> {
    const data = await this.metadataFile.getJsonData();
    return data[this.prefixCustomMetaData + optionsArg.key];
  }

  public async deleteCustomMetaData(optionsArg: { key: string }) {
    const data = await this.metadataFile.getJsonData();
    delete data[this.prefixCustomMetaData + optionsArg.key];
    await this.metadataFile.writeJsonData(data);
  }

  /**
   * set a lock on the ref file
   * @param optionsArg
   */
  public async setLock(optionsArg: { lock: string; expires: number }) {
    const data = await this.metadataFile.getJsonData();
    data.lock = optionsArg.lock;
    data.lockExpires = optionsArg.expires;
    await this.metadataFile.writeJsonData(data);
  }

  /**
   * remove the lock on the ref file
   * @param optionsArg
   */
  public async removeLock(optionsArg: { force: boolean }) {
    const data = await this.metadataFile.getJsonData();
    delete data.lock;
    delete data.lockExpires;
    await this.metadataFile.writeJsonData(data);
  }

  public async checkLocked(): Promise<boolean> {
    const data = await this.metadataFile.getJsonData();
    return !!data.lock && data.lockExpires > Date.now();
  }

  public async getLockInfo(): Promise<{ lock: string; expires: number }> {
    const data = await this.metadataFile.getJsonData();
    return { lock: data.lock, expires: data.lockExpires };
  }
}
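The lock fields live in the same JSON sidecar as the custom metadata, so locking is cooperative: every writer has to check before writing. A sketch (the 30-second timeout is illustrative):

```ts
import type { File } from './classes.file.js';

// Cooperative locking sketch; locks are advisory, not enforced by S3.
async function guardedWrite(file: File, contents: string) {
  const meta = await file.getMetaData();
  if (await meta.checkLocked()) {
    const info = await meta.getLockInfo();
    throw new Error(`file locked until ${new Date(info.expires).toISOString()}`);
  }
  await file.lock({ timeoutMillis: 30_000 }); // illustrative timeout
  try {
    await file.updateWithContents({ contents });
  } finally {
    await file.unlock();
  }
}
```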
53 ts/classes.smartbucket.ts Normal file
@@ -0,0 +1,53 @@
// classes.smartbucket.ts

import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
import { normalizeS3Descriptor } from './helpers.js';

export class SmartBucket {
  public config: plugins.tsclass.storage.IS3Descriptor;

  public s3Client: plugins.s3.S3Client;

  /**
   * the constructor of SmartBucket
   */
  constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
    this.config = configArg;

    // Use the normalizer to handle various endpoint formats
    const { normalized } = normalizeS3Descriptor(configArg);

    this.s3Client = new plugins.s3.S3Client({
      endpoint: normalized.endpointUrl,
      region: normalized.region,
      credentials: normalized.credentials,
      forcePathStyle: normalized.forcePathStyle, // Necessary for S3-compatible storage like MinIO or Wasabi
    });
  }

  public async createBucket(bucketNameArg: string) {
    const bucket = await Bucket.createBucketByName(this, bucketNameArg);
    return bucket;
  }

  public async removeBucket(bucketName: string) {
    await Bucket.removeBucketByName(this, bucketName);
  }

  public async getBucketByName(bucketNameArg: string) {
    return Bucket.getBucketByName(this, bucketNameArg);
  }

  /**
   * Check if a bucket exists
   */
  public async bucketExists(bucketNameArg: string): Promise<boolean> {
    const command = new plugins.s3.ListBucketsCommand({});
    const buckets = await this.s3Client.send(command);
    return buckets.Buckets?.some(bucket => bucket.Name === bucketNameArg) ?? false;
  }
}
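End to end, standing up the client looks like this; the endpoint and credentials are illustrative, and the normalizer above absorbs scheme and port quirks:

```ts
import { SmartBucket } from './classes.smartbucket.js';

// Instantiation sketch against a local MinIO; values are illustrative.
const smartbucket = new SmartBucket({
  endpoint: 'minio.local:9000',
  useSsl: false,
  accessKey: 'minioadmin',
  accessSecret: 'minioadmin',
} as any); // cast: only accessKey/accessSecret/region are confirmed IS3Descriptor fields here

if (await smartbucket.bucketExists('my-bucket')) {
  const bucket = await smartbucket.getBucketByName('my-bucket');
  console.log(`connected to ${bucket.name}`);
}
```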
30 ts/classes.trash.ts Normal file
@@ -0,0 +1,30 @@
import * as plugins from './plugins.js';
import * as interfaces from './interfaces.js';
import * as helpers from './helpers.js';
import type { Bucket } from './classes.bucket.js';
import type { Directory } from './classes.directory.js';
import type { File } from './classes.file.js';

export class Trash {
  public bucketRef: Bucket;

  constructor(bucketRefArg: Bucket) {
    this.bucketRef = bucketRefArg;
  }

  public async getTrashDir() {
    return this.bucketRef.getDirectoryFromPath({ path: '.trash' });
  }

  public async getTrashedFileByOriginalName(pathDescriptor: interfaces.IPathDecriptor): Promise<File> {
    const trashDir = await this.getTrashDir();
    const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
    const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
    return trashDir.getFile({ path: trashKey });
  }

  public async getTrashKeyByOriginalBasePath(originalPath: string): Promise<string> {
    return plugins.smartstring.base64.encode(originalPath);
  }
}
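Trash keys are simply the base64 encoding of the original path, which makes lookups deterministic. A sketch, assuming `bucket` exposes `getTrash()` as used by `File.delete` above:

```ts
import type { Bucket } from './classes.bucket.js';

async function trashLookupDemo(bucket: Bucket) {
  const trash = await bucket.getTrash();

  // The key is derived, not stored: base64 of the original object path.
  const key = await trash.getTrashKeyByOriginalBasePath('logs/2024/app.log');

  // So a trashed file can be found again by its original name alone.
  const trashed = await trash.getTrashedFileByOriginalName({ path: 'logs/2024/app.log' });
  console.log(key, trashed.name);
}
```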
254 ts/helpers.ts Normal file
@@ -0,0 +1,254 @@
import * as plugins from './plugins.js';
import * as interfaces from './interfaces.js';

export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.IPathDecriptor): Promise<string> => {
  let returnPath = ``;
  if (pathDescriptorArg.directory) {
    if (pathDescriptorArg.path && plugins.path.isAbsolute(pathDescriptorArg.path)) {
      console.warn('Directory is being ignored when path is absolute.');
      returnPath = pathDescriptorArg.path;
    } else if (pathDescriptorArg.path) {
      returnPath = plugins.path.join(pathDescriptorArg.directory.getBasePath(), pathDescriptorArg.path);
    }
  } else if (pathDescriptorArg.path) {
    returnPath = pathDescriptorArg.path;
  } else {
    throw new Error('You must specify either a path or a directory.');
  }
  if (returnPath.startsWith('/')) {
    returnPath = returnPath.substring(1);
  }
  return returnPath;
};

// S3 Descriptor Normalization
export interface IS3Warning {
  code: string;
  message: string;
}

export interface INormalizedS3Config {
  endpointUrl: string;
  host: string;
  protocol: 'http' | 'https';
  port?: number;
  region: string;
  credentials: {
    accessKeyId: string;
    secretAccessKey: string;
  };
  forcePathStyle: boolean;
}

function coerceBooleanMaybe(value: unknown): { value: boolean | undefined; warning?: IS3Warning } {
  if (typeof value === 'boolean') return { value };
  if (typeof value === 'string') {
    const v = value.trim().toLowerCase();
    if (v === 'true' || v === '1') {
      return {
        value: true,
        warning: {
          code: 'SBK_S3_COERCED_USESSL',
          message: `Coerced useSsl='${value}' (string) to boolean true.`,
        },
      };
    }
    if (v === 'false' || v === '0') {
      return {
        value: false,
        warning: {
          code: 'SBK_S3_COERCED_USESSL',
          message: `Coerced useSsl='${value}' (string) to boolean false.`,
        },
      };
    }
  }
  return { value: undefined };
}

function coercePortMaybe(port: unknown): { value: number | undefined; warning?: IS3Warning } {
  if (port === undefined || port === null || port === '') return { value: undefined };
  const n = typeof port === 'number' ? port : Number(String(port).trim());
  if (!Number.isFinite(n) || !Number.isInteger(n) || n <= 0 || n > 65535) {
    return {
      value: undefined,
      warning: {
        code: 'SBK_S3_INVALID_PORT',
        message: `Invalid port '${String(port)}' - expected integer in [1..65535].`,
      },
    };
  }
  return { value: n };
}

function sanitizeEndpointString(raw: unknown): { value: string; warnings: IS3Warning[] } {
  const warnings: IS3Warning[] = [];
  let s = String(raw ?? '').trim();
  if (s !== String(raw ?? '')) {
    warnings.push({
      code: 'SBK_S3_TRIMMED_ENDPOINT',
      message: 'Trimmed surrounding whitespace from endpoint.',
    });
  }
  return { value: s, warnings };
}

function parseEndpointHostPort(
  endpoint: string,
  provisionalProtocol: 'http' | 'https'
): {
  hadScheme: boolean;
  host: string;
  port?: number;
  extras: {
    droppedPath?: boolean;
    droppedQuery?: boolean;
    droppedCreds?: boolean;
  };
} {
  let url: URL | undefined;
  const extras: { droppedPath?: boolean; droppedQuery?: boolean; droppedCreds?: boolean } = {};

  // Check if endpoint already has a scheme
  const hasScheme = /^https?:\/\//i.test(endpoint);

  // Try parsing as full URL first
  try {
    if (hasScheme) {
      url = new URL(endpoint);
    } else {
      // Not a full URL; try host[:port] by attaching provisional scheme
      // Remove anything after first '/' for safety
      const cleanEndpoint = endpoint.replace(/\/.*/, '');
      url = new URL(`${provisionalProtocol}://${cleanEndpoint}`);
    }
  } catch (e) {
    throw new Error(`Unable to parse endpoint '${endpoint}'.`);
  }

  // Check for dropped components
  if (url.username || url.password) extras.droppedCreds = true;
  if (url.pathname && url.pathname !== '/') extras.droppedPath = true;
  if (url.search) extras.droppedQuery = true;

  const hadScheme = hasScheme;
  const host = url.hostname; // hostnames lowercased by URL; IPs preserved
  const port = url.port ? Number(url.port) : undefined;

  return { hadScheme, host, port, extras };
}

export function normalizeS3Descriptor(
  input: plugins.tsclass.storage.IS3Descriptor,
  logger?: { warn: (msg: string) => void }
): { normalized: INormalizedS3Config; warnings: IS3Warning[] } {
  const warnings: IS3Warning[] = [];
  const logWarn = (w: IS3Warning) => {
    warnings.push(w);
    if (logger) {
      logger.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
    } else {
      console.warn(`[SmartBucket S3] ${w.code}: ${w.message}`);
    }
  };

  // Coerce and sanitize inputs
  const { value: coercedUseSsl, warning: useSslWarn } = coerceBooleanMaybe((input as any).useSsl);
  if (useSslWarn) logWarn(useSslWarn);

  const { value: coercedPort, warning: portWarn } = coercePortMaybe((input as any).port);
  if (portWarn) logWarn(portWarn);

  const { value: endpointStr, warnings: endpointSanWarnings } = sanitizeEndpointString((input as any).endpoint);
  endpointSanWarnings.forEach(logWarn);

  if (!endpointStr) {
    throw new Error('S3 endpoint is required (got empty string). Provide hostname or URL.');
  }

  // Provisional protocol selection for parsing host:port forms
  const provisionalProtocol: 'http' | 'https' = coercedUseSsl === false ? 'http' : 'https';

  const { hadScheme, host, port: epPort, extras } = parseEndpointHostPort(endpointStr, provisionalProtocol);

  if (extras.droppedCreds) {
    logWarn({
      code: 'SBK_S3_DROPPED_CREDENTIALS',
      message: 'Ignored credentials in endpoint URL.',
    });
  }
  if (extras.droppedPath) {
    logWarn({
      code: 'SBK_S3_DROPPED_PATH',
      message: 'Removed path segment from endpoint URL; S3 endpoint should be host[:port] only.',
    });
  }
  if (extras.droppedQuery) {
    logWarn({
      code: 'SBK_S3_DROPPED_QUERY',
      message: 'Removed query string from endpoint URL; S3 endpoint should be host[:port] only.',
    });
  }

  // Final protocol decision
  let finalProtocol: 'http' | 'https';
  if (hadScheme) {
    // Scheme from endpoint wins
    const schemeFromEndpoint = endpointStr.trim().toLowerCase().startsWith('http://') ? 'http' : 'https';
    finalProtocol = schemeFromEndpoint;
    if (typeof coercedUseSsl === 'boolean') {
      const expected = coercedUseSsl ? 'https' : 'http';
      if (expected !== finalProtocol) {
        logWarn({
          code: 'SBK_S3_SCHEME_CONFLICT',
          message: `useSsl=${String(coercedUseSsl)} conflicts with endpoint scheme '${finalProtocol}'; using endpoint scheme.`,
        });
      }
    }
  } else {
    if (typeof coercedUseSsl === 'boolean') {
      finalProtocol = coercedUseSsl ? 'https' : 'http';
    } else {
      finalProtocol = 'https';
      logWarn({
        code: 'SBK_S3_GUESSED_PROTOCOL',
        message: "No scheme in endpoint and useSsl not provided; defaulting to 'https'.",
      });
    }
  }

  // Final port decision
  let finalPort: number | undefined = undefined;
  if (coercedPort !== undefined && epPort !== undefined && coercedPort !== epPort) {
    logWarn({
      code: 'SBK_S3_PORT_CONFLICT',
      message: `Port in config (${coercedPort}) conflicts with endpoint port (${epPort}); using config port.`,
    });
    finalPort = coercedPort;
  } else {
    finalPort = (coercedPort !== undefined) ? coercedPort : epPort;
  }

  // Build canonical endpoint URL (origin only, no trailing slash)
  const url = new URL(`${finalProtocol}://${host}`);
  if (finalPort !== undefined) url.port = String(finalPort);
  const endpointUrl = url.origin;

  const region = input.region || 'us-east-1';

  return {
    normalized: {
      endpointUrl,
      host,
      protocol: finalProtocol,
      port: finalPort,
      region,
      credentials: {
        accessKeyId: input.accessKey,
        secretAccessKey: input.accessSecret,
      },
      forcePathStyle: true,
    },
    warnings,
  };
}
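To make the normalizer's behavior concrete, here is a sketch of feeding it a deliberately messy descriptor; the values are illustrative:

```ts
import { normalizeS3Descriptor } from './helpers.js';

// Messy input: whitespace, a path, a query string, and stringly-typed flags.
const { normalized, warnings } = normalizeS3Descriptor({
  endpoint: ' http://minio.local:9000/some/path?x=1 ',
  useSsl: 'false',
  port: '9000',
  accessKey: 'minioadmin',
  accessSecret: 'minioadmin',
} as any);

console.log(normalized.endpointUrl); // 'http://minio.local:9000'
for (const w of warnings) console.log(w.code);
// e.g. SBK_S3_TRIMMED_ENDPOINT, SBK_S3_COERCED_USESSL,
//      SBK_S3_DROPPED_PATH, SBK_S3_DROPPED_QUERY
```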
11 ts/index.ts
@@ -1,4 +1,7 @@
-export * from './smartbucket.classes.smartbucket';
-export * from './smartbucket.classes.bucket';
-export * from './smartbucket.classes.directory';
-export * from './smartbucket.classes.file';
+export * from './classes.smartbucket.js';
+export * from './classes.bucket.js';
+export * from './classes.directory.js';
+export * from './classes.file.js';
+export * from './classes.listcursor.js';
+export * from './classes.metadata.js';
+export * from './classes.trash.js';
6 ts/interfaces.ts Normal file
@@ -0,0 +1,6 @@
import type { Directory } from "./classes.directory.js";

export interface IPathDecriptor {
  path?: string;
  directory?: Directory;
}
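IPathDecriptor is the shape `helpers.reducePathDescriptorToPath` consumes; the two addressing styles below resolve to the same key (a sketch; `logsDir` is assumed to be the Directory for 'logs/2024/'):

```ts
import { reducePathDescriptorToPath } from './helpers.js';
import type { Directory } from './classes.directory.js';

declare const logsDir: Directory; // assumed: the directory for 'logs/2024/'

const byPath = await reducePathDescriptorToPath({ path: 'logs/2024/app.log' });
const byDirectory = await reducePathDescriptorToPath({
  directory: logsDir,
  path: 'app.log',
});
// byPath === byDirectory === 'logs/2024/app.log'
```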
34 ts/plugins.ts Normal file
@@ -0,0 +1,34 @@
// plugins.ts

// node native
import * as path from 'node:path';
import * as stream from 'node:stream';

export { path, stream };

// @push.rocks scope
import * as smartmime from '@push.rocks/smartmime';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrx from '@push.rocks/smartrx';
import * as smartstream from '@push.rocks/smartstream';
import * as smartstring from '@push.rocks/smartstring';
import * as smartunique from '@push.rocks/smartunique';

export { smartmime, smartpath, smartpromise, smartrx, smartstream, smartstring, smartunique };

// @tsclass
import * as tsclass from '@tsclass/tsclass';

export {
  tsclass,
}

// third party scope
import * as s3 from '@aws-sdk/client-s3';
import { Minimatch } from 'minimatch';

export {
  s3,
  Minimatch,
}
ts/smartbucket.classes.bucket.ts (deleted)
@@ -1,98 +0,0 @@
import * as plugins from './smartbucket.plugins';
import { SmartBucket } from './smartbucket.classes.smartbucket';
import { Directory } from './smartbucket.classes.directory';

export class Bucket {
  public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
    const buckets = await smartbucketRef.minioClient.listBuckets();
    const foundBucket = buckets.find(bucket => {
      return bucket.name === bucketNameArg;
    });

    if (foundBucket) {
      console.log(`bucket with name ${bucketNameArg} exists.`);
      console.log(`Taking this as base for new Bucket instance`);
      return new this(smartbucketRef, bucketNameArg);
    } else {
      return null;
    }
  }

  public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
    await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch(e => console.log(e));
    return new Bucket(smartbucketRef, bucketName);
  }

  public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
    await smartbucketRef.minioClient.removeBucket(bucketName).catch(e => console.log(e));
  }

  public smartbucketRef: SmartBucket;
  public name: string;

  constructor(smartbucketRef: SmartBucket, bucketName: string) {
    this.smartbucketRef = smartbucketRef;
    this.name = bucketName;
  }

  /**
   * gets the base directory of the bucket
   */
  public async getBaseDirectory() {
    return new Directory(this, null, '');
  }

  // ===============
  // Fast Operations
  // ===============

  /**
   * store file
   */
  public async fastStore(pathArg: string, fileContent: string) {
    const streamIntake = new plugins.streamfunction.Intake();
    const putPromise = this.smartbucketRef.minioClient
      .putObject(this.name, pathArg, streamIntake.getReadable())
      .catch(e => console.log(e));
    streamIntake.pushData(fileContent);
    streamIntake.signalEnd();
    await putPromise;
  }

  /**
   * get file
   */
  public async fastGet(pathArg: string) {
    const done = plugins.smartpromise.defer();
    const fileStream = await this.smartbucketRef.minioClient
      .getObject(this.name, pathArg)
      .catch(e => console.log(e));
    let completeFile: string = '';
    const duplexStream = plugins.streamfunction.createDuplexStream<Buffer, Buffer>(
      async chunk => {
        const chunkString = chunk.toString();
        completeFile += chunkString;
        return chunk;
      },
      async cb => {
        done.resolve();
        return Buffer.from('');
      }
    );

    if (!fileStream) {
      return null;
    }

    fileStream.pipe(duplexStream);
    await done.promise;
    return completeFile;
  }

  /**
   * removeObject
   */
  public async fastRemove(pathArg: string) {
    await this.smartbucketRef.minioClient.removeObject(this.name, pathArg);
  }
}
ts/smartbucket.classes.directory.ts (deleted)
@@ -1,165 +0,0 @@
import * as plugins from './smartbucket.plugins';
import { Bucket } from './smartbucket.classes.bucket';
import { File } from './smartbucket.classes.file';

export class Directory {
  public bucketRef: Bucket;
  public parentDirectoryRef: Directory;
  public name: string;

  public tree: string[];
  public files: string[];
  public folders: string[];

  constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) {
    this.bucketRef = bucketRefArg;
    this.parentDirectoryRef = parentDiretory;
    this.name = name;
  }

  /**
   * returns an array of parent directories
   */
  public getParentDirectories(): Directory[] {
    let parentDirectories: Directory[] = [];
    if (this.parentDirectoryRef) {
      parentDirectories.push(this.parentDirectoryRef);
      parentDirectories = parentDirectories.concat(this.parentDirectoryRef.getParentDirectories());
    }
    return parentDirectories;
  }

  /**
   * returns the directory level
   */
  public getDirectoryLevel(): number {
    return this.getParentDirectories().length;
  }

  /**
   * updates the base path
   */
  public getBasePath(): string {
    const parentDirectories = this.getParentDirectories();
    let basePath = '';
    for (const parentDir of parentDirectories) {
      basePath = parentDir.name + '/' + basePath;
    }
    return basePath;
  }

  /**
   * lists all files
   */
  public async listFiles(): Promise<File[]> {
    const done = plugins.smartpromise.defer();
    const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
      this.bucketRef.name,
      this.getBasePath()
    );
    const fileArray: File[] = [];
    const duplexStream = plugins.streamfunction.createDuplexStream<plugins.minio.BucketItem, void>(
      async bucketItem => {
        if (!bucketItem.name) {
          return;
        }
        const subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
        if (!subtractedPath.includes('/')) {
          fileArray.push(new File(this, bucketItem.name));
        }
      },
      async tools => {
        done.resolve();
      }
    );
    fileNameStream.pipe(duplexStream);
    await done.promise;
    return fileArray;
  }

  /**
   * lists all folders
   */
  public async listDirectories(): Promise<Directory[]> {
    const done = plugins.smartpromise.defer();
    const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
      this.bucketRef.name,
      this.getBasePath(),
      true
    );
    const directoryArray: Directory[] = [];
    const duplexStream = plugins.streamfunction.createDuplexStream<plugins.minio.BucketItem, void>(
      async bucketItem => {
        const subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
        if (subtractedPath.includes('/')) {
          const dirName = bucketItem.name.split('/')[0];
          if (directoryArray.find(directory => directory.name === dirName)) {
            return;
          }
          directoryArray.push(new Directory(this.bucketRef, this, dirName));
        }
      },
      async tools => {
        done.resolve();
      }
    );
    completeDirStream.pipe(duplexStream);
    await done.promise;
    return directoryArray;
  }

  /**
   * gets an array that has all objects with a certain prefix;
   */
  public async getTreeArray() {
    const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
      this.bucketRef.name,
      this.getBasePath(),
      true
    );
  }

  /**
   * gets a sub directory
   */
  public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
    // TODO: make this recursive
    const directories = await this.listDirectories();
    return directories.find(directory => {
      return directory.name === dirNameArg;
    });
  }

  /**
   * moves the directory
   */
  public async move() {
    // TODO
  }

  /**
   * creates a file within this directory
   * @param relativePathArg
   */
  public async createFile(relativePathArg) {
    let completeFilePath: string = '';
  }

  // file operations
  public async fastStore(pathArg: string, contentArg: string) {
    const path = plugins.path.join(this.getBasePath(), pathArg);
    await this.bucketRef.fastStore(path, contentArg);
  }

  public async fastGet(pathArg: string) {
    const path = plugins.path.join(this.getBasePath(), pathArg);
    const result = await this.bucketRef.fastGet(path);
    return result;
  }

  public async fastRemove(pathArg: string) {
    const path = plugins.path.join(this.getBasePath(), pathArg);
    await this.bucketRef.fastRemove(path);
  }
}
@@ -1,93 +0,0 @@
|
||||
import * as plugins from './smartbucket.plugins';
|
||||
import { Directory } from './smartbucket.classes.directory';
|
||||
|
||||
export interface IFileMetaData {
|
||||
name: string;
|
||||
fileType: string;
|
||||
size: string;
|
||||
}

export class File {
  // STATIC
  public static async createFileFromString(
    dirArg: Directory,
    fileName: string,
    fileContent: string
  ) {
    await this.createFileFromBuffer(dirArg, fileName, Buffer.from(fileContent));
  }

  public static async createFileFromBuffer(
    directoryRef: Directory,
    fileName: string,
    fileContent: Buffer
  ) {
    const filePath = plugins.path.join(directoryRef.getBasePath(), fileName);
    const streamIntake = new plugins.streamfunction.Intake();
    const putPromise = directoryRef.bucketRef.smartbucketRef.minioClient
      // the target bucket comes from the directory reference; `this.name` in a
      // static method is the class name 'File', not a bucket name
      .putObject(directoryRef.bucketRef.name, filePath, streamIntake.getReadable())
      .catch(e => console.log(e));
    streamIntake.pushData(fileContent);
    streamIntake.signalEnd();
    await putPromise;
  }

  // INSTANCE
  public parentDirectoryRef: Directory;
  public name: string;

  public path: string;
  public metaData: IFileMetaData;

  constructor(directoryRefArg: Directory, fileName: string) {
    this.parentDirectoryRef = directoryRefArg;
    this.name = fileName;
    // derive the object path here; getContentAsBuffer() and remove() read
    // this.path, which was previously never assigned
    this.path = plugins.path.join(directoryRefArg.getBasePath(), fileName);
  }

  public async getContentAsString() {
    const fileBuffer = await this.getContentAsBuffer();
    return fileBuffer.toString();
  }

  public async getContentAsBuffer() {
    const done = plugins.smartpromise.defer();
    const fileStream = await this.parentDirectoryRef.bucketRef.smartbucketRef.minioClient
      .getObject(this.parentDirectoryRef.bucketRef.name, this.path)
      .catch(e => console.log(e));
    let completeFile = Buffer.alloc(0); // new Buffer('') is deprecated
    const duplexStream = plugins.streamfunction.createDuplexStream<Buffer, Buffer>(
      async chunk => {
        // append each incoming chunk; concatenating only [chunk] would keep
        // nothing but the last chunk
        completeFile = Buffer.concat([completeFile, chunk]);
        return chunk;
      },
      async cb => {
        done.resolve();
        return Buffer.from('');
      }
    );

    if (!fileStream) {
      return null;
    }

    fileStream.pipe(duplexStream);
    await done.promise;
    return completeFile;
  }

  public async streamContent() {
    throw new Error('not yet implemented');
    // TODO
  }

  /**
   * removes this file
   */
  public async remove() {
    await this.parentDirectoryRef.bucketRef.smartbucketRef.minioClient.removeObject(
      this.parentDirectoryRef.bucketRef.name,
      this.path
    );
    await this.parentDirectoryRef.listFiles();
  }
}
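
A hedged usage sketch for the File class; `dir` again stands in for an existing Directory instance, and the file name and content are illustrative.

// sketch only; assumes `dir: Directory` and an async context
await File.createFileFromString(dir, 'hello.txt', 'hello world');
const file = new File(dir, 'hello.txt');
const text = await file.getContentAsString();
await file.remove();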
@@ -1,41 +0,0 @@
import * as plugins from './smartbucket.plugins';
import { Bucket } from './smartbucket.classes.bucket';

export interface ISmartBucketConfig {
  endpoint: string;
  accessKey: string;
  accessSecret: string;
}

export class SmartBucket {
  public config: ISmartBucketConfig;

  public minioClient: plugins.minio.Client;

  /**
   * the constructor of SmartBucket
   */
  constructor(configArg: ISmartBucketConfig) {
    this.config = configArg;
    this.minioClient = new plugins.minio.Client({
      endPoint: this.config.endpoint,
      port: 443,
      useSSL: true,
      accessKey: this.config.accessKey,
      secretKey: this.config.accessSecret
    });
  }

  public async createBucket(bucketNameArg: string) {
    const bucket = await Bucket.createBucketByName(this, bucketNameArg);
    return bucket;
  }

  public async removeBucket(bucketName: string) {
    await Bucket.removeBucketByName(this, bucketName);
  }

  public async getBucketByName(bucketName: string) {
    return Bucket.getBucketByName(this, bucketName);
  }
}
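
A minimal construction sketch for SmartBucket; the endpoint and credentials below are placeholders. Note that the constructor above hard-wires the client to SSL on port 443.

// sketch only; placeholder endpoint and credentials, async context assumed
const smartbucket = new SmartBucket({
  endpoint: 's3.example.com',
  accessKey: 'myAccessKey',
  accessSecret: 'myAccessSecret'
});
const myBucket = await smartbucket.getBucketByName('my-bucket');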
@@ -1,15 +0,0 @@
// node native
import * as path from 'path';

export { path };

import * as smartpath from '@pushrocks/smartpath';
import * as smartpromise from '@pushrocks/smartpromise';
import * as streamfunction from '@pushrocks/streamfunction';

export { smartpath, smartpromise, streamfunction };

// third party scope
import * as minio from 'minio';

export { minio };
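
This plugins module keeps all external dependencies behind a single namespace; the other files in this diff consume it as sketched below.

// consumption pattern used across this codebase
import * as plugins from './smartbucket.plugins';

const joined = plugins.path.join('some', 'path');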
15
tsconfig.json
Normal file
@@ -0,0 +1,15 @@
{
  "compilerOptions": {
    "experimentalDecorators": true,
    "useDefineForClassFields": false,
    "target": "ES2022",
    "module": "NodeNext",
    "moduleResolution": "NodeNext",
    "esModuleInterop": true,
    "verbatimModuleSyntax": true,
    "strict": true
  },
  "exclude": [
    "dist_*/**/*.d.ts"
  ]
}
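
One practical consequence of enabling verbatimModuleSyntax: imports used only for their types must be written with the `type` keyword. A sketch, with the module path assumed for illustration:

// with verbatimModuleSyntax, type-only imports must be explicit
import type { ISmartBucketConfig } from './smartbucket.classes.smartbucket';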
17
tslint.json
@@ -1,17 +0,0 @@
{
  "extends": ["tslint:latest", "tslint-config-prettier"],
  "rules": {
    "semicolon": [true, "always"],
    "no-console": false,
    "ordered-imports": false,
    "object-literal-sort-keys": false,
    "member-ordering": {
      "options": {
        "order": [
          "static-method"
        ]
      }
    }
  },
  "defaultSeverity": "warning"
}