Compare commits
88 Commits
Author | SHA1 | Date | |
---|---|---|---|
fa4c44ae04 | |||
708b0b63b1 | |||
8554554642 | |||
a04aabf78b | |||
47cf2cc2cb | |||
ef20e15d20 | |||
39a4bd6ab7 | |||
c2a30654c5 | |||
8085033de4 | |||
75dd1d43a9 | |||
8ba7cdc873 | |||
ed8db4536b | |||
96e3eadb31 | |||
e9426b9cc9 | |||
9801e15c32 | |||
cbfdd8e123 | |||
138c38ee30 | |||
a1e449cf94 | |||
aa9a2e9220 | |||
154854dc21 | |||
8e9041fbbf | |||
16a82ac50a | |||
0b396f19cf | |||
6ab77ece6e | |||
b7a1f2087c | |||
b0d41fa9a0 | |||
34082c38a7 | |||
8d160cefb0 | |||
cec9c07b7c | |||
383a5204f4 | |||
c7f0c97341 | |||
e7f60465ff | |||
7db4d24817 | |||
dc599585b8 | |||
a22e32cd32 | |||
4647181807 | |||
99c3935d0c | |||
05523dc7a1 | |||
dc99cfa229 | |||
23f8dc55d0 | |||
ffaf0fc97a | |||
2a0425ff54 | |||
9adcdee0a0 | |||
786f8d4365 | |||
67244ba5cf | |||
a9bb31c2a2 | |||
bd8b05920f | |||
535d9f8520 | |||
8401fe1c0c | |||
08c3f674bf | |||
df0a439def | |||
7245b49c31 | |||
4b70edb947 | |||
9629a04da6 | |||
963463d40d | |||
ce58b99fc7 | |||
591c99736d | |||
559e3da47b | |||
a7ac870e05 | |||
d48c5e229a | |||
b9c384dd08 | |||
91c04b2364 | |||
b5dcc131e2 | |||
cb0ab2c9db | |||
2a17ee542e | |||
95e9d2f0ff | |||
1a71c76da3 | |||
e924511147 | |||
645ebbdd4d | |||
168148b2c9 | |||
1293fc4ca6 | |||
b040120813 | |||
5c2d92c041 | |||
eaf2e7e6bb | |||
1e1f65119c | |||
c70ee820d7 | |||
2a15362ced | |||
9d5cdadd89 | |||
a92fae2617 | |||
2cacfcf990 | |||
72d1e1e5a2 | |||
a0be96bf23 | |||
dab74572b8 | |||
b871e23052 | |||
caa69ae6ba | |||
f1ee2f096c | |||
32e574197b | |||
f13db18b00 |
140
.gitlab-ci.yml
140
.gitlab-ci.yml
@@ -1,140 +0,0 @@
|
|||||||
# gitzone ci_default
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
|
|
||||||
cache:
|
|
||||||
paths:
|
|
||||||
- .npmci_cache/
|
|
||||||
key: '$CI_BUILD_STAGE'
|
|
||||||
|
|
||||||
stages:
|
|
||||||
- security
|
|
||||||
- test
|
|
||||||
- release
|
|
||||||
- metadata
|
|
||||||
|
|
||||||
before_script:
|
|
||||||
- npm install -g @shipzone/npmci
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# security stage
|
|
||||||
# ====================
|
|
||||||
mirror:
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci git mirror
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
auditProductionDependencies:
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install --production --ignore-scripts
|
|
||||||
- npmci command npm config set registry https://registry.npmjs.org
|
|
||||||
- npmci command npm audit --audit-level=high --only=prod --production
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
allow_failure: true
|
|
||||||
|
|
||||||
auditDevDependencies:
|
|
||||||
image: registry.gitlab.com/hosttoday/ht-docker-node:npmci
|
|
||||||
stage: security
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci command npm install --ignore-scripts
|
|
||||||
- npmci command npm config set registry https://registry.npmjs.org
|
|
||||||
- npmci command npm audit --audit-level=high --only=dev
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
allow_failure: true
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# test stage
|
|
||||||
# ====================
|
|
||||||
|
|
||||||
testStable:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci npm test
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
testBuild:
|
|
||||||
stage: test
|
|
||||||
script:
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command npm run build
|
|
||||||
coverage: /\d+.?\d+?\%\s*coverage/
|
|
||||||
tags:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
release:
|
|
||||||
stage: release
|
|
||||||
script:
|
|
||||||
- npmci node install stable
|
|
||||||
- npmci npm publish
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
# ====================
|
|
||||||
# metadata stage
|
|
||||||
# ====================
|
|
||||||
codequality:
|
|
||||||
stage: metadata
|
|
||||||
allow_failure: true
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
script:
|
|
||||||
- npmci command npm install -g typescript
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- priv
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci trigger
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
|
|
||||||
pages:
|
|
||||||
stage: metadata
|
|
||||||
script:
|
|
||||||
- npmci node install lts
|
|
||||||
- npmci command npm install -g @git.zone/tsdoc
|
|
||||||
- npmci npm prepare
|
|
||||||
- npmci npm install
|
|
||||||
- npmci command tsdoc
|
|
||||||
tags:
|
|
||||||
- lossless
|
|
||||||
- docker
|
|
||||||
- notpriv
|
|
||||||
only:
|
|
||||||
- tags
|
|
||||||
artifacts:
|
|
||||||
expire_in: 1 week
|
|
||||||
paths:
|
|
||||||
- public
|
|
||||||
allow_failure: true
|
|
168
changelog.md
Normal file
168
changelog.md
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2025-08-15 - 3.3.9 - fix(docs)
|
||||||
|
Revise README with detailed usage examples and add local Claude settings
|
||||||
|
|
||||||
|
- Revamped README: reorganized content, added emojis and clearer headings for install, getting started, bucket/file/directory operations, streaming, metadata, trash/recovery, locking, and advanced configuration.
|
||||||
|
- Added many concrete code examples for SmartBucket, Bucket, Directory, File, streaming (node/web), RxJS replay subjects, metadata handling, trash workflow, file locking, magic-bytes detection, JSON operations, and cleaning bucket contents.
|
||||||
|
- Included testing instructions (pnpm test) and a Best Practices section with recommendations for strict mode, streaming, metadata, trash usage, and locking.
|
||||||
|
- Added .claude/settings.local.json to include local Claude configuration and tool permissions.
|
||||||
|
- No source code or public API changes; documentation and local tooling config only.
|
||||||
|
|
||||||
|
## 2025-08-15 - 3.3.8 - fix(tests)
|
||||||
|
Update tests to use @git.zone/tstest, upgrade dependencies, remove GitLab CI and add local CI/workspace config
|
||||||
|
|
||||||
|
- Tests: replace imports from @push.rocks/tapbundle with @git.zone/tstest/tapbundle and switch tap.start() to export default tap.start()
|
||||||
|
- Dependencies: bump @aws-sdk/client-s3 and several @push.rocks packages; upgrade @tsclass/tsclass to a newer major
|
||||||
|
- DevDependencies: upgrade @git.zone/tsbuild, @git.zone/tstest, @push.rocks/qenv, and @push.rocks/tapbundle
|
||||||
|
- CI/config: remove .gitlab-ci.yml, add .claude/settings.local.json
|
||||||
|
- Workspace: add pnpm-workspace.yaml and packageManager field in package.json
|
||||||
|
|
||||||
|
## 2024-12-02 - 3.3.7 - fix(package)
|
||||||
|
Update author field in package.json
|
||||||
|
|
||||||
|
- Corrected the author field from 'Lossless GmbH' to 'Task Venture Capital GmbH' in the package.json file.
|
||||||
|
|
||||||
|
## 2024-12-02 - 3.3.6 - fix(package)
|
||||||
|
Fix license field in package.json to reflect MIT licensing
|
||||||
|
|
||||||
|
|
||||||
|
## 2024-11-25 - 3.3.5 - fix(test)
|
||||||
|
Refactor trash test to improve metadata validation
|
||||||
|
|
||||||
|
- Added new checks in trash tests to ensure metadata files are correctly moved to trash.
|
||||||
|
- Validated the presence and integrity of metadata within trashed files.
|
||||||
|
|
||||||
|
## 2024-11-25 - 3.3.4 - fix(core)
|
||||||
|
Minor refactoring and cleanup of TypeScript source files for improved readability and maintainability.
|
||||||
|
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.3.3 - fix(documentation)
|
||||||
|
Improved documentation accuracy and consistency
|
||||||
|
|
||||||
|
- Updated the project description to reflect the cloud-agnostic nature and advanced capabilities
|
||||||
|
- Enhanced the README with detailed explanations and code examples for advanced features like trash management
|
||||||
|
- Clarified the handling and importance of metadata using the MetaData utility
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.3.2 - fix(documentation)
|
||||||
|
Updated keywords and description for clarity and consistency.
|
||||||
|
|
||||||
|
- Modified keywords and description in package.json and npmextra.json.
|
||||||
|
- Enhanced readme.md file structure and examples
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.3.1 - fix(File)
|
||||||
|
Fixed issue with file restore metadata operations.
|
||||||
|
|
||||||
|
- Corrected the order of operations in the file restore function to ensure custom metadata is appropriately deleted after moving the file.
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.3.0 - feat(core)
|
||||||
|
Enhanced directory handling and file restoration from trash
|
||||||
|
|
||||||
|
- Refined getSubDirectoryByName to handle file paths treated as directories.
|
||||||
|
- Introduced file restoration function from trash to original or specified paths.
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.2.2 - fix(core)
|
||||||
|
Refactor Bucket class for improved error handling
|
||||||
|
|
||||||
|
- Ensured safe access using non-null assertions when finding a bucket.
|
||||||
|
- Enhanced fastPut method by adding fastPutStrict for safer operations.
|
||||||
|
- Added explicit error handling and type checking in fastExists method.
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.2.1 - fix(metadata)
|
||||||
|
Fix metadata handling for deleted files
|
||||||
|
|
||||||
|
- Ensured metadata is correctly stored and managed when files are deleted into the trash.
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.2.0 - feat(bucket)
|
||||||
|
Enhanced SmartBucket with trash management and metadata handling
|
||||||
|
|
||||||
|
- Added functionality to move files to a trash directory.
|
||||||
|
- Introduced methods to handle file metadata more robustly.
|
||||||
|
- Implemented a method to clean all contents from a bucket.
|
||||||
|
- Enhanced directory retrieval to handle non-existent directories with options.
|
||||||
|
- Improved handling of file paths and metadata within the storage system.
|
||||||
|
|
||||||
|
## 2024-11-18 - 3.1.0 - feat(file)
|
||||||
|
Added functionality to retrieve magic bytes from files and detect file types using magic bytes.
|
||||||
|
|
||||||
|
- Introduced method `getMagicBytes` in `File` and `Bucket` classes to retrieve a specific number of bytes from a file.
|
||||||
|
- Enhanced file type detection by utilizing magic bytes in `MetaData` class.
|
||||||
|
- Updated dependencies for better performance and compatibility.
|
||||||
|
|
||||||
|
## 2024-11-18 - 3.0.24 - fix(metadata)
|
||||||
|
Fix metadata handling to address type assertion and data retrieval.
|
||||||
|
|
||||||
|
- Fixed type assertion issues in `MetaData` class properties with type non-null assertions.
|
||||||
|
- Corrected the handling of JSON data retrieval in `MetaData.storeCustomMetaData` function.
|
||||||
|
|
||||||
|
## 2024-10-16 - 3.0.23 - fix(dependencies)
|
||||||
|
Update package dependencies for improved functionality and security.
|
||||||
|
|
||||||
|
- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
|
||||||
|
- Updated @push.rocks/smartstream to version ^3.2.4.
|
||||||
|
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.
|
||||||
|
|
||||||
|
## 2024-07-28 - 3.0.22 - fix(dependencies)
|
||||||
|
Update dependencies and improve bucket retrieval logging
|
||||||
|
|
||||||
|
- Updated @aws-sdk/client-s3 to ^3.620.0
|
||||||
|
- Updated @git.zone/tsbuild to ^2.1.84
|
||||||
|
- Updated @git.zone/tsrun to ^1.2.49
|
||||||
|
- Updated @push.rocks/smartpromise to ^4.0.4
|
||||||
|
- Updated @tsclass/tsclass to ^4.1.2
|
||||||
|
- Added a log for when a bucket is not found by name in getBucketByName method
|
||||||
|
|
||||||
|
## 2024-07-04 - 3.0.21 - fix(test)
|
||||||
|
Update endpoint configuration in tests to use environment variable
|
||||||
|
|
||||||
|
- Modified `qenv.yml` to include `S3_ENDPOINT` as a required environment variable.
|
||||||
|
- Updated test files to fetch `S3_ENDPOINT` from environment instead of hardcoding.
|
||||||
|
|
||||||
|
## 2024-06-19 - 3.0.20 - Fix and Stability Updates
|
||||||
|
Improved overall stability and consistency.
|
||||||
|
|
||||||
|
## 2024-06-18 - 3.0.18 - Delete Functions Consistency
|
||||||
|
Ensured more consistency between delete methods and trash behavior.
|
||||||
|
|
||||||
|
## 2024-06-17 - 3.0.17 to 3.0.16 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-11 - 3.0.15 to 3.0.14 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-10 - 3.0.13 - Trash Feature Completion
|
||||||
|
Finished work on trash feature.
|
||||||
|
|
||||||
|
## 2024-06-09 - 3.0.12 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-08 - 3.0.11 to 3.0.10 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-03 - 3.0.10 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-05-29 - 3.0.9 - Update Description
|
||||||
|
Updated project description.
|
||||||
|
|
||||||
|
## 2024-05-27 - 3.0.8 to 3.0.6 - Pathing and Core Updates
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
- S3 paths' pathing differences now correctly handled with a reducePath method.
|
||||||
|
|
||||||
|
## 2024-05-21 - 3.0.5 to 3.0.4 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-05-17 - 3.0.3 to 3.0.2 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-05-17 - 3.0.0 - Major Release
|
||||||
|
Introduced breaking changes in core and significant improvements.
|
||||||
|
|
||||||
|
## 2024-05-05 - 2.0.5 - Breaking Changes
|
||||||
|
Introduced breaking changes in core functionality.
|
||||||
|
|
||||||
|
## 2024-04-14 - 2.0.4 - TSConfig Update
|
||||||
|
Updated TypeScript configuration.
|
||||||
|
|
||||||
|
## 2024-01-01 - 2.0.2 - Organization Scheme Update
|
||||||
|
Switched to the new organizational scheme.
|
19
license
Normal file
19
license
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
Copyright (c) 2014 Task Venture Capital GmbH (hello@task.vc)
|
||||||
|
|
||||||
|
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
of this software and associated documentation files (the "Software"), to deal
|
||||||
|
in the Software without restriction, including without limitation the rights
|
||||||
|
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
copies of the Software, and to permit persons to whom the Software is
|
||||||
|
furnished to do so, subject to the following conditions:
|
||||||
|
|
||||||
|
The above copyright notice and this permission notice shall be included in all
|
||||||
|
copies or substantial portions of the Software.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
SOFTWARE.
|
@@ -5,12 +5,37 @@
|
|||||||
"gitzone": {
|
"gitzone": {
|
||||||
"projectType": "npm",
|
"projectType": "npm",
|
||||||
"module": {
|
"module": {
|
||||||
"githost": "gitlab.com",
|
"githost": "code.foss.global",
|
||||||
"gitscope": "push.rocks",
|
"gitscope": "push.rocks",
|
||||||
"gitrepo": "smartbucket",
|
"gitrepo": "smartbucket",
|
||||||
"description": "simple cloud independent object storage",
|
"description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
|
||||||
"npmPackagename": "@push.rocks/smartbucket",
|
"npmPackagename": "@push.rocks/smartbucket",
|
||||||
"license": "MIT"
|
"license": "MIT",
|
||||||
}
|
"keywords": [
|
||||||
|
"TypeScript",
|
||||||
|
"cloud agnostic",
|
||||||
|
"object storage",
|
||||||
|
"bucket management",
|
||||||
|
"file operations",
|
||||||
|
"directory management",
|
||||||
|
"data streaming",
|
||||||
|
"S3",
|
||||||
|
"multi-cloud",
|
||||||
|
"file locking",
|
||||||
|
"metadata management",
|
||||||
|
"buffer handling",
|
||||||
|
"access control",
|
||||||
|
"environment configuration",
|
||||||
|
"unified storage",
|
||||||
|
"bucket policies",
|
||||||
|
"trash management",
|
||||||
|
"file transfer",
|
||||||
|
"data management",
|
||||||
|
"streaming"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"tsdoc": {
|
||||||
|
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
|
||||||
}
|
}
|
||||||
}
|
}
|
17893
package-lock.json
generated
17893
package-lock.json
generated
File diff suppressed because it is too large
Load Diff
64
package.json
64
package.json
@@ -1,31 +1,33 @@
|
|||||||
{
|
{
|
||||||
"name": "@push.rocks/smartbucket",
|
"name": "@push.rocks/smartbucket",
|
||||||
"version": "2.0.4",
|
"version": "3.3.9",
|
||||||
"description": "simple cloud independent object storage",
|
"description": "A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.",
|
||||||
"main": "dist_ts/index.js",
|
"main": "dist_ts/index.js",
|
||||||
"typings": "dist_ts/index.d.ts",
|
"typings": "dist_ts/index.d.ts",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
"author": "Lossless GmbH",
|
"author": "Task Venture Capital GmbH",
|
||||||
"license": "UNLICENSED",
|
"license": "MIT",
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "(tstest test/)",
|
"test": "(tstest test/)",
|
||||||
"build": "(tsbuild --web --allowimplicitany)"
|
"build": "(tsbuild --web --allowimplicitany)"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.1.63",
|
"@git.zone/tsbuild": "^2.6.4",
|
||||||
"@git.zone/tsrun": "^1.2.46",
|
"@git.zone/tsrun": "^1.2.49",
|
||||||
"@git.zone/tstest": "^1.0.71",
|
"@git.zone/tstest": "^2.3.2",
|
||||||
"@push.rocks/qenv": "^6.0.4",
|
"@push.rocks/qenv": "^6.1.2",
|
||||||
"@push.rocks/tapbundle": "^5.0.3"
|
"@push.rocks/tapbundle": "^6.0.3"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@push.rocks/smartpath": "^5.0.5",
|
"@aws-sdk/client-s3": "^3.864.0",
|
||||||
"@push.rocks/smartpromise": "^4.0.3",
|
"@push.rocks/smartmime": "^2.0.4",
|
||||||
"@push.rocks/smartrx": "^3.0.7",
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
"@push.rocks/smartstream": "^2.0.2",
|
"@push.rocks/smartpromise": "^4.2.3",
|
||||||
"@tsclass/tsclass": "^4.0.50",
|
"@push.rocks/smartrx": "^3.0.10",
|
||||||
"@types/minio": "^7.0.13",
|
"@push.rocks/smartstream": "^3.2.5",
|
||||||
"minio": "^7.0.28"
|
"@push.rocks/smartstring": "^4.0.15",
|
||||||
|
"@push.rocks/smartunique": "^3.0.9",
|
||||||
|
"@tsclass/tsclass": "^9.2.0"
|
||||||
},
|
},
|
||||||
"private": false,
|
"private": false,
|
||||||
"files": [
|
"files": [
|
||||||
@@ -42,5 +44,33 @@
|
|||||||
],
|
],
|
||||||
"browserslist": [
|
"browserslist": [
|
||||||
"last 1 chrome versions"
|
"last 1 chrome versions"
|
||||||
]
|
],
|
||||||
|
"keywords": [
|
||||||
|
"TypeScript",
|
||||||
|
"cloud agnostic",
|
||||||
|
"object storage",
|
||||||
|
"bucket management",
|
||||||
|
"file operations",
|
||||||
|
"directory management",
|
||||||
|
"data streaming",
|
||||||
|
"S3",
|
||||||
|
"multi-cloud",
|
||||||
|
"file locking",
|
||||||
|
"metadata management",
|
||||||
|
"buffer handling",
|
||||||
|
"access control",
|
||||||
|
"environment configuration",
|
||||||
|
"unified storage",
|
||||||
|
"bucket policies",
|
||||||
|
"trash management",
|
||||||
|
"file transfer",
|
||||||
|
"data management",
|
||||||
|
"streaming"
|
||||||
|
],
|
||||||
|
"homepage": "https://code.foss.global/push.rocks/smartbucket",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://code.foss.global/push.rocks/smartbucket.git"
|
||||||
|
},
|
||||||
|
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
|
||||||
}
|
}
|
||||||
|
13944
pnpm-lock.yaml
generated
13944
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
4
pnpm-workspace.yaml
Normal file
4
pnpm-workspace.yaml
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
onlyBuiltDependencies:
|
||||||
|
- esbuild
|
||||||
|
- mongodb-memory-server
|
||||||
|
- puppeteer
|
1
qenv.yml
1
qenv.yml
@@ -1,3 +1,4 @@
|
|||||||
required:
|
required:
|
||||||
- S3_KEY
|
- S3_KEY
|
||||||
- S3_SECRET
|
- S3_SECRET
|
||||||
|
- S3_ENDPOINT
|
3
readme.hints.md
Normal file
3
readme.hints.md
Normal file
@@ -0,0 +1,3 @@
|
|||||||
|
* The project uses the official s3 client, not the minio client.
|
||||||
|
* notice the difference between *Strict methods and the normal methods.
|
||||||
|
* metadata is handled though the MetaData class. Important!
|
491
readme.md
491
readme.md
@@ -1,39 +1,472 @@
|
|||||||
# @push.rocks/smartbucket
|
# @push.rocks/smartbucket 🪣
|
||||||
simple cloud independent object storage
|
|
||||||
|
|
||||||
## Availabililty and Links
|
> A powerful, cloud-agnostic TypeScript library for object storage with advanced features like file locking, metadata management, and intelligent trash handling.
|
||||||
* [npmjs.org (npm package)](https://www.npmjs.com/package/@push.rocks/smartbucket)
|
|
||||||
* [gitlab.com (source)](https://gitlab.com/pushrocks/smartbucket)
|
|
||||||
* [github.com (source mirror)](https://github.com/pushrocks/smartbucket)
|
|
||||||
* [docs (typedoc)](https://pushrocks.gitlab.io/smartbucket/)
|
|
||||||
|
|
||||||
## Status for master
|
## Install 📦
|
||||||
|
|
||||||
Status Category | Status Badge
|
To install `@push.rocks/smartbucket`, run:
|
||||||
-- | --
|
|
||||||
GitLab Pipelines | [](https://lossless.cloud)
|
|
||||||
GitLab Pipline Test Coverage | [](https://lossless.cloud)
|
|
||||||
npm | [](https://lossless.cloud)
|
|
||||||
Snyk | [](https://lossless.cloud)
|
|
||||||
TypeScript Support | [](https://lossless.cloud)
|
|
||||||
node Support | [](https://nodejs.org/dist/latest-v10.x/docs/api/)
|
|
||||||
Code Style | [](https://lossless.cloud)
|
|
||||||
PackagePhobia (total standalone install weight) | [](https://lossless.cloud)
|
|
||||||
PackagePhobia (package size on registry) | [](https://lossless.cloud)
|
|
||||||
BundlePhobia (total size when bundled) | [](https://lossless.cloud)
|
|
||||||
Platform support | [](https://lossless.cloud) [](https://lossless.cloud)
|
|
||||||
|
|
||||||
## Usage
|
```bash
|
||||||
|
npm install @push.rocks/smartbucket --save
|
||||||
|
```
|
||||||
|
|
||||||
Use TypeScript for best in class intellisense.
|
Or if you're using pnpm (recommended):
|
||||||
|
|
||||||
## Contribution
|
```bash
|
||||||
|
pnpm add @push.rocks/smartbucket
|
||||||
|
```
|
||||||
|
|
||||||
We are always happy for code contributions. If you are not the code contributing type that is ok. Still, maintaining Open Source repositories takes considerable time and thought. If you like the quality of what we do and our modules are useful to you we would appreciate a little monthly contribution: You can [contribute one time](https://lossless.link/contribute-onetime) or [contribute monthly](https://lossless.link/contribute). :)
|
## Usage 🚀
|
||||||
|
|
||||||
For further information read the linked docs at the top of this readme.
|
### Introduction
|
||||||
|
|
||||||
> MIT licensed | **©** [Lossless GmbH](https://lossless.gmbh)
|
`@push.rocks/smartbucket` provides a unified, cloud-agnostic API for object storage operations across major providers like AWS S3, Google Cloud Storage, MinIO, and more. It abstracts away provider-specific complexities while offering advanced features like metadata management, file locking, streaming operations, and intelligent trash management.
|
||||||
| By using this npm module you agree to our [privacy policy](https://lossless.gmbH/privacy)
|
|
||||||
|
|
||||||
[](https://maintainedby.lossless.com)
|
### Table of Contents
|
||||||
|
|
||||||
|
1. [🏁 Getting Started](#-getting-started)
|
||||||
|
2. [🗂️ Working with Buckets](#️-working-with-buckets)
|
||||||
|
3. [📁 File Operations](#-file-operations)
|
||||||
|
4. [📂 Directory Management](#-directory-management)
|
||||||
|
5. [🌊 Streaming Operations](#-streaming-operations)
|
||||||
|
6. [🔒 File Locking](#-file-locking)
|
||||||
|
7. [🏷️ Metadata Management](#️-metadata-management)
|
||||||
|
8. [🗑️ Trash & Recovery](#️-trash--recovery)
|
||||||
|
9. [⚡ Advanced Features](#-advanced-features)
|
||||||
|
10. [☁️ Cloud Provider Support](#️-cloud-provider-support)
|
||||||
|
|
||||||
|
### 🏁 Getting Started
|
||||||
|
|
||||||
|
First, set up your storage connection:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { SmartBucket } from '@push.rocks/smartbucket';
|
||||||
|
|
||||||
|
// Initialize with your cloud storage credentials
|
||||||
|
const smartBucket = new SmartBucket({
|
||||||
|
accessKey: 'your-access-key',
|
||||||
|
accessSecret: 'your-secret-key',
|
||||||
|
endpoint: 's3.amazonaws.com', // Or your provider's endpoint
|
||||||
|
port: 443,
|
||||||
|
useSsl: true,
|
||||||
|
region: 'us-east-1' // Optional, defaults to 'us-east-1'
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🗂️ Working with Buckets
|
||||||
|
|
||||||
|
#### Creating Buckets
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Create a new bucket
|
||||||
|
const myBucket = await smartBucket.createBucket('my-awesome-bucket');
|
||||||
|
console.log(`✅ Bucket created: ${myBucket.name}`);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Getting Existing Buckets
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Get a bucket reference
|
||||||
|
const existingBucket = await smartBucket.getBucketByName('existing-bucket');
|
||||||
|
|
||||||
|
// Or use strict mode (throws if bucket doesn't exist)
|
||||||
|
const bucketStrict = await smartBucket.getBucketByNameStrict('must-exist-bucket');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Removing Buckets
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Delete a bucket (must be empty)
|
||||||
|
await smartBucket.removeBucket('old-bucket');
|
||||||
|
console.log('🗑️ Bucket removed');
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📁 File Operations
|
||||||
|
|
||||||
|
#### Upload Files
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const bucket = await smartBucket.getBucketByName('my-bucket');
|
||||||
|
|
||||||
|
// Simple file upload
|
||||||
|
await bucket.fastPut({
|
||||||
|
path: 'documents/report.pdf',
|
||||||
|
contents: Buffer.from('Your file content here')
|
||||||
|
});
|
||||||
|
|
||||||
|
// Upload with string content
|
||||||
|
await bucket.fastPut({
|
||||||
|
path: 'notes/todo.txt',
|
||||||
|
contents: 'Buy milk\nCall mom\nRule the world'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Strict upload (returns File object)
|
||||||
|
const uploadedFile = await bucket.fastPutStrict({
|
||||||
|
path: 'images/logo.png',
|
||||||
|
contents: imageBuffer,
|
||||||
|
overwrite: true // Optional: control overwrite behavior
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Download Files
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Get file as Buffer
|
||||||
|
const fileContent = await bucket.fastGet({
|
||||||
|
path: 'documents/report.pdf'
|
||||||
|
});
|
||||||
|
console.log(`📄 File size: ${fileContent.length} bytes`);
|
||||||
|
|
||||||
|
// Get file as string
|
||||||
|
const textContent = fileContent.toString('utf-8');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Check File Existence
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const exists = await bucket.fastExists({
|
||||||
|
path: 'documents/report.pdf'
|
||||||
|
});
|
||||||
|
console.log(`File exists: ${exists ? '✅' : '❌'}`);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Delete Files
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Permanent deletion
|
||||||
|
await bucket.fastRemove({
|
||||||
|
path: 'old-file.txt'
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Copy & Move Files
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Copy file within bucket
|
||||||
|
await bucket.fastCopy({
|
||||||
|
sourcePath: 'original/file.txt',
|
||||||
|
destinationPath: 'backup/file-copy.txt'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Move file (copy + delete original)
|
||||||
|
await bucket.fastMove({
|
||||||
|
sourcePath: 'temp/draft.txt',
|
||||||
|
destinationPath: 'final/document.txt'
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 📂 Directory Management
|
||||||
|
|
||||||
|
SmartBucket provides powerful directory-like operations for organizing your files:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Get base directory
|
||||||
|
const baseDir = await bucket.getBaseDirectory();
|
||||||
|
|
||||||
|
// List directories and files
|
||||||
|
const directories = await baseDir.listDirectories();
|
||||||
|
const files = await baseDir.listFiles();
|
||||||
|
|
||||||
|
console.log(`📁 Found ${directories.length} directories`);
|
||||||
|
console.log(`📄 Found ${files.length} files`);
|
||||||
|
|
||||||
|
// Navigate subdirectories
|
||||||
|
const subDir = await baseDir.getSubDirectoryByName('projects/2024');
|
||||||
|
|
||||||
|
// Create nested file
|
||||||
|
await subDir.fastPut({
|
||||||
|
path: 'report.pdf',
|
||||||
|
contents: reportBuffer
|
||||||
|
});
|
||||||
|
|
||||||
|
// Get directory tree structure
|
||||||
|
const tree = await subDir.getTreeArray();
|
||||||
|
console.log('🌳 Directory tree:', tree);
|
||||||
|
|
||||||
|
// Create empty file as placeholder
|
||||||
|
await subDir.createEmptyFile('placeholder.txt');
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🌊 Streaming Operations
|
||||||
|
|
||||||
|
Handle large files efficiently with streaming:
|
||||||
|
|
||||||
|
#### Download Streams
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Node.js stream
|
||||||
|
const nodeStream = await bucket.fastGetStream(
|
||||||
|
{ path: 'large-video.mp4' },
|
||||||
|
'nodestream'
|
||||||
|
);
|
||||||
|
nodeStream.pipe(fs.createWriteStream('local-video.mp4'));
|
||||||
|
|
||||||
|
// Web stream (for modern environments)
|
||||||
|
const webStream = await bucket.fastGetStream(
|
||||||
|
{ path: 'large-file.zip' },
|
||||||
|
'webstream'
|
||||||
|
);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Upload Streams
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Stream upload from file
|
||||||
|
const readStream = fs.createReadStream('big-data.csv');
|
||||||
|
await bucket.fastPutStream({
|
||||||
|
path: 'uploads/big-data.csv',
|
||||||
|
stream: readStream,
|
||||||
|
metadata: {
|
||||||
|
contentType: 'text/csv',
|
||||||
|
userMetadata: {
|
||||||
|
uploadedBy: 'data-team',
|
||||||
|
version: '2.0'
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Reactive Streams with RxJS
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Get file as ReplaySubject for reactive programming
|
||||||
|
const replaySubject = await bucket.fastGetReplaySubject({
|
||||||
|
path: 'data/sensor-readings.json',
|
||||||
|
chunkSize: 1024
|
||||||
|
});
|
||||||
|
|
||||||
|
replaySubject.subscribe({
|
||||||
|
next: (chunk) => processChunk(chunk),
|
||||||
|
complete: () => console.log('✅ Stream complete')
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🔒 File Locking
|
||||||
|
|
||||||
|
Prevent accidental modifications with file locking:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const file = await bucket.getBaseDirectory()
|
||||||
|
.getFileStrict({ path: 'important-config.json' });
|
||||||
|
|
||||||
|
// Lock file for 10 minutes
|
||||||
|
await file.lock({ timeoutMillis: 600000 });
|
||||||
|
console.log('🔒 File locked');
|
||||||
|
|
||||||
|
// Try to modify locked file (will throw error)
|
||||||
|
try {
|
||||||
|
await file.delete();
|
||||||
|
} catch (error) {
|
||||||
|
console.log('❌ Cannot delete locked file');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unlock when done
|
||||||
|
await file.unlock();
|
||||||
|
console.log('🔓 File unlocked');
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🏷️ Metadata Management
|
||||||
|
|
||||||
|
Attach and manage metadata for your files:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const file = await bucket.getBaseDirectory()
|
||||||
|
.getFileStrict({ path: 'document.pdf' });
|
||||||
|
|
||||||
|
// Get metadata handler
|
||||||
|
const metadata = await file.getMetaData();
|
||||||
|
|
||||||
|
// Set custom metadata
|
||||||
|
await metadata.setCustomMetaData({
|
||||||
|
key: 'author',
|
||||||
|
value: 'John Doe'
|
||||||
|
});
|
||||||
|
|
||||||
|
await metadata.setCustomMetaData({
|
||||||
|
key: 'department',
|
||||||
|
value: 'Engineering'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Retrieve metadata
|
||||||
|
const author = await metadata.getCustomMetaData({ key: 'author' });
|
||||||
|
console.log(`📝 Author: ${author}`);
|
||||||
|
|
||||||
|
// Get all metadata
|
||||||
|
const allMeta = await metadata.getAllCustomMetaData();
|
||||||
|
console.log('📋 All metadata:', allMeta);
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🗑️ Trash & Recovery
|
||||||
|
|
||||||
|
SmartBucket includes an intelligent trash system for safe file deletion:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const file = await bucket.getBaseDirectory()
|
||||||
|
.getFileStrict({ path: 'important-data.xlsx' });
|
||||||
|
|
||||||
|
// Move to trash instead of permanent deletion
|
||||||
|
await file.delete({ mode: 'trash' });
|
||||||
|
console.log('🗑️ File moved to trash');
|
||||||
|
|
||||||
|
// Access trash
|
||||||
|
const trash = await bucket.getTrash();
|
||||||
|
const trashDir = await trash.getTrashDir();
|
||||||
|
const trashedFiles = await trashDir.listFiles();
|
||||||
|
console.log(`📦 ${trashedFiles.length} files in trash`);
|
||||||
|
|
||||||
|
// Restore from trash
|
||||||
|
const trashedFile = await bucket.getBaseDirectory()
|
||||||
|
.getFileStrict({
|
||||||
|
path: 'important-data.xlsx',
|
||||||
|
getFromTrash: true
|
||||||
|
});
|
||||||
|
|
||||||
|
await trashedFile.restore({ useOriginalPath: true });
|
||||||
|
console.log('♻️ File restored successfully');
|
||||||
|
|
||||||
|
// Permanent deletion from trash
|
||||||
|
await trash.emptyTrash();
|
||||||
|
console.log('🧹 Trash emptied');
|
||||||
|
```
|
||||||
|
|
||||||
|
### ⚡ Advanced Features
|
||||||
|
|
||||||
|
#### File Statistics
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Get detailed file statistics
|
||||||
|
const stats = await bucket.fastStat({ path: 'document.pdf' });
|
||||||
|
console.log(`📊 Size: ${stats.size} bytes`);
|
||||||
|
console.log(`📅 Last modified: ${stats.lastModified}`);
|
||||||
|
console.log(`🏷️ ETag: ${stats.etag}`);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Magic Bytes Detection
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Read first bytes for file type detection
|
||||||
|
const magicBytes = await bucket.getMagicBytes({
|
||||||
|
path: 'mystery-file',
|
||||||
|
length: 16
|
||||||
|
});
|
||||||
|
|
||||||
|
// Or from a File object
|
||||||
|
const file = await bucket.getBaseDirectory()
|
||||||
|
.getFileStrict({ path: 'image.jpg' });
|
||||||
|
const magic = await file.getMagicBytes({ length: 4 });
|
||||||
|
console.log(`🔮 Magic bytes: ${magic.toString('hex')}`);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### JSON Data Operations
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
const file = await bucket.getBaseDirectory()
|
||||||
|
.getFileStrict({ path: 'config.json' });
|
||||||
|
|
||||||
|
// Read JSON data
|
||||||
|
const config = await file.getJsonData();
|
||||||
|
console.log('⚙️ Config loaded:', config);
|
||||||
|
|
||||||
|
// Update JSON data
|
||||||
|
config.version = '2.0';
|
||||||
|
config.updated = new Date().toISOString();
|
||||||
|
await file.writeJsonData(config);
|
||||||
|
console.log('💾 Config updated');
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Directory & File Type Detection
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Check if path is a directory
|
||||||
|
const isDir = await bucket.isDirectory({ path: 'uploads/' });
|
||||||
|
|
||||||
|
// Check if path is a file
|
||||||
|
const isFile = await bucket.isFile({ path: 'uploads/document.pdf' });
|
||||||
|
|
||||||
|
console.log(`Is directory: ${isDir ? '📁' : '❌'}`);
|
||||||
|
console.log(`Is file: ${isFile ? '📄' : '❌'}`);
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Clean Bucket Contents
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Remove all files and directories (use with caution!)
|
||||||
|
await bucket.cleanAllContents();
|
||||||
|
console.log('🧹 Bucket cleaned');
|
||||||
|
```
|
||||||
|
|
||||||
|
### ☁️ Cloud Provider Support
|
||||||
|
|
||||||
|
SmartBucket works seamlessly with:
|
||||||
|
|
||||||
|
- ✅ **AWS S3** - Full compatibility with S3 API
|
||||||
|
- ✅ **Google Cloud Storage** - Via S3-compatible API
|
||||||
|
- ✅ **MinIO** - Self-hosted S3-compatible storage
|
||||||
|
- ✅ **DigitalOcean Spaces** - S3-compatible object storage
|
||||||
|
- ✅ **Backblaze B2** - Cost-effective cloud storage
|
||||||
|
- ✅ **Wasabi** - High-performance S3-compatible storage
|
||||||
|
- ✅ **Any S3-compatible provider**
|
||||||
|
|
||||||
|
The library automatically handles provider quirks and optimizes operations for each platform while maintaining a consistent API.
|
||||||
|
|
||||||
|
### 🔧 Advanced Configuration
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
// Configure with custom options
|
||||||
|
const smartBucket = new SmartBucket({
|
||||||
|
accessKey: process.env.S3_ACCESS_KEY,
|
||||||
|
accessSecret: process.env.S3_SECRET_KEY,
|
||||||
|
endpoint: process.env.S3_ENDPOINT,
|
||||||
|
port: 443,
|
||||||
|
useSsl: true,
|
||||||
|
region: 'eu-central-1',
|
||||||
|
// Additional S3 client options can be passed through
|
||||||
|
});
|
||||||
|
|
||||||
|
// Environment-based configuration
|
||||||
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
|
const qenv = new Qenv('./', './.nogit/');
|
||||||
|
|
||||||
|
const smartBucket = new SmartBucket({
|
||||||
|
accessKey: await qenv.getEnvVarOnDemandStrict('S3_ACCESS_KEY'),
|
||||||
|
accessSecret: await qenv.getEnvVarOnDemandStrict('S3_SECRET'),
|
||||||
|
endpoint: await qenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🧪 Testing
|
||||||
|
|
||||||
|
SmartBucket is thoroughly tested. Run tests with:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pnpm test
|
||||||
|
```
|
||||||
|
|
||||||
|
### 🤝 Best Practices
|
||||||
|
|
||||||
|
1. **Always use strict mode** for critical operations to catch errors early
|
||||||
|
2. **Implement proper error handling** for network and permission issues
|
||||||
|
3. **Use streaming** for large files to optimize memory usage
|
||||||
|
4. **Leverage metadata** for organizing and searching files
|
||||||
|
5. **Enable trash mode** for important data to prevent accidental loss
|
||||||
|
6. **Lock files** during critical operations to prevent race conditions
|
||||||
|
7. **Clean up resources** properly when done
|
||||||
|
|
||||||
|
## License and Legal Information
|
||||||
|
|
||||||
|
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
|
||||||
|
|
||||||
|
**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.
|
||||||
|
|
||||||
|
### Trademarks
|
||||||
|
|
||||||
|
This project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.
|
||||||
|
|
||||||
|
### Company Information
|
||||||
|
|
||||||
|
Task Venture Capital GmbH
|
||||||
|
Registered at District court Bremen HRB 35230 HB, Germany
|
||||||
|
|
||||||
|
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
|
||||||
|
|
||||||
|
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
|
0
test/helpers/prepare.ts
Normal file
0
test/helpers/prepare.ts
Normal file
7
test/test.metadata.ts
Normal file
7
test/test.metadata.ts
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||||
|
|
||||||
|
tap.test('test metadata functionality', async () => {
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
export default tap.start();
|
92
test/test.trash.ts
Normal file
92
test/test.trash.ts
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { jestExpect } from '@push.rocks/tapbundle/node';
|
||||||
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
|
|
||||||
|
import * as smartbucket from '../ts/index.js';
|
||||||
|
|
||||||
|
const testQenv = new Qenv('./', './.nogit/');
|
||||||
|
|
||||||
|
let testSmartbucket: smartbucket.SmartBucket;
|
||||||
|
let myBucket: smartbucket.Bucket;
|
||||||
|
let baseDirectory: smartbucket.Directory;
|
||||||
|
|
||||||
|
tap.test('should create a valid smartbucket', async () => {
|
||||||
|
testSmartbucket = new smartbucket.SmartBucket({
|
||||||
|
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
|
||||||
|
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
|
||||||
|
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||||
|
});
|
||||||
|
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
|
||||||
|
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
|
||||||
|
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||||
|
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should clean all contents', async () => {
|
||||||
|
await myBucket.cleanAllContents();
|
||||||
|
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
|
||||||
|
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should delete a file into the normally', async () => {
|
||||||
|
const path = 'trashtest/trashme.txt';
|
||||||
|
const file = await myBucket.fastPutStrict({
|
||||||
|
path,
|
||||||
|
contents: 'I\'m in the trash test content!',
|
||||||
|
});
|
||||||
|
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
|
||||||
|
console.log(fileMetadata.toString());
|
||||||
|
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
|
||||||
|
await file.delete({ mode: 'permanent' });
|
||||||
|
expect((await (await myBucket.getBaseDirectory()).listFiles()).length).toEqual(0);
|
||||||
|
expect((await (await myBucket.getBaseDirectory()).listDirectories()).length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should put a file into the trash', async () => {
|
||||||
|
const path = 'trashtest/trashme.txt';
|
||||||
|
const file = await myBucket.fastPutStrict({
|
||||||
|
path,
|
||||||
|
contents: 'I\'m in the trash test content!',
|
||||||
|
});
|
||||||
|
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
|
||||||
|
console.log(fileMetadata.toString());
|
||||||
|
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
|
||||||
|
await file.delete({ mode: 'trash' });
|
||||||
|
|
||||||
|
const getTrashContents = async () => {
|
||||||
|
const trash = await myBucket.getTrash();
|
||||||
|
const trashDir = await trash.getTrashDir();
|
||||||
|
return await trashDir.listFiles();
|
||||||
|
}
|
||||||
|
|
||||||
|
const trashedFiles = await getTrashContents();
|
||||||
|
expect(trashedFiles.length).toEqual(2);
|
||||||
|
|
||||||
|
const trashedMetaFile = trashedFiles.find(file => file.name.endsWith('.metadata'));
|
||||||
|
expect(trashedMetaFile).toBeDefined();
|
||||||
|
expect(trashedMetaFile).toBeInstanceOf(smartbucket.File);
|
||||||
|
|
||||||
|
jestExpect(await trashedMetaFile!.getJsonData()).toEqual({
|
||||||
|
custom_recycle: {
|
||||||
|
deletedAt: jestExpect.any(Number),
|
||||||
|
originalPath: "trashtest/trashme.txt",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should restore a file from trash', async () => {
|
||||||
|
const baseDirectory = await myBucket.getBaseDirectory();
|
||||||
|
const file = await baseDirectory.getFileStrict({
|
||||||
|
path: 'trashtest/trashme.txt',
|
||||||
|
getFromTrash: true
|
||||||
|
});
|
||||||
|
const trashFileMeta = await file.getMetaData();
|
||||||
|
const data = await trashFileMeta.getCustomMetaData({
|
||||||
|
key: 'recycle'
|
||||||
|
});
|
||||||
|
expect(file).toBeInstanceOf(smartbucket.File);
|
||||||
|
await file.restore();
|
||||||
|
});
|
||||||
|
|
||||||
|
|
||||||
|
export default tap.start();
|
101
test/test.ts
101
test/test.ts
@@ -1,4 +1,4 @@
|
|||||||
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
import { Qenv } from '@push.rocks/qenv';
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
|
|
||||||
import * as smartbucket from '../ts/index.js';
|
import * as smartbucket from '../ts/index.js';
|
||||||
@@ -11,50 +11,84 @@ let baseDirectory: smartbucket.Directory;
|
|||||||
|
|
||||||
tap.test('should create a valid smartbucket', async () => {
|
tap.test('should create a valid smartbucket', async () => {
|
||||||
testSmartbucket = new smartbucket.SmartBucket({
|
testSmartbucket = new smartbucket.SmartBucket({
|
||||||
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
|
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
|
||||||
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
|
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
|
||||||
endpoint: 's3.eu-central-1.wasabisys.com',
|
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||||
});
|
});
|
||||||
|
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
|
||||||
|
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
|
||||||
|
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||||
|
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should clean all contents', async () => {
|
||||||
|
await myBucket.cleanAllContents();
|
||||||
|
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
|
||||||
|
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.skip.test('should create testbucket', async () => {
|
tap.skip.test('should create testbucket', async () => {
|
||||||
// await testSmartbucket.createBucket('testzone');
|
// await testSmartbucket.createBucket('testzone2');
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.skip.test('should remove testbucket', async () => {
|
tap.skip.test('should remove testbucket', async () => {
|
||||||
// await testSmartbucket.removeBucket('testzone');
|
// await testSmartbucket.removeBucket('testzone2');
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('should get a bucket', async () => {
|
|
||||||
myBucket = await testSmartbucket.getBucketByName('testzone');
|
|
||||||
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
|
||||||
expect(myBucket.name).toEqual('testzone');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Fast operations
|
// Fast operations
|
||||||
tap.test('should store data in bucket fast', async () => {
|
tap.test('should store data in bucket fast', async () => {
|
||||||
await myBucket.fastStore('hithere/socool.txt', 'hi there!');
|
await myBucket.fastPut({
|
||||||
|
path: 'hithere/socool.txt',
|
||||||
|
contents: 'hi there!',
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('should get data in bucket', async () => {
|
tap.test('should get data in bucket', async () => {
|
||||||
const fileString = await myBucket.fastGet('hithere/socool.txt');
|
const fileString = await myBucket.fastGet({
|
||||||
const fileStringStream = await myBucket.fastGetStream('hithere/socool.txt');
|
path: 'hithere/socool.txt',
|
||||||
|
});
|
||||||
|
const fileStringStream = await myBucket.fastGetStream(
|
||||||
|
{
|
||||||
|
path: 'hithere/socool.txt',
|
||||||
|
},
|
||||||
|
'nodestream'
|
||||||
|
);
|
||||||
console.log(fileString);
|
console.log(fileString);
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('should delete data in bucket', async () => {
|
tap.test('should delete data in bucket', async () => {
|
||||||
await myBucket.fastRemove('hithere/socool.txt');
|
await myBucket.fastRemove({
|
||||||
|
path: 'hithere/socool.txt',
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
// fs operations
|
// fs operations
|
||||||
|
|
||||||
tap.test('prepare for directory style tests', async () => {
|
tap.test('prepare for directory style tests', async () => {
|
||||||
await myBucket.fastStore('dir1/file1.txt', 'dir1/file1.txt content');
|
await myBucket.fastPut({
|
||||||
await myBucket.fastStore('dir1/file2.txt', 'dir1/file2.txt content');
|
path: 'dir1/file1.txt',
|
||||||
await myBucket.fastStore('dir2/file1.txt', 'dir2/file1.txt content');
|
contents: 'dir1/file1.txt content',
|
||||||
await myBucket.fastStore('dir3/file1.txt', 'dir3/file1.txt content');
|
});
|
||||||
await myBucket.fastStore('dir3/dir4/file1.txt', 'dir3/dir4/file1.txt content');
|
await myBucket.fastPut({
|
||||||
await myBucket.fastStore('file1.txt', 'file1 content');
|
path: 'dir1/file2.txt',
|
||||||
|
contents: 'dir1/file2.txt content',
|
||||||
|
});
|
||||||
|
await myBucket.fastPut({
|
||||||
|
path: 'dir2/file1.txt',
|
||||||
|
contents: 'dir2/file1.txt content',
|
||||||
|
});
|
||||||
|
await myBucket.fastPut({
|
||||||
|
path: 'dir3/file1.txt',
|
||||||
|
contents: 'dir3/file1.txt content',
|
||||||
|
});
|
||||||
|
await myBucket.fastPut({
|
||||||
|
path: 'dir3/dir4/file1.txt',
|
||||||
|
contents: 'dir3/dir4/file1.txt content',
|
||||||
|
});
|
||||||
|
await myBucket.fastPut({
|
||||||
|
path: '/file1.txt',
|
||||||
|
contents: 'file1 content',
|
||||||
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('should get base directory', async () => {
|
tap.test('should get base directory', async () => {
|
||||||
@@ -72,17 +106,24 @@ tap.test('should get base directory', async () => {
|
|||||||
tap.test('should correctly build paths for sub directories', async () => {
|
tap.test('should correctly build paths for sub directories', async () => {
|
||||||
const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
|
const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
|
||||||
expect(dir4).toBeInstanceOf(smartbucket.Directory);
|
expect(dir4).toBeInstanceOf(smartbucket.Directory);
|
||||||
const dir4BasePath = dir4.getBasePath();
|
const dir4BasePath = dir4?.getBasePath();
|
||||||
console.log(dir4BasePath);
|
console.log(dir4BasePath);
|
||||||
|
expect(dir4BasePath).toEqual('dir3/dir4/');
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('clean up directory style tests', async () => {
|
tap.test('clean up directory style tests', async () => {
|
||||||
await myBucket.fastRemove('dir1/file1.txt');
|
await myBucket.fastRemove({
|
||||||
await myBucket.fastRemove('dir1/file2.txt');
|
path: 'dir1/file1.txt',
|
||||||
await myBucket.fastRemove('dir2/file1.txt');
|
});
|
||||||
await myBucket.fastRemove('dir3/file1.txt');
|
await myBucket.fastRemove({
|
||||||
await myBucket.fastRemove('dir3/dir4/file1.txt');
|
path: 'dir1/file2.txt',
|
||||||
await myBucket.fastRemove('file1.txt');
|
});
|
||||||
|
await myBucket.fastRemove({
|
||||||
|
path: 'dir2/file1.txt',
|
||||||
|
});
|
||||||
|
await myBucket.fastRemove({ path: 'dir3/file1.txt' });
|
||||||
|
await myBucket.fastRemove({ path: 'dir3/dir4/file1.txt' });
|
||||||
|
await myBucket.fastRemove({ path: 'file1.txt' });
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.start();
|
export default tap.start();
|
@@ -1,8 +1,8 @@
|
|||||||
/**
|
/**
|
||||||
* autocreated commitinfo by @pushrocks/commitinfo
|
* autocreated commitinfo by @push.rocks/commitinfo
|
||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@push.rocks/smartbucket',
|
name: '@push.rocks/smartbucket',
|
||||||
version: '2.0.4',
|
version: '3.3.9',
|
||||||
description: 'simple cloud independent object storage'
|
description: 'A TypeScript library providing a cloud-agnostic interface for managing object storage with functionalities like bucket management, file and directory operations, and advanced features such as metadata handling and file locking.'
|
||||||
}
|
}
|
||||||
|
526
ts/classes.bucket.ts
Normal file
526
ts/classes.bucket.ts
Normal file
@@ -0,0 +1,526 @@
|
|||||||
|
// classes.bucket.ts
|
||||||
|
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
import { SmartBucket } from './classes.smartbucket.js';
|
||||||
|
import { Directory } from './classes.directory.js';
|
||||||
|
import { File } from './classes.file.js';
|
||||||
|
import { Trash } from './classes.trash.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The bucket class exposes the basic functionality of a bucket.
|
||||||
|
* The functions of the bucket alone are enough to
|
||||||
|
* operate in S3 basic fashion on blobs of data.
|
||||||
|
*/
|
||||||
|
export class Bucket {
|
||||||
|
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
|
||||||
|
const command = new plugins.s3.ListBucketsCommand({});
|
||||||
|
const buckets = await smartbucketRef.s3Client.send(command);
|
||||||
|
const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);
|
||||||
|
|
||||||
|
if (foundBucket) {
|
||||||
|
console.log(`bucket with name ${bucketNameArg} exists.`);
|
||||||
|
console.log(`Taking this as base for new Bucket instance`);
|
||||||
|
return new this(smartbucketRef, bucketNameArg);
|
||||||
|
} else {
|
||||||
|
console.log(`did not find bucket by name: ${bucketNameArg}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
||||||
|
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
|
||||||
|
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
|
||||||
|
return new Bucket(smartbucketRef, bucketName);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
||||||
|
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
|
||||||
|
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
|
||||||
|
}
|
||||||
|
|
||||||
|
public smartbucketRef: SmartBucket;
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
constructor(smartbucketRef: SmartBucket, bucketName: string) {
|
||||||
|
this.smartbucketRef = smartbucketRef;
|
||||||
|
this.name = bucketName;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the base directory of the bucket
|
||||||
|
*/
|
||||||
|
public async getBaseDirectory(): Promise<Directory> {
|
||||||
|
return new Directory(this, null!, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the trash directory
|
||||||
|
*/
|
||||||
|
public async getTrash(): Promise<Trash> {
|
||||||
|
const trash = new Trash(this);
|
||||||
|
return trash;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getDirectoryFromPath(
|
||||||
|
pathDescriptorArg: interfaces.IPathDecriptor
|
||||||
|
): Promise<Directory> {
|
||||||
|
if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
|
||||||
|
return this.getBaseDirectory();
|
||||||
|
}
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
|
||||||
|
const baseDirectory = await this.getBaseDirectory();
|
||||||
|
return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
|
||||||
|
getEmptyDirectory: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===============
|
||||||
|
// Fast Operations
|
||||||
|
// ===============
|
||||||
|
|
||||||
|
/**
|
||||||
|
* store file
|
||||||
|
*/
|
||||||
|
public async fastPut(
|
||||||
|
optionsArg: interfaces.IPathDecriptor & {
|
||||||
|
contents: string | Buffer;
|
||||||
|
overwrite?: boolean;
|
||||||
|
}
|
||||||
|
): Promise<File | null> {
|
||||||
|
try {
|
||||||
|
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
|
||||||
|
const exists = await this.fastExists({ path: reducedPath });
|
||||||
|
|
||||||
|
if (exists && !optionsArg.overwrite) {
|
||||||
|
const errorText = `Object already exists at path '${reducedPath}' in bucket '${this.name}'.`;
|
||||||
|
console.error(errorText);
|
||||||
|
return null;
|
||||||
|
} else if (exists && optionsArg.overwrite) {
|
||||||
|
console.log(
|
||||||
|
`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const command = new plugins.s3.PutObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: reducedPath,
|
||||||
|
Body: optionsArg.contents,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
|
||||||
|
console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
|
||||||
|
const parsedPath = plugins.path.parse(reducedPath);
|
||||||
|
return new File({
|
||||||
|
directoryRefArg: await this.getDirectoryFromPath({
|
||||||
|
path: parsedPath.dir,
|
||||||
|
}),
|
||||||
|
fileName: parsedPath.base,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fastPutStrict(...args: Parameters<Bucket['fastPut']>) {
|
||||||
|
const file = await this.fastPut(...args);
|
||||||
|
if (!file) {
|
||||||
|
throw new Error(`File not stored at path '${args[0].path}'`);
|
||||||
|
}
|
||||||
|
return file;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* get file
|
||||||
|
*/
|
||||||
|
public async fastGet(optionsArg: { path: string }): Promise<Buffer> {
|
||||||
|
const done = plugins.smartpromise.defer();
|
||||||
|
let completeFile: Buffer;
|
||||||
|
const replaySubject = await this.fastGetReplaySubject(optionsArg);
|
||||||
|
const subscription = replaySubject.subscribe({
|
||||||
|
next: (chunk) => {
|
||||||
|
if (completeFile) {
|
||||||
|
completeFile = Buffer.concat([completeFile, chunk]);
|
||||||
|
} else {
|
||||||
|
completeFile = chunk;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
complete: () => {
|
||||||
|
done.resolve();
|
||||||
|
subscription.unsubscribe();
|
||||||
|
},
|
||||||
|
error: (err) => {
|
||||||
|
console.log(err);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await done.promise;
|
||||||
|
return completeFile!;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* good when time to first byte is important
|
||||||
|
* and multiple subscribers are expected
|
||||||
|
* @param optionsArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async fastGetReplaySubject(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
|
||||||
|
const command = new plugins.s3.GetObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
const response = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
|
||||||
|
|
||||||
|
// Convert the stream to a format that supports piping
|
||||||
|
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||||
|
if (typeof stream.pipe === 'function') {
|
||||||
|
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
|
||||||
|
writeFunction: async (chunk) => {
|
||||||
|
replaySubject.next(chunk);
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
finalFunction: async (cb) => {
|
||||||
|
replaySubject.complete();
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
stream.pipe(duplexStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
return replaySubject;
|
||||||
|
}
|
||||||
|
|
||||||
|
public fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'webstream'
|
||||||
|
): Promise<ReadableStream>;
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'nodestream'
|
||||||
|
): Promise<plugins.stream.Readable>;
|
||||||
|
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: { path: string },
|
||||||
|
typeArg: 'webstream' | 'nodestream' = 'nodestream'
|
||||||
|
): Promise<ReadableStream | plugins.stream.Readable> {
|
||||||
|
const command = new plugins.s3.GetObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
const response = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||||
|
|
||||||
|
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
|
||||||
|
writeFunction: async (chunk) => {
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
finalFunction: async (cb) => {
|
||||||
|
return null!;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (typeof stream.pipe === 'function') {
|
||||||
|
stream.pipe(duplexStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeArg === 'nodestream') {
|
||||||
|
return duplexStream;
|
||||||
|
}
|
||||||
|
if (typeArg === 'webstream') {
|
||||||
|
return (await duplexStream.getWebStreams()).readable;
|
||||||
|
}
|
||||||
|
throw new Error('unknown typeArg');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * store file as stream
 *
 * Streams `readableStream` into this bucket at `path`. If an object already
 * exists there and `overwrite` is not set, the call logs an error and returns
 * without writing (it does NOT throw).
 * NOTE(review): the exists-check and the put are not atomic — a concurrent
 * writer can still race between them; confirm this is acceptable.
 * @param optionsArg.path - destination key inside this bucket
 * @param optionsArg.readableStream - node or web stream providing the contents
 * @param optionsArg.nativeMetadata - optional S3 user metadata to attach
 * @param optionsArg.overwrite - allow replacing an existing object
 * @throws rethrows any error from the underlying S3 client
 */
public async fastPutStream(optionsArg: {
  path: string;
  readableStream: plugins.stream.Readable | ReadableStream;
  nativeMetadata?: { [key: string]: string };
  overwrite?: boolean;
}): Promise<void> {
  try {
    const exists = await this.fastExists({ path: optionsArg.path });

    // deliberate best-effort semantics: refuse (without throwing) unless overwrite is set
    if (exists && !optionsArg.overwrite) {
      console.error(
        `Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`
      );
      return;
    } else if (exists && optionsArg.overwrite) {
      console.log(
        `Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
      );
    } else {
      console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
    }

    const command = new plugins.s3.PutObjectCommand({
      Bucket: this.name,
      Key: optionsArg.path,
      Body: optionsArg.readableStream,
      Metadata: optionsArg.nativeMetadata,
    });
    await this.smartbucketRef.s3Client.send(command);

    console.log(
      `Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
    );
  } catch (error) {
    console.error(
      `Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
      error
    );
    throw error;
  }
}
|
||||||
|
|
||||||
|
public async fastCopy(optionsArg: {
|
||||||
|
sourcePath: string;
|
||||||
|
destinationPath?: string;
|
||||||
|
targetBucket?: Bucket;
|
||||||
|
nativeMetadata?: { [key: string]: string };
|
||||||
|
deleteExistingNativeMetadata?: boolean;
|
||||||
|
}): Promise<void> {
|
||||||
|
try {
|
||||||
|
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;
|
||||||
|
|
||||||
|
// Retrieve current object information to use in copy conditions
|
||||||
|
const currentObjInfo = await this.smartbucketRef.s3Client.send(
|
||||||
|
new plugins.s3.HeadObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.sourcePath,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
// Prepare new metadata
|
||||||
|
const newNativeMetadata = {
|
||||||
|
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
|
||||||
|
...optionsArg.nativeMetadata,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Define the copy operation
|
||||||
|
const copySource = `${this.name}/${optionsArg.sourcePath}`;
|
||||||
|
const command = new plugins.s3.CopyObjectCommand({
|
||||||
|
Bucket: targetBucketName,
|
||||||
|
CopySource: copySource,
|
||||||
|
Key: optionsArg.destinationPath || optionsArg.sourcePath,
|
||||||
|
Metadata: newNativeMetadata,
|
||||||
|
MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Error updating metadata:', err);
|
||||||
|
throw err; // rethrow to allow caller to handle
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Move object from one path to another within the same bucket or to another bucket
 *
 * Implemented as copy-then-delete: the object is copied via fastCopy and the
 * source is then removed from THIS bucket. If the destination already exists
 * and `overwrite` is not set, the call logs an error and returns without
 * moving anything (it does NOT throw).
 * @param optionsArg.sourcePath - key of the object in this bucket
 * @param optionsArg.destinationPath - destination key
 * @param optionsArg.targetBucket - destination bucket; defaults to this bucket
 * @param optionsArg.overwrite - allow replacing an existing destination object
 * @throws rethrows any error from fastCopy / fastRemove
 */
public async fastMove(optionsArg: {
  sourcePath: string;
  destinationPath: string;
  targetBucket?: Bucket;
  overwrite?: boolean;
}): Promise<void> {
  try {
    const destinationBucket = optionsArg.targetBucket || this;
    const exists = await destinationBucket.fastExists({
      path: optionsArg.destinationPath,
    });

    if (exists && !optionsArg.overwrite) {
      console.error(
        `Object already exists at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
      );
      return;
    } else if (exists && optionsArg.overwrite) {
      console.log(
        `Overwriting existing object at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
      );
    } else {
      console.log(
        `Moving object to path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
      );
    }

    // copy first, then remove the source; not atomic — a failure between the
    // two calls leaves the object present in both places
    await this.fastCopy(optionsArg);
    await this.fastRemove({ path: optionsArg.sourcePath });

    console.log(
      `Object '${optionsArg.sourcePath}' has been successfully moved to '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
    );
  } catch (error) {
    console.error(
      `Error moving object from '${optionsArg.sourcePath}' to '${optionsArg.destinationPath}':`,
      error
    );
    throw error;
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* removeObject
|
||||||
|
*/
|
||||||
|
public async fastRemove(optionsArg: { path: string }) {
|
||||||
|
const command = new plugins.s3.DeleteObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* check whether file exists
|
||||||
|
* @param optionsArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async fastExists(optionsArg: { path: string }): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const command = new plugins.s3.HeadObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
|
||||||
|
return true;
|
||||||
|
} catch (error: any) {
|
||||||
|
if (error?.name === 'NotFound') {
|
||||||
|
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
|
||||||
|
return false;
|
||||||
|
} else {
|
||||||
|
console.error('Error checking object existence:', error);
|
||||||
|
throw error; // Rethrow if it's not a NotFound error to handle unexpected issues
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* deletes this bucket
|
||||||
|
*/
|
||||||
|
public async delete() {
|
||||||
|
await this.smartbucketRef.s3Client.send(
|
||||||
|
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Issues a HeadObject for the given path descriptor and returns the raw
 * S3 response (size, metadata, etc. as provided by the SDK).
 */
public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
  const resolvedPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
  return this.smartbucketRef.s3Client.send(
    new plugins.s3.HeadObjectCommand({
      Bucket: this.name,
      Key: resolvedPath,
    })
  );
}
|
||||||
|
|
||||||
|
public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.name,
|
||||||
|
Prefix: checkPath,
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
return !!CommonPrefixes && CommonPrefixes.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.name,
|
||||||
|
Prefix: checkPath,
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const { Contents } = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
return !!Contents && Contents.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getMagicBytes(optionsArg: { path: string; length: number }): Promise<Buffer> {
|
||||||
|
try {
|
||||||
|
const command = new plugins.s3.GetObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
Range: `bytes=0-${optionsArg.length - 1}`,
|
||||||
|
});
|
||||||
|
const response = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
const chunks = [];
|
||||||
|
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||||
|
|
||||||
|
for await (const chunk of stream) {
|
||||||
|
chunks.push(chunk);
|
||||||
|
}
|
||||||
|
return Buffer.concat(chunks);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Error retrieving magic bytes from object at path '${optionsArg.path}' in bucket '${this.name}':`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async cleanAllContents(): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Define the command type explicitly
|
||||||
|
const listCommandInput: plugins.s3.ListObjectsV2CommandInput = {
|
||||||
|
Bucket: this.name,
|
||||||
|
};
|
||||||
|
|
||||||
|
let isTruncated = true;
|
||||||
|
let continuationToken: string | undefined = undefined;
|
||||||
|
|
||||||
|
while (isTruncated) {
|
||||||
|
// Add the continuation token to the input if present
|
||||||
|
const listCommand = new plugins.s3.ListObjectsV2Command({
|
||||||
|
...listCommandInput,
|
||||||
|
ContinuationToken: continuationToken,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Explicitly type the response
|
||||||
|
const response: plugins.s3.ListObjectsV2Output =
|
||||||
|
await this.smartbucketRef.s3Client.send(listCommand);
|
||||||
|
|
||||||
|
console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);
|
||||||
|
|
||||||
|
if (response.Contents && response.Contents.length > 0) {
|
||||||
|
// Delete objects in batches, mapping each item to { Key: string }
|
||||||
|
const deleteCommand = new plugins.s3.DeleteObjectsCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Delete: {
|
||||||
|
Objects: response.Contents.map((item) => ({ Key: item.Key! })),
|
||||||
|
Quiet: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.smartbucketRef.s3Client.send(deleteCommand);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update continuation token and truncation status
|
||||||
|
isTruncated = response.IsTruncated || false;
|
||||||
|
continuationToken = response.NextContinuationToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`All contents in bucket '${this.name}' have been deleted.`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error cleaning contents of bucket '${this.name}':`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
395
ts/classes.directory.ts
Normal file
395
ts/classes.directory.ts
Normal file
@@ -0,0 +1,395 @@
|
|||||||
|
// classes.directory.ts
|
||||||
|
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { Bucket } from './classes.bucket.js';
|
||||||
|
import { File } from './classes.file.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
|
||||||
|
export class Directory {
|
||||||
|
public bucketRef: Bucket;
|
||||||
|
public parentDirectoryRef: Directory;
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
public tree!: string[];
|
||||||
|
public files!: string[];
|
||||||
|
public folders!: string[];
|
||||||
|
|
||||||
|
constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
|
||||||
|
this.bucketRef = bucketRefArg;
|
||||||
|
this.parentDirectoryRef = parentDirectory;
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns an array of parent directories
|
||||||
|
*/
|
||||||
|
public getParentDirectories(): Directory[] {
|
||||||
|
let parentDirectories: Directory[] = [];
|
||||||
|
if (this.parentDirectoryRef) {
|
||||||
|
parentDirectories.push(this.parentDirectoryRef);
|
||||||
|
parentDirectories = parentDirectories.concat(this.parentDirectoryRef.getParentDirectories());
|
||||||
|
}
|
||||||
|
return parentDirectories;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns the directory level
|
||||||
|
*/
|
||||||
|
public getDirectoryLevel(): number {
|
||||||
|
return this.getParentDirectories().length;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* updates the base path
|
||||||
|
*/
|
||||||
|
public getBasePath(): string {
|
||||||
|
const parentDirectories = this.getParentDirectories();
|
||||||
|
let basePath = '';
|
||||||
|
for (const parentDir of parentDirectories) {
|
||||||
|
if (!parentDir.name && !basePath) {
|
||||||
|
basePath = this.name + '/';
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (parentDir.name && !basePath) {
|
||||||
|
basePath = parentDir.name + '/' + this.name + '/';
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (parentDir.name && basePath) {
|
||||||
|
basePath = parentDir.name + '/' + basePath;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return basePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets a file by name
|
||||||
|
*/
|
||||||
|
public async getFile(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
createWithContents?: string | Buffer;
|
||||||
|
getFromTrash?: boolean;
|
||||||
|
}): Promise<File | null> {
|
||||||
|
const pathDescriptor = {
|
||||||
|
directory: this,
|
||||||
|
path: optionsArg.path,
|
||||||
|
};
|
||||||
|
const exists = await this.bucketRef.fastExists({
|
||||||
|
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
|
||||||
|
});
|
||||||
|
if (!exists && optionsArg.getFromTrash) {
|
||||||
|
const trash = await this.bucketRef.getTrash();
|
||||||
|
const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
|
||||||
|
return trashedFile;
|
||||||
|
}
|
||||||
|
if (!exists && !optionsArg.createWithContents) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (!exists && optionsArg.createWithContents) {
|
||||||
|
await File.create({
|
||||||
|
directory: this,
|
||||||
|
name: optionsArg.path,
|
||||||
|
contents: optionsArg.createWithContents,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return new File({
|
||||||
|
directoryRefArg: this,
|
||||||
|
fileName: optionsArg.path,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets a file strictly
|
||||||
|
* @param args
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async getFileStrict(...args: Parameters<Directory['getFile']>) {
|
||||||
|
const file = await this.getFile(...args);
|
||||||
|
if (!file) {
|
||||||
|
throw new Error(`File not found at path '${args[0].path}'`);
|
||||||
|
}
|
||||||
|
return file;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* lists all files
|
||||||
|
*/
|
||||||
|
public async listFiles(): Promise<File[]> {
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.bucketRef.name,
|
||||||
|
Prefix: this.getBasePath(),
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||||
|
const fileArray: File[] = [];
|
||||||
|
|
||||||
|
response.Contents?.forEach((item) => {
|
||||||
|
if (item.Key && !item.Key.endsWith('/')) {
|
||||||
|
const subtractedPath = item.Key.replace(this.getBasePath(), '');
|
||||||
|
if (!subtractedPath.includes('/')) {
|
||||||
|
fileArray.push(
|
||||||
|
new File({
|
||||||
|
directoryRefArg: this,
|
||||||
|
fileName: subtractedPath,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return fileArray;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* lists all folders
|
||||||
|
*/
|
||||||
|
public async listDirectories(): Promise<Directory[]> {
|
||||||
|
try {
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.bucketRef.name,
|
||||||
|
Prefix: this.getBasePath(),
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||||
|
const directoryArray: Directory[] = [];
|
||||||
|
|
||||||
|
if (response.CommonPrefixes) {
|
||||||
|
response.CommonPrefixes.forEach((item) => {
|
||||||
|
if (item.Prefix) {
|
||||||
|
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
|
||||||
|
if (subtractedPath.endsWith('/')) {
|
||||||
|
const dirName = subtractedPath.slice(0, -1);
|
||||||
|
// Ensure the directory name is not empty (which would indicate the base directory itself)
|
||||||
|
if (dirName) {
|
||||||
|
directoryArray.push(new Directory(this.bucketRef, this, dirName));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return directoryArray;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error listing directories:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets an array that has all objects with a certain prefix
|
||||||
|
*/
|
||||||
|
public async getTreeArray() {
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.bucketRef.name,
|
||||||
|
Prefix: this.getBasePath(),
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||||
|
return response.Contents;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets a sub directory by name
|
||||||
|
*/
|
||||||
|
public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
|
||||||
|
/**
|
||||||
|
* in s3 a directory does not exist if it is empty
|
||||||
|
* this option returns a directory even if it is empty
|
||||||
|
*/
|
||||||
|
getEmptyDirectory?: boolean;
|
||||||
|
/**
|
||||||
|
* in s3 a directory does not exist if it is empty
|
||||||
|
* this option creates a directory even if it is empty using a initializer file
|
||||||
|
*/
|
||||||
|
createWithInitializerFile?: boolean;
|
||||||
|
/**
|
||||||
|
* if the path is a file path, it will be treated as a file and the parent directory will be returned
|
||||||
|
*/
|
||||||
|
couldBeFilePath?: boolean;
|
||||||
|
} = {}): Promise<Directory | null> {
|
||||||
|
|
||||||
|
const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");
|
||||||
|
|
||||||
|
optionsArg = {
|
||||||
|
getEmptyDirectory: false,
|
||||||
|
createWithInitializerFile: false,
|
||||||
|
...optionsArg,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
const getDirectory = async (directoryArg: Directory, dirNameToSearch: string, isFinalDirectory: boolean) => {
|
||||||
|
const directories = await directoryArg.listDirectories();
|
||||||
|
let returnDirectory = directories.find((directory) => {
|
||||||
|
return directory.name === dirNameToSearch;
|
||||||
|
});
|
||||||
|
if (returnDirectory) {
|
||||||
|
return returnDirectory;
|
||||||
|
}
|
||||||
|
if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
|
||||||
|
returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
|
||||||
|
}
|
||||||
|
if (isFinalDirectory && optionsArg.createWithInitializerFile) {
|
||||||
|
returnDirectory?.createEmptyFile('00init.txt');
|
||||||
|
}
|
||||||
|
return returnDirectory || null;
|
||||||
|
};
|
||||||
|
|
||||||
|
if (optionsArg.couldBeFilePath) {
|
||||||
|
const baseDirectory = await this.bucketRef.getBaseDirectory();
|
||||||
|
const existingFile = await baseDirectory.getFile({
|
||||||
|
path: dirNameArg,
|
||||||
|
});
|
||||||
|
if (existingFile) {
|
||||||
|
const adjustedPath = dirNameArg.substring(0, dirNameArg.lastIndexOf('/'));
|
||||||
|
return this.getSubDirectoryByName(adjustedPath);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let wantedDirectory: Directory | null = null;
|
||||||
|
let counter = 0;
|
||||||
|
for (const dirNameToSearch of dirNameArray) {
|
||||||
|
counter++;
|
||||||
|
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
|
||||||
|
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
return wantedDirectory || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
|
||||||
|
const directory = await this.getSubDirectoryByName(...args);
|
||||||
|
if (!directory) {
|
||||||
|
throw new Error(`Directory not found at path '${args[0]}'`);
|
||||||
|
}
|
||||||
|
return directory;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* moves the directory
|
||||||
|
*/
|
||||||
|
public async move() {
|
||||||
|
// TODO
|
||||||
|
throw new Error('Moving a directory is not yet implemented');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* creates an empty file within this directory
|
||||||
|
* @param relativePathArg
|
||||||
|
*/
|
||||||
|
public async createEmptyFile(relativePathArg: string) {
|
||||||
|
const emptyFile = await File.create({
|
||||||
|
directory: this,
|
||||||
|
name: relativePathArg,
|
||||||
|
contents: '',
|
||||||
|
});
|
||||||
|
return emptyFile;
|
||||||
|
}
|
||||||
|
|
||||||
|
// file operations
|
||||||
|
public async fastPut(optionsArg: { path: string; contents: string | Buffer }) {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
await this.bucketRef.fastPut({
|
||||||
|
path,
|
||||||
|
contents: optionsArg.contents,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fastGet(optionsArg: { path: string }) {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
const result = await this.bucketRef.fastGet({
|
||||||
|
path,
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'webstream'
|
||||||
|
): Promise<ReadableStream>;
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'nodestream'
|
||||||
|
): Promise<plugins.stream.Readable>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* fastGetStream
|
||||||
|
* @param optionsArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: { path: string },
|
||||||
|
typeArg: 'webstream' | 'nodestream'
|
||||||
|
): Promise<ReadableStream | plugins.stream.Readable> {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
const result = await this.bucketRef.fastGetStream(
|
||||||
|
{
|
||||||
|
path,
|
||||||
|
},
|
||||||
|
typeArg as any
|
||||||
|
);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* fast put stream
|
||||||
|
*/
|
||||||
|
public async fastPutStream(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
stream: plugins.stream.Readable;
|
||||||
|
}): Promise<void> {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
await this.bucketRef.fastPutStream({
|
||||||
|
path,
|
||||||
|
readableStream: optionsArg.stream,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* removes a file within the directory
|
||||||
|
* uses file class to make sure effects for metadata etc. are handled correctly
|
||||||
|
* @param optionsArg
|
||||||
|
*/
|
||||||
|
public async fastRemove(optionsArg: {
|
||||||
|
path: string
|
||||||
|
/**
|
||||||
|
* wether the file should be placed into trash. Default is false.
|
||||||
|
*/
|
||||||
|
mode?: 'permanent' | 'trash';
|
||||||
|
}) {
|
||||||
|
const file = await this.getFileStrict({
|
||||||
|
path: optionsArg.path,
|
||||||
|
});
|
||||||
|
await file.delete({
|
||||||
|
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* deletes the directory with all its contents
|
||||||
|
*/
|
||||||
|
public async delete(optionsArg: {
|
||||||
|
mode?: 'permanent' | 'trash';
|
||||||
|
}) {
|
||||||
|
const deleteDirectory = async (directoryArg: Directory) => {
|
||||||
|
const childDirectories = await directoryArg.listDirectories();
|
||||||
|
if (childDirectories.length === 0) {
|
||||||
|
console.log('Directory empty! Path complete!');
|
||||||
|
} else {
|
||||||
|
for (const childDir of childDirectories) {
|
||||||
|
await deleteDirectory(childDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const files = await directoryArg.listFiles();
|
||||||
|
for (const file of files) {
|
||||||
|
await file.delete({
|
||||||
|
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
};
|
||||||
|
await deleteDirectory(this);
|
||||||
|
}
|
||||||
|
}
|
303
ts/classes.file.ts
Normal file
303
ts/classes.file.ts
Normal file
@@ -0,0 +1,303 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
import { Directory } from './classes.directory.js';
|
||||||
|
import { MetaData } from './classes.metadata.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* represents a file in a directory
|
||||||
|
*/
|
||||||
|
export class File {
|
||||||
|
// STATIC
|
||||||
|
|
||||||
|
/**
|
||||||
|
* creates a file in draft mode
|
||||||
|
* you need to call .save() to store it in s3
|
||||||
|
* @param optionsArg
|
||||||
|
*/
|
||||||
|
public static async create(optionsArg: {
|
||||||
|
directory: Directory;
|
||||||
|
name: string;
|
||||||
|
contents: Buffer | string | plugins.stream.Readable;
|
||||||
|
/**
|
||||||
|
* if contents are of type string, you can specify the encoding here
|
||||||
|
*/
|
||||||
|
encoding?: 'utf8' | 'binary';
|
||||||
|
}): Promise<File> {
|
||||||
|
const contents =
|
||||||
|
typeof optionsArg.contents === 'string'
|
||||||
|
? Buffer.from(optionsArg.contents, optionsArg.encoding)
|
||||||
|
: optionsArg.contents;
|
||||||
|
const file = new File({
|
||||||
|
directoryRefArg: optionsArg.directory,
|
||||||
|
fileName: optionsArg.name,
|
||||||
|
});
|
||||||
|
if (contents instanceof plugins.stream.Readable) {
|
||||||
|
await optionsArg.directory.fastPutStream({
|
||||||
|
path: optionsArg.name,
|
||||||
|
stream: contents,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
await optionsArg.directory.fastPut({
|
||||||
|
path: optionsArg.name,
|
||||||
|
contents: contents,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return file;
|
||||||
|
}
|
||||||
|
|
||||||
|
// INSTANCE
public parentDirectoryRef: Directory; // directory this file lives in
public name: string; // file name relative to the parent directory
|
||||||
|
|
||||||
|
/**
|
||||||
|
* get the full path to the file
|
||||||
|
* @returns the full path to the file
|
||||||
|
*/
|
||||||
|
public getBasePath(): string {
|
||||||
|
return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Wraps an existing (or soon to exist) object as a File handle.
 */
constructor(optionsArg: { directoryRefArg: Directory; fileName: string }) {
  this.name = optionsArg.fileName;
  this.parentDirectoryRef = optionsArg.directoryRefArg;
}
|
||||||
|
|
||||||
|
public async getContentsAsString(): Promise<string> {
|
||||||
|
const fileBuffer = await this.getContents();
|
||||||
|
return fileBuffer.toString();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getContents(): Promise<Buffer> {
|
||||||
|
const resultBuffer = await this.parentDirectoryRef.bucketRef.fastGet({
|
||||||
|
path: this.getBasePath(),
|
||||||
|
});
|
||||||
|
return resultBuffer;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getReadStream(typeArg: 'webstream'): Promise<ReadableStream>;
public async getReadStream(typeArg: 'nodestream'): Promise<plugins.stream.Readable>;
/**
 * Opens a read stream over this file's contents, delegating to the
 * bucket's fastGetStream with this file's full path.
 * @param typeArg - 'webstream' or 'nodestream'
 * @returns the requested stream kind
 */
public async getReadStream(
  typeArg: 'nodestream' | 'webstream'
): Promise<ReadableStream | plugins.stream.Readable> {
  const readStream = this.parentDirectoryRef.bucketRef.fastGetStream(
    {
      path: this.getBasePath(),
    },
    typeArg as any // overloads on both sides line up; cast bridges the union
  );
  return readStream;
}
|
||||||
|
|
||||||
|
/**
 * deletes this file
 *
 * mode 'permanent' (default): removes the object, plus its companion
 * '.metadata' object if one exists.
 * mode 'trash': records the deletion time and original path in custom
 * metadata under the 'recycle' key, then moves the file into the bucket's
 * trash directory under a trash-specific key.
 * Finally re-lists the parent directory.
 * NOTE(review): the listFiles() result is discarded — presumably meant as a
 * refresh side effect; confirm it is still needed.
 */
public async delete(optionsArg?: { mode: 'trash' | 'permanent' }) {
  // default to permanent deletion when no mode is given
  optionsArg = {
    ...{
      mode: 'permanent',
    },
    ...optionsArg,
  };

  if (optionsArg.mode === 'permanent') {
    await this.parentDirectoryRef.bucketRef.fastRemove({
      path: this.getBasePath(),
    });
    // avoid infinite recursion: a metadata file has no metadata file of its own
    if (!this.name.endsWith('.metadata')) {
      if (await this.hasMetaData()) {
        const metadata = await this.getMetaData();
        await metadata.metadataFile.delete(optionsArg);
      }
    }
  } else if (optionsArg.mode === 'trash') {
    // remember when and from where the file was trashed, for later restore
    const metadata = await this.getMetaData();
    await metadata.storeCustomMetaData({
      key: 'recycle',
      value: {
        deletedAt: Date.now(),
        originalPath: this.getBasePath(),
      },
    });
    const trash = await this.parentDirectoryRef.bucketRef.getTrash();
    const trashDir = await trash.getTrashDir();
    await this.move({
      directory: trashDir,
      path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
    });
  }

  await this.parentDirectoryRef.listFiles();
}
|
||||||
|
|
||||||
|
/**
 * restores
 *
 * Restores a trashed file: the destination is `toPath` when given, otherwise
 * the original path recorded in the 'recycle' custom metadata at trash time.
 * The 'recycle' entry is removed before the move.
 * NOTE(review): `useOriginalPath` and `overwrite` are computed/defaulted here
 * but never read afterwards — confirm whether they should influence the move.
 */
public async restore(optionsArg: {
  useOriginalPath?: boolean;
  toPath?: string;
  overwrite?: boolean;
} = {}) {
  optionsArg = {
    // default: restore to the original path unless an explicit target is given
    useOriginalPath: (() => {
      return optionsArg.toPath ? false : true;
    })(),
    overwrite: false,
    ...optionsArg,
  };
  const metadata = await this.getMetaData();
  const moveToPath = optionsArg.toPath || (await metadata.getCustomMetaData({
    key: 'recycle'
  })).originalPath;
  // clear the trash marker before moving the file back
  await metadata.deleteCustomMetaData({
    key: 'recycle'
  })
  await this.move({
    path: moveToPath,
  });
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* allows locking the file
|
||||||
|
* @param optionsArg
|
||||||
|
*/
|
||||||
|
public async lock(optionsArg?: { timeoutMillis?: number }) {
|
||||||
|
const metadata = await this.getMetaData();
|
||||||
|
await metadata.setLock({
|
||||||
|
lock: 'locked',
|
||||||
|
expires: Date.now() + (optionsArg?.timeoutMillis || 1000),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* actively unlocks a file
|
||||||
|
*
|
||||||
|
*/
|
||||||
|
public async unlock(optionsArg?: {
|
||||||
|
/**
|
||||||
|
* unlock the file even if not locked from this instance
|
||||||
|
*/
|
||||||
|
force?: boolean;
|
||||||
|
}) {
|
||||||
|
const metadata = await this.getMetaData();
|
||||||
|
await metadata.removeLock({
|
||||||
|
force: optionsArg?.force || false,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async updateWithContents(optionsArg: {
|
||||||
|
contents: Buffer | string | plugins.stream.Readable | ReadableStream;
|
||||||
|
encoding?: 'utf8' | 'binary';
|
||||||
|
}) {
|
||||||
|
if (
|
||||||
|
optionsArg.contents instanceof plugins.stream.Readable ||
|
||||||
|
optionsArg.contents instanceof ReadableStream
|
||||||
|
) {
|
||||||
|
await this.parentDirectoryRef.bucketRef.fastPutStream({
|
||||||
|
path: this.getBasePath(),
|
||||||
|
readableStream: optionsArg.contents,
|
||||||
|
overwrite: true,
|
||||||
|
});
|
||||||
|
} else if (Buffer.isBuffer(optionsArg.contents)) {
|
||||||
|
await this.parentDirectoryRef.bucketRef.fastPut({
|
||||||
|
path: this.getBasePath(),
|
||||||
|
contents: optionsArg.contents,
|
||||||
|
overwrite: true,
|
||||||
|
});
|
||||||
|
} else if (typeof optionsArg.contents === 'string') {
|
||||||
|
await this.parentDirectoryRef.bucketRef.fastPut({
|
||||||
|
path: this.getBasePath(),
|
||||||
|
contents: Buffer.from(optionsArg.contents, optionsArg.encoding),
|
||||||
|
overwrite: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* moves the file to another directory
|
||||||
|
*/
|
||||||
|
/**
 * Moves the file (and its sidecar .metadata file, if any) to another location.
 *
 * The destination may be given as a path, a directory, or both; when the
 * destination resolves to an existing directory, the file keeps its current
 * name inside that directory.
 *
 * @param pathDescriptorArg - destination path and/or directory
 */
public async move(pathDescriptorArg: interfaces.IPathDecriptor) {
  let moveToPath: string = '';
  const isDirectory = await this.parentDirectoryRef.bucketRef.isDirectory(pathDescriptorArg);
  if (isDirectory) {
    // Destination is a directory: keep the current file name inside it.
    moveToPath = await helpers.reducePathDescriptorToPath({
      ...pathDescriptorArg,
      path: plugins.path.join(pathDescriptorArg.path!, this.name),
    });
  } else {
    moveToPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
  }
  // lets move the file
  await this.parentDirectoryRef.bucketRef.fastMove({
    sourcePath: this.getBasePath(),
    destinationPath: moveToPath,
    overwrite: true,
  });
  // lets move the metadatafile (metadata files have no metadata of their own)
  if (!this.name.endsWith('.metadata')) {
    const metadata = await this.getMetaData();
    await this.parentDirectoryRef.bucketRef.fastMove({
      sourcePath: metadata.metadataFile.getBasePath(),
      destinationPath: moveToPath + '.metadata',
      overwrite: true,
    });
  }

  // lets update references of this
  const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
  this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
    await helpers.reducePathDescriptorToPath(pathDescriptorArg),
    {
      couldBeFilePath: true,
    }
  );
  // Use the resolved destination's basename: pathDescriptorArg.path may be
  // undefined (directory-only descriptor) or contain directory segments,
  // so assigning it directly would corrupt this.name.
  this.name = plugins.path.basename(moveToPath);
}
|
||||||
|
|
||||||
|
/**
 * Checks whether a sidecar .metadata file exists for this file.
 * Metadata files themselves never have metadata.
 */
public async hasMetaData(): Promise<boolean> {
  if (this.name.endsWith('.metadata')) {
    return false;
  }
  return MetaData.hasMetaData({
    file: this,
  });
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* allows updating the metadata of a file
|
||||||
|
* @param updatedMetadata
|
||||||
|
*/
|
||||||
|
/**
 * Returns a MetaData accessor for this file (creating the sidecar
 * .metadata file on first access).
 * @throws Error when called on a .metadata file itself
 */
public async getMetaData() {
  if (this.name.endsWith('.metadata')) {
    throw new Error('metadata files cannot have metadata');
  }
  return MetaData.createForFile({
    file: this,
  });
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the contents as json
|
||||||
|
*/
|
||||||
|
/**
 * Reads the file contents and parses them as JSON.
 * @returns the parsed value
 * @throws SyntaxError when the contents are not valid JSON
 */
public async getJsonData() {
  const json = await this.getContentsAsString();
  // JSON.parse is synchronous; the original's `await JSON.parse(...)` was a no-op.
  return JSON.parse(json);
}
|
||||||
|
|
||||||
|
/**
 * Serializes dataArg as JSON and writes it as the file's new contents.
 */
public async writeJsonData(dataArg: any) {
  const serialized = JSON.stringify(dataArg);
  await this.updateWithContents({
    contents: serialized,
  });
}
|
||||||
|
|
||||||
|
public async getMagicBytes(optionsArg: { length: number }): Promise<Buffer> {
|
||||||
|
return this.parentDirectoryRef.bucketRef.getMagicBytes({
|
||||||
|
path: this.getBasePath(),
|
||||||
|
length: optionsArg.length,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
122
ts/classes.metadata.ts
Normal file
122
ts/classes.metadata.ts
Normal file
@@ -0,0 +1,122 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
import { File } from './classes.file.js';
|
||||||
|
|
||||||
|
export class MetaData {
|
||||||
|
public static async hasMetaData(optionsArg: { file: File }) {
|
||||||
|
// lets find the existing metadata file
|
||||||
|
const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
|
||||||
|
path: optionsArg.file.name + '.metadata',
|
||||||
|
});
|
||||||
|
return !!existingFile;
|
||||||
|
}
|
||||||
|
|
||||||
|
// static
|
||||||
|
public static async createForFile(optionsArg: { file: File }) {
|
||||||
|
const metaData = new MetaData();
|
||||||
|
metaData.fileRef = optionsArg.file;
|
||||||
|
|
||||||
|
// lets find the existing metadata file
|
||||||
|
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
|
||||||
|
path: metaData.fileRef.name + '.metadata',
|
||||||
|
createWithContents: '{}',
|
||||||
|
});
|
||||||
|
|
||||||
|
return metaData;
|
||||||
|
}
|
||||||
|
|
||||||
|
// instance
|
||||||
|
/**
|
||||||
|
* the file that contains the metadata
|
||||||
|
*/
|
||||||
|
metadataFile!: File;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* the file that the metadata is for
|
||||||
|
*/
|
||||||
|
fileRef!: File;
|
||||||
|
|
||||||
|
public async getFileType(optionsArg?: {
|
||||||
|
useFileExtension?: boolean;
|
||||||
|
useMagicBytes?: boolean;
|
||||||
|
}): Promise<plugins.smartmime.IFileTypeResult | undefined> {
|
||||||
|
if ((optionsArg && optionsArg.useFileExtension) || !optionsArg) {
|
||||||
|
const fileType = await plugins.smartmime.detectMimeType({
|
||||||
|
path: this.fileRef.name,
|
||||||
|
});
|
||||||
|
|
||||||
|
return fileType;
|
||||||
|
}
|
||||||
|
if (optionsArg && optionsArg.useMagicBytes) {
|
||||||
|
const fileType = await plugins.smartmime.detectMimeType({
|
||||||
|
buffer: await this.fileRef.getMagicBytes({
|
||||||
|
length: 100,
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
return fileType;
|
||||||
|
}
|
||||||
|
throw new Error('optionsArg.useFileExtension and optionsArg.useMagicBytes cannot both be false');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the size of the fileRef
|
||||||
|
*/
|
||||||
|
public async getSizeInBytes(): Promise<number> {
|
||||||
|
const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
|
||||||
|
path: this.fileRef.getBasePath(),
|
||||||
|
});
|
||||||
|
return stat.ContentLength!;
|
||||||
|
}
|
||||||
|
|
||||||
|
private prefixCustomMetaData = 'custom_';
|
||||||
|
|
||||||
|
public async storeCustomMetaData<T = any>(optionsArg: { key: string; value: T }) {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
data[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
|
||||||
|
await this.metadataFile.writeJsonData(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getCustomMetaData<T = any>(optionsArg: { key: string }): Promise<T> {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
return data[this.prefixCustomMetaData + optionsArg.key];
|
||||||
|
}
|
||||||
|
|
||||||
|
public async deleteCustomMetaData(optionsArg: { key: string }) {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
delete data[this.prefixCustomMetaData + optionsArg.key];
|
||||||
|
await this.metadataFile.writeJsonData(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* set a lock on the ref file
|
||||||
|
* @param optionsArg
|
||||||
|
*/
|
||||||
|
public async setLock(optionsArg: { lock: string; expires: number }) {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
data.lock = optionsArg.lock;
|
||||||
|
data.lockExpires = optionsArg.expires;
|
||||||
|
await this.metadataFile.writeJsonData(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* remove the lock on the ref file
|
||||||
|
* @param optionsArg
|
||||||
|
*/
|
||||||
|
public async removeLock(optionsArg: { force: boolean }) {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
delete data.lock;
|
||||||
|
delete data.lockExpires;
|
||||||
|
await this.metadataFile.writeJsonData(data);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async checkLocked(): Promise<boolean> {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
return data.lock && data.lockExpires > Date.now();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getLockInfo(): Promise<{ lock: string; expires: number }> {
|
||||||
|
const data = await this.metadataFile.getJsonData();
|
||||||
|
return { lock: data.lock, expires: data.lockExpires };
|
||||||
|
}
|
||||||
|
}
|
55
ts/classes.smartbucket.ts
Normal file
55
ts/classes.smartbucket.ts
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
// classes.smartbucket.ts
|
||||||
|
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { Bucket } from './classes.bucket.js';
|
||||||
|
|
||||||
|
export class SmartBucket {
|
||||||
|
public config: plugins.tsclass.storage.IS3Descriptor;
|
||||||
|
|
||||||
|
public s3Client: plugins.s3.S3Client;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* the constructor of SmartBucket
|
||||||
|
*/
|
||||||
|
/**
|
||||||
|
* the constructor of SmartBucket
|
||||||
|
*/
|
||||||
|
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
|
||||||
|
this.config = configArg;
|
||||||
|
|
||||||
|
const protocol = configArg.useSsl === false ? 'http' : 'https';
|
||||||
|
const port = configArg.port ? `:${configArg.port}` : '';
|
||||||
|
const endpoint = `${protocol}://${configArg.endpoint}${port}`;
|
||||||
|
|
||||||
|
this.s3Client = new plugins.s3.S3Client({
|
||||||
|
endpoint,
|
||||||
|
region: configArg.region || 'us-east-1',
|
||||||
|
credentials: {
|
||||||
|
accessKeyId: configArg.accessKey,
|
||||||
|
secretAccessKey: configArg.accessSecret,
|
||||||
|
},
|
||||||
|
forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async createBucket(bucketNameArg: string) {
|
||||||
|
const bucket = await Bucket.createBucketByName(this, bucketNameArg);
|
||||||
|
return bucket;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async removeBucket(bucketName: string) {
|
||||||
|
await Bucket.removeBucketByName(this, bucketName);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getBucketByName(bucketNameArg: string) {
|
||||||
|
return Bucket.getBucketByName(this, bucketNameArg);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
|
||||||
|
const bucket = await this.getBucketByName(...args);
|
||||||
|
if (!bucket) {
|
||||||
|
throw new Error(`Bucket ${args[0]} does not exist.`);
|
||||||
|
}
|
||||||
|
return bucket;
|
||||||
|
}
|
||||||
|
}
|
30
ts/classes.trash.ts
Normal file
30
ts/classes.trash.ts
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import type { Bucket } from './classes.bucket.js';
|
||||||
|
import type { Directory } from './classes.directory.js';
|
||||||
|
import type { File } from './classes.file.js';
|
||||||
|
|
||||||
|
|
||||||
|
export class Trash {
|
||||||
|
public bucketRef: Bucket;
|
||||||
|
|
||||||
|
constructor(bucketRefArg: Bucket) {
|
||||||
|
this.bucketRef = bucketRefArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrashDir() {
|
||||||
|
return this.bucketRef.getDirectoryFromPath({ path: '.trash' });
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrashedFileByOriginalName(pathDescriptor: interfaces.IPathDecriptor): Promise<File> {
|
||||||
|
const trashDir = await this.getTrashDir();
|
||||||
|
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
|
||||||
|
return trashDir.getFileStrict({ path: trashKey });
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
|
||||||
|
return plugins.smartstring.base64.encode(originalPath);
|
||||||
|
}
|
||||||
|
}
|
22
ts/helpers.ts
Normal file
22
ts/helpers.ts
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
|
||||||
|
export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.IPathDecriptor): Promise<string> => {
|
||||||
|
let returnPath = ``
|
||||||
|
if (pathDescriptorArg.directory) {
|
||||||
|
if (pathDescriptorArg.path && plugins.path.isAbsolute(pathDescriptorArg.path)) {
|
||||||
|
console.warn('Directory is being ignored when path is absolute.');
|
||||||
|
returnPath = pathDescriptorArg.path;
|
||||||
|
} else if (pathDescriptorArg.path) {
|
||||||
|
returnPath = plugins.path.join(pathDescriptorArg.directory.getBasePath(), pathDescriptorArg.path);
|
||||||
|
}
|
||||||
|
} else if (pathDescriptorArg.path) {
|
||||||
|
returnPath = pathDescriptorArg.path;
|
||||||
|
} else {
|
||||||
|
throw new Error('You must specify either a path or a directory.');
|
||||||
|
}
|
||||||
|
if (returnPath.startsWith('/')) {
|
||||||
|
returnPath = returnPath.substring(1);
|
||||||
|
}
|
||||||
|
return returnPath;
|
||||||
|
}
|
@@ -1,4 +1,4 @@
|
|||||||
export * from './smartbucket.classes.smartbucket.js';
|
export * from './classes.smartbucket.js';
|
||||||
export * from './smartbucket.classes.bucket.js';
|
export * from './classes.bucket.js';
|
||||||
export * from './smartbucket.classes.directory.js';
|
export * from './classes.directory.js';
|
||||||
export * from './smartbucket.classes.file.js';
|
export * from './classes.file.js';
|
||||||
|
6
ts/interfaces.ts
Normal file
6
ts/interfaces.ts
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import type { Directory } from "./classes.directory.js";
|
||||||
|
|
||||||
|
/**
 * Describes a location inside a bucket: an explicit path, a directory,
 * or both (a path resolved relative to the directory).
 * How the two fields combine is defined by helpers.reducePathDescriptorToPath.
 */
export interface IPathDecriptor {
  // bucket-relative path; may be combined with `directory`
  path?: string;
  // base directory the path is resolved against
  directory?: Directory;
}
|
32
ts/plugins.ts
Normal file
32
ts/plugins.ts
Normal file
@@ -0,0 +1,32 @@
|
|||||||
|
// plugins.ts
|
||||||
|
|
||||||
|
// node native
|
||||||
|
import * as path from 'path';
|
||||||
|
import * as stream from 'stream';
|
||||||
|
|
||||||
|
export { path, stream };
|
||||||
|
|
||||||
|
// @push.rocks scope
|
||||||
|
import * as smartmime from '@push.rocks/smartmime';
|
||||||
|
import * as smartpath from '@push.rocks/smartpath';
|
||||||
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
|
import * as smartrx from '@push.rocks/smartrx';
|
||||||
|
import * as smartstream from '@push.rocks/smartstream';
|
||||||
|
import * as smartstring from '@push.rocks/smartstring';
|
||||||
|
import * as smartunique from '@push.rocks/smartunique';
|
||||||
|
|
||||||
|
export { smartmime, smartpath, smartpromise, smartrx, smartstream, smartstring, smartunique };
|
||||||
|
|
||||||
|
// @tsclass
|
||||||
|
import * as tsclass from '@tsclass/tsclass';
|
||||||
|
|
||||||
|
export {
|
||||||
|
tsclass,
|
||||||
|
}
|
||||||
|
|
||||||
|
// third party scope
|
||||||
|
import * as s3 from '@aws-sdk/client-s3';
|
||||||
|
|
||||||
|
export {
|
||||||
|
s3,
|
||||||
|
}
|
@@ -1,124 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { SmartBucket } from './smartbucket.classes.smartbucket.js';
|
|
||||||
import { Directory } from './smartbucket.classes.directory.js';
|
|
||||||
|
|
||||||
export class Bucket {
|
|
||||||
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
|
|
||||||
const buckets = await smartbucketRef.minioClient.listBuckets();
|
|
||||||
const foundBucket = buckets.find((bucket) => {
|
|
||||||
return bucket.name === bucketNameArg;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (foundBucket) {
|
|
||||||
console.log(`bucket with name ${bucketNameArg} exists.`);
|
|
||||||
console.log(`Taking this as base for new Bucket instance`);
|
|
||||||
return new this(smartbucketRef, bucketNameArg);
|
|
||||||
} else {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
|
||||||
await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch((e) => console.log(e));
|
|
||||||
return new Bucket(smartbucketRef, bucketName);
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
|
||||||
await smartbucketRef.minioClient.removeBucket(bucketName).catch((e) => console.log(e));
|
|
||||||
}
|
|
||||||
|
|
||||||
public smartbucketRef: SmartBucket;
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
constructor(smartbucketRef: SmartBucket, bucketName: string) {
|
|
||||||
this.smartbucketRef = smartbucketRef;
|
|
||||||
this.name = bucketName;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets the base directory of the bucket
|
|
||||||
*/
|
|
||||||
public async getBaseDirectory() {
|
|
||||||
return new Directory(this, null, '');
|
|
||||||
}
|
|
||||||
|
|
||||||
// ===============
|
|
||||||
// Fast Operations
|
|
||||||
// ===============
|
|
||||||
|
|
||||||
/**
|
|
||||||
* store file
|
|
||||||
*/
|
|
||||||
public async fastStore(pathArg: string, fileContent: string | Buffer): Promise<void> {
|
|
||||||
const streamIntake = new plugins.smartstream.StreamIntake();
|
|
||||||
const putPromise = this.smartbucketRef.minioClient
|
|
||||||
.putObject(this.name, pathArg, streamIntake.getReadable())
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
streamIntake.pushData(fileContent);
|
|
||||||
streamIntake.signalEnd();
|
|
||||||
await putPromise;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* get file
|
|
||||||
*/
|
|
||||||
public async fastGet(pathArg: string): Promise<Buffer> {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
let completeFile: Buffer;
|
|
||||||
const replaySubject = await this.fastGetStream(pathArg);
|
|
||||||
const subscription = replaySubject.subscribe(
|
|
||||||
(chunk) => {
|
|
||||||
if (completeFile) {
|
|
||||||
completeFile = Buffer.concat([completeFile, chunk]);
|
|
||||||
} else {
|
|
||||||
completeFile = chunk;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
(err) => {
|
|
||||||
console.log(err);
|
|
||||||
},
|
|
||||||
() => {
|
|
||||||
done.resolve();
|
|
||||||
subscription.unsubscribe();
|
|
||||||
}
|
|
||||||
);
|
|
||||||
await done.promise;
|
|
||||||
return completeFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastGetStream(pathArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
|
|
||||||
const fileStream = await this.smartbucketRef.minioClient
|
|
||||||
.getObject(this.name, pathArg)
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
|
|
||||||
const duplexStream = plugins.smartstream.createDuplexStream<Buffer, Buffer>(
|
|
||||||
async (chunk) => {
|
|
||||||
replaySubject.next(chunk);
|
|
||||||
return chunk;
|
|
||||||
},
|
|
||||||
async (cb) => {
|
|
||||||
replaySubject.complete();
|
|
||||||
return Buffer.from('');
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!fileStream) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const smartstream = new plugins.smartstream.StreamWrapper([
|
|
||||||
fileStream,
|
|
||||||
duplexStream,
|
|
||||||
plugins.smartstream.cleanPipe(),
|
|
||||||
]);
|
|
||||||
smartstream.run();
|
|
||||||
return replaySubject;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* removeObject
|
|
||||||
*/
|
|
||||||
public async fastRemove(pathArg: string) {
|
|
||||||
await this.smartbucketRef.minioClient.removeObject(this.name, pathArg);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,226 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { Bucket } from './smartbucket.classes.bucket.js';
|
|
||||||
import { File } from './smartbucket.classes.file.js';
|
|
||||||
|
|
||||||
export class Directory {
|
|
||||||
public bucketRef: Bucket;
|
|
||||||
public parentDirectoryRef: Directory;
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
public tree: string[];
|
|
||||||
public files: string[];
|
|
||||||
public folders: string[];
|
|
||||||
|
|
||||||
constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) {
|
|
||||||
this.bucketRef = bucketRefArg;
|
|
||||||
this.parentDirectoryRef = parentDiretory;
|
|
||||||
this.name = name;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* returns an array of parent directories
|
|
||||||
*/
|
|
||||||
public getParentDirectories(): Directory[] {
|
|
||||||
let parentDirectories: Directory[] = [];
|
|
||||||
if (this.parentDirectoryRef) {
|
|
||||||
parentDirectories.push(this.parentDirectoryRef);
|
|
||||||
parentDirectories = parentDirectories.concat(this.parentDirectoryRef.getParentDirectories());
|
|
||||||
}
|
|
||||||
return parentDirectories;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* returns the directory level
|
|
||||||
*/
|
|
||||||
public getDirectoryLevel(): number {
|
|
||||||
return this.getParentDirectories().length;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* updates the base path
|
|
||||||
*/
|
|
||||||
public getBasePath(): string {
|
|
||||||
const parentDirectories = this.getParentDirectories();
|
|
||||||
let basePath = '';
|
|
||||||
for (const parentDir of parentDirectories) {
|
|
||||||
if (!parentDir.name && !basePath) {
|
|
||||||
basePath = this.name + '/';
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (parentDir.name && !basePath) {
|
|
||||||
basePath = parentDir.name + '/' + this.name + '/';
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (parentDir.name && basePath) {
|
|
||||||
basePath = parentDir.name + '/' + basePath;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return basePath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* lists all files
|
|
||||||
*/
|
|
||||||
public async listFiles(): Promise<File[]> {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
|
|
||||||
this.bucketRef.name,
|
|
||||||
this.getBasePath(),
|
|
||||||
false
|
|
||||||
);
|
|
||||||
const fileArray: File[] = [];
|
|
||||||
const duplexStream = plugins.smartstream.createDuplexStream<plugins.minio.BucketItem, void>(
|
|
||||||
async (bucketItem) => {
|
|
||||||
if (bucketItem.prefix) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (!bucketItem.name) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
|
|
||||||
if (subtractedPath.startsWith('/')) {
|
|
||||||
subtractedPath = subtractedPath.substr(1);
|
|
||||||
}
|
|
||||||
if (!subtractedPath.includes('/')) {
|
|
||||||
fileArray.push(new File(this, subtractedPath));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
async (tools) => {
|
|
||||||
done.resolve();
|
|
||||||
}
|
|
||||||
);
|
|
||||||
fileNameStream.pipe(duplexStream);
|
|
||||||
await done.promise;
|
|
||||||
return fileArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* lists all folders
|
|
||||||
*/
|
|
||||||
public async listDirectories(): Promise<Directory[]> {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
const basePath = this.getBasePath();
|
|
||||||
const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
|
|
||||||
this.bucketRef.name,
|
|
||||||
this.getBasePath(),
|
|
||||||
false
|
|
||||||
);
|
|
||||||
const directoryArray: Directory[] = [];
|
|
||||||
const duplexStream = plugins.smartstream.createDuplexStream<plugins.minio.BucketItem, void>(
|
|
||||||
async (bucketItem) => {
|
|
||||||
if (bucketItem.name) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let subtractedPath = bucketItem.prefix.replace(this.getBasePath(), '');
|
|
||||||
if (subtractedPath.startsWith('/')) {
|
|
||||||
subtractedPath = subtractedPath.substr(1);
|
|
||||||
}
|
|
||||||
if (subtractedPath.includes('/')) {
|
|
||||||
const dirName = subtractedPath.split('/')[0];
|
|
||||||
if (directoryArray.find((directory) => directory.name === dirName)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
directoryArray.push(new Directory(this.bucketRef, this, dirName));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
async (tools) => {
|
|
||||||
done.resolve();
|
|
||||||
}
|
|
||||||
);
|
|
||||||
completeDirStream.pipe(duplexStream);
|
|
||||||
await done.promise;
|
|
||||||
return directoryArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets an array that has all objects with a certain prefix;
|
|
||||||
*/
|
|
||||||
public async getTreeArray() {
|
|
||||||
const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
|
|
||||||
this.bucketRef.name,
|
|
||||||
this.getBasePath(),
|
|
||||||
true
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets a sub directory
|
|
||||||
*/
|
|
||||||
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
|
|
||||||
const dirNameArray = dirNameArg.split('/');
|
|
||||||
|
|
||||||
const getDirectory = async (directoryArg: Directory, dirNameToSearch: string) => {
|
|
||||||
const directories = await directoryArg.listDirectories();
|
|
||||||
return directories.find((directory) => {
|
|
||||||
return directory.name === dirNameToSearch;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
let wantedDirectory: Directory;
|
|
||||||
for (const dirNameToSearch of dirNameArray) {
|
|
||||||
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
|
|
||||||
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
|
|
||||||
}
|
|
||||||
return wantedDirectory;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* moves the directory
|
|
||||||
*/
|
|
||||||
public async move() {
|
|
||||||
// TODO
|
|
||||||
throw new Error('moving a directory is not yet implemented');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* creates a file within this directory
|
|
||||||
* @param relativePathArg
|
|
||||||
*/
|
|
||||||
public async createEmptyFile(relativePathArg: string) {
|
|
||||||
const emtpyFile = await File.createFileFromString(this, relativePathArg, '');
|
|
||||||
}
|
|
||||||
|
|
||||||
// file operations
|
|
||||||
public async fastStore(pathArg: string, contentArg: string | Buffer) {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), pathArg);
|
|
||||||
await this.bucketRef.fastStore(path, contentArg);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastGet(pathArg: string) {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), pathArg);
|
|
||||||
const result = await this.bucketRef.fastGet(path);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastGetStream(pathArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), pathArg);
|
|
||||||
const result = await this.bucketRef.fastGetStream(path);
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastRemove(pathArg: string) {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), pathArg);
|
|
||||||
await this.bucketRef.fastRemove(path);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* deletes the directory with all its contents
|
|
||||||
*/
|
|
||||||
public async deleteWithAllContents() {
|
|
||||||
const deleteDirectory = async (directoryArg: Directory) => {
|
|
||||||
const childDirectories = await directoryArg.listDirectories();
|
|
||||||
if (childDirectories.length === 0) {
|
|
||||||
console.log('directory empty! Path complete!');
|
|
||||||
} else {
|
|
||||||
for (const childDir of childDirectories) {
|
|
||||||
await deleteDirectory(childDir);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const files = await directoryArg.listFiles();
|
|
||||||
for (const file of files) {
|
|
||||||
await directoryArg.fastRemove(file.name);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
await deleteDirectory(this);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,93 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { Directory } from './smartbucket.classes.directory.js';
|
|
||||||
|
|
||||||
export interface IFileMetaData {
|
|
||||||
name: string;
|
|
||||||
fileType: string;
|
|
||||||
size: string;
|
|
||||||
}
|
|
||||||
|
|
||||||
export class File {
|
|
||||||
// STATIC
|
|
||||||
public static async createFileFromString(
|
|
||||||
dirArg: Directory,
|
|
||||||
fileName: string,
|
|
||||||
fileContent: string
|
|
||||||
) {
|
|
||||||
await this.createFileFromBuffer(dirArg, fileName, Buffer.from(fileContent));
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async createFileFromBuffer(
|
|
||||||
directoryRef: Directory,
|
|
||||||
fileName: string,
|
|
||||||
fileContent: Buffer
|
|
||||||
) {
|
|
||||||
const filePath = plugins.path.join(directoryRef.getBasePath(), fileName);
|
|
||||||
const streamIntake = new plugins.smartstream.StreamIntake();
|
|
||||||
const putPromise = directoryRef.bucketRef.smartbucketRef.minioClient
|
|
||||||
.putObject(this.name, filePath, streamIntake.getReadable())
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
streamIntake.pushData(fileContent);
|
|
||||||
streamIntake.signalEnd();
|
|
||||||
await putPromise;
|
|
||||||
}
|
|
||||||
|
|
||||||
// INSTANCE
|
|
||||||
public parentDirectoryRef: Directory;
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
public path: string;
|
|
||||||
public metaData: IFileMetaData;
|
|
||||||
|
|
||||||
constructor(directoryRefArg: Directory, fileName: string) {
|
|
||||||
this.parentDirectoryRef = directoryRefArg;
|
|
||||||
this.name = fileName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async getContentAsString() {
|
|
||||||
const fileBuffer = await this.getContentAsBuffer();
|
|
||||||
return fileBuffer.toString();
|
|
||||||
}
|
|
||||||
|
|
||||||
public async getContentAsBuffer() {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
const fileStream = await this.parentDirectoryRef.bucketRef.smartbucketRef.minioClient
|
|
||||||
.getObject(this.parentDirectoryRef.bucketRef.name, this.path)
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
let completeFile = Buffer.from('');
|
|
||||||
const duplexStream = plugins.smartstream.createDuplexStream<Buffer, Buffer>(
|
|
||||||
async (chunk) => {
|
|
||||||
completeFile = Buffer.concat([chunk]);
|
|
||||||
return chunk;
|
|
||||||
},
|
|
||||||
async (cb) => {
|
|
||||||
done.resolve();
|
|
||||||
return Buffer.from('');
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!fileStream) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
fileStream.pipe(duplexStream);
|
|
||||||
await done.promise;
|
|
||||||
return completeFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async streamContent() {
|
|
||||||
// TODO
|
|
||||||
throw new Error('not yet implemented');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* removes this file
|
|
||||||
*/
|
|
||||||
public async remove() {
|
|
||||||
await this.parentDirectoryRef.bucketRef.smartbucketRef.minioClient.removeObject(
|
|
||||||
this.parentDirectoryRef.bucketRef.name,
|
|
||||||
this.path
|
|
||||||
);
|
|
||||||
await this.parentDirectoryRef.listFiles();
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,35 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { Bucket } from './smartbucket.classes.bucket.js';
|
|
||||||
|
|
||||||
export class SmartBucket {
|
|
||||||
public config: plugins.tsclass.storage.IS3Descriptor;
|
|
||||||
|
|
||||||
public minioClient: plugins.minio.Client;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* the constructor of SmartBucket
|
|
||||||
*/
|
|
||||||
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
|
|
||||||
this.config = configArg;
|
|
||||||
this.minioClient = new plugins.minio.Client({
|
|
||||||
endPoint: this.config.endpoint,
|
|
||||||
port: configArg.port || 443,
|
|
||||||
useSSL: configArg.useSsl !== undefined ? configArg.useSsl : true,
|
|
||||||
accessKey: this.config.accessKey,
|
|
||||||
secretKey: this.config.accessSecret,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
public async createBucket(bucketNameArg: string) {
|
|
||||||
const bucket = await Bucket.createBucketByName(this, bucketNameArg);
|
|
||||||
return bucket;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async removeBucket(bucketName: string) {
|
|
||||||
await Bucket.removeBucketByName(this, bucketName);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async getBucketByName(bucketName: string) {
|
|
||||||
return Bucket.getBucketByName(this, bucketName);
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,24 +0,0 @@
|
|||||||
// node native
|
|
||||||
import * as path from 'path';
|
|
||||||
|
|
||||||
export { path };
|
|
||||||
|
|
||||||
// @push.rocks scope
|
|
||||||
import * as smartpath from '@push.rocks/smartpath';
|
|
||||||
import * as smartpromise from '@push.rocks/smartpromise';
|
|
||||||
import * as smartrx from '@push.rocks/smartrx';
|
|
||||||
import * as smartstream from '@push.rocks/smartstream';
|
|
||||||
|
|
||||||
export { smartpath, smartpromise, smartrx, smartstream };
|
|
||||||
|
|
||||||
// @tsclass
|
|
||||||
import * as tsclass from '@tsclass/tsclass';
|
|
||||||
|
|
||||||
export {
|
|
||||||
tsclass,
|
|
||||||
}
|
|
||||||
|
|
||||||
// third party scope
|
|
||||||
import * as minio from 'minio';
|
|
||||||
|
|
||||||
export { minio };
|
|
@@ -6,7 +6,8 @@
|
|||||||
"module": "NodeNext",
|
"module": "NodeNext",
|
||||||
"moduleResolution": "NodeNext",
|
"moduleResolution": "NodeNext",
|
||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"verbatimModuleSyntax": true
|
"verbatimModuleSyntax": true,
|
||||||
|
"strict": true
|
||||||
},
|
},
|
||||||
"exclude": [
|
"exclude": [
|
||||||
"dist_*/**/*.d.ts"
|
"dist_*/**/*.d.ts"
|
||||||
|
Reference in New Issue
Block a user