Compare commits
53 Commits
Author | SHA1 | Date | |
---|---|---|---|
b0d41fa9a0 | |||
34082c38a7 | |||
8d160cefb0 | |||
cec9c07b7c | |||
383a5204f4 | |||
c7f0c97341 | |||
e7f60465ff | |||
7db4d24817 | |||
dc599585b8 | |||
a22e32cd32 | |||
4647181807 | |||
99c3935d0c | |||
05523dc7a1 | |||
dc99cfa229 | |||
23f8dc55d0 | |||
ffaf0fc97a | |||
2a0425ff54 | |||
9adcdee0a0 | |||
786f8d4365 | |||
67244ba5cf | |||
a9bb31c2a2 | |||
bd8b05920f | |||
535d9f8520 | |||
8401fe1c0c | |||
08c3f674bf | |||
df0a439def | |||
7245b49c31 | |||
4b70edb947 | |||
9629a04da6 | |||
963463d40d | |||
ce58b99fc7 | |||
591c99736d | |||
559e3da47b | |||
a7ac870e05 | |||
d48c5e229a | |||
b9c384dd08 | |||
91c04b2364 | |||
b5dcc131e2 | |||
cb0ab2c9db | |||
2a17ee542e | |||
95e9d2f0ff | |||
1a71c76da3 | |||
e924511147 | |||
645ebbdd4d | |||
168148b2c9 | |||
1293fc4ca6 | |||
b040120813 | |||
5c2d92c041 | |||
eaf2e7e6bb | |||
1e1f65119c | |||
c70ee820d7 | |||
2a15362ced | |||
9d5cdadd89 |
95
changelog.md
Normal file
95
changelog.md
Normal file
@ -0,0 +1,95 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## 2024-11-24 - 3.2.0 - feat(bucket)
|
||||||
|
Enhanced SmartBucket with trash management and metadata handling
|
||||||
|
|
||||||
|
- Added functionality to move files to a trash directory.
|
||||||
|
- Introduced methods to handle file metadata more robustly.
|
||||||
|
- Implemented a method to clean all contents from a bucket.
|
||||||
|
- Enhanced directory retrieval to handle non-existent directories with options.
|
||||||
|
- Improved handling of file paths and metadata within the storage system.
|
||||||
|
|
||||||
|
## 2024-11-18 - 3.1.0 - feat(file)
|
||||||
|
Added functionality to retrieve magic bytes from files and detect file types using magic bytes.
|
||||||
|
|
||||||
|
- Introduced method `getMagicBytes` in `File` and `Bucket` classes to retrieve a specific number of bytes from a file.
|
||||||
|
- Enhanced file type detection by utilizing magic bytes in `MetaData` class.
|
||||||
|
- Updated dependencies for better performance and compatibility.
|
||||||
|
|
||||||
|
## 2024-11-18 - 3.0.24 - fix(metadata)
|
||||||
|
Fix metadata handling to address type assertion and data retrieval.
|
||||||
|
|
||||||
|
- Fixed type assertion issues in `MetaData` class properties with type non-null assertions.
|
||||||
|
- Corrected the handling of JSON data retrieval in `MetaData.storeCustomMetaData` function.
|
||||||
|
|
||||||
|
## 2024-10-16 - 3.0.23 - fix(dependencies)
|
||||||
|
Update package dependencies for improved functionality and security.
|
||||||
|
|
||||||
|
- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
|
||||||
|
- Updated @push.rocks/smartstream to version ^3.2.4.
|
||||||
|
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.
|
||||||
|
|
||||||
|
## 2024-07-28 - 3.0.22 - fix(dependencies)
|
||||||
|
Update dependencies and improve bucket retrieval logging
|
||||||
|
|
||||||
|
- Updated @aws-sdk/client-s3 to ^3.620.0
|
||||||
|
- Updated @git.zone/tsbuild to ^2.1.84
|
||||||
|
- Updated @git.zone/tsrun to ^1.2.49
|
||||||
|
- Updated @push.rocks/smartpromise to ^4.0.4
|
||||||
|
- Updated @tsclass/tsclass to ^4.1.2
|
||||||
|
- Added a log for when a bucket is not found by name in getBucketByName method
|
||||||
|
|
||||||
|
## 2024-07-04 - 3.0.21 - fix(test)
|
||||||
|
Update endpoint configuration in tests to use environment variable
|
||||||
|
|
||||||
|
- Modified `qenv.yml` to include `S3_ENDPOINT` as a required environment variable.
|
||||||
|
- Updated test files to fetch `S3_ENDPOINT` from environment instead of hardcoding.
|
||||||
|
|
||||||
|
## 2024-06-19 - 3.0.20 - Fix and Stability Updates
|
||||||
|
Improved overall stability and consistency.
|
||||||
|
|
||||||
|
## 2024-06-18 - 3.0.18 - Delete Functions Consistency
|
||||||
|
Ensured more consistency between delete methods and trash behavior.
|
||||||
|
|
||||||
|
## 2024-06-17 - 3.0.17 to 3.0.16 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-11 - 3.0.15 to 3.0.14 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-10 - 3.0.13 - Trash Feature Completion
|
||||||
|
Finished work on trash feature.
|
||||||
|
|
||||||
|
## 2024-06-09 - 3.0.12 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-08 - 3.0.11 to 3.0.10 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-06-03 - 3.0.10 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-05-29 - 3.0.9 - Update Description
|
||||||
|
Updated project description.
|
||||||
|
|
||||||
|
## 2024-05-27 - 3.0.8 to 3.0.6 - Pathing and Core Updates
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
- S3 paths' pathing differences now correctly handled with a reducePath method.
|
||||||
|
|
||||||
|
## 2024-05-21 - 3.0.5 to 3.0.4 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-05-17 - 3.0.3 to 3.0.2 - Fix and Update
|
||||||
|
Routine updates and fixes performed.
|
||||||
|
|
||||||
|
## 2024-05-17 - 3.0.0 - Major Release
|
||||||
|
Introduced breaking changes in core and significant improvements.
|
||||||
|
|
||||||
|
## 2024-05-05 - 2.0.5 - Breaking Changes
|
||||||
|
Introduced breaking changes in core functionality.
|
||||||
|
|
||||||
|
## 2024-04-14 - 2.0.4 - TSConfig Update
|
||||||
|
Updated TypeScript configuration.
|
||||||
|
|
||||||
|
## 2024-01-01 - 2.0.2 - Organization Scheme Update
|
||||||
|
Switched to the new organizational scheme.
|
@ -8,22 +8,28 @@
|
|||||||
"githost": "code.foss.global",
|
"githost": "code.foss.global",
|
||||||
"gitscope": "push.rocks",
|
"gitscope": "push.rocks",
|
||||||
"gitrepo": "smartbucket",
|
"gitrepo": "smartbucket",
|
||||||
"description": "A TypeScript library for simple cloud independent object storage with support for buckets, directories, and files.",
|
"description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
|
||||||
"npmPackagename": "@push.rocks/smartbucket",
|
"npmPackagename": "@push.rocks/smartbucket",
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"keywords": [
|
"keywords": [
|
||||||
|
"TypeScript",
|
||||||
"cloud storage",
|
"cloud storage",
|
||||||
"object storage",
|
"object storage",
|
||||||
"minio",
|
"bucket creation",
|
||||||
|
"file management",
|
||||||
|
"directory management",
|
||||||
|
"data streaming",
|
||||||
|
"multi-cloud",
|
||||||
|
"API",
|
||||||
|
"unified storage",
|
||||||
"S3",
|
"S3",
|
||||||
"TypeScript",
|
"minio",
|
||||||
"smartstream",
|
"file locking",
|
||||||
"smartpromise",
|
"metadata",
|
||||||
"smartpath",
|
"buffer handling",
|
||||||
"smartrx",
|
"access key",
|
||||||
"buckets",
|
"secret key",
|
||||||
"files management",
|
"cloud agnostic"
|
||||||
"directories management"
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
4
package-lock.json
generated
4
package-lock.json
generated
@ -1,12 +1,12 @@
|
|||||||
{
|
{
|
||||||
"name": "@push.rocks/smartbucket",
|
"name": "@push.rocks/smartbucket",
|
||||||
"version": "3.0.1",
|
"version": "3.2.0",
|
||||||
"lockfileVersion": 3,
|
"lockfileVersion": 3,
|
||||||
"requires": true,
|
"requires": true,
|
||||||
"packages": {
|
"packages": {
|
||||||
"": {
|
"": {
|
||||||
"name": "@push.rocks/smartbucket",
|
"name": "@push.rocks/smartbucket",
|
||||||
"version": "3.0.1",
|
"version": "3.2.0",
|
||||||
"license": "UNLICENSED",
|
"license": "UNLICENSED",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@push.rocks/smartpath": "^5.0.18",
|
"@push.rocks/smartpath": "^5.0.18",
|
||||||
|
54
package.json
54
package.json
@ -1,7 +1,7 @@
|
|||||||
{
|
{
|
||||||
"name": "@push.rocks/smartbucket",
|
"name": "@push.rocks/smartbucket",
|
||||||
"version": "3.0.1",
|
"version": "3.2.0",
|
||||||
"description": "A TypeScript library for simple cloud independent object storage with support for buckets, directories, and files.",
|
"description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
|
||||||
"main": "dist_ts/index.js",
|
"main": "dist_ts/index.js",
|
||||||
"typings": "dist_ts/index.d.ts",
|
"typings": "dist_ts/index.d.ts",
|
||||||
"type": "module",
|
"type": "module",
|
||||||
@ -12,19 +12,22 @@
|
|||||||
"build": "(tsbuild --web --allowimplicitany)"
|
"build": "(tsbuild --web --allowimplicitany)"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.1.76",
|
"@git.zone/tsbuild": "^2.1.84",
|
||||||
"@git.zone/tsrun": "^1.2.46",
|
"@git.zone/tsrun": "^1.2.49",
|
||||||
"@git.zone/tstest": "^1.0.90",
|
"@git.zone/tstest": "^1.0.90",
|
||||||
"@push.rocks/qenv": "^6.0.5",
|
"@push.rocks/qenv": "^6.1.0",
|
||||||
"@push.rocks/tapbundle": "^5.0.23"
|
"@push.rocks/tapbundle": "^5.5.3"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@aws-sdk/client-s3": "^3.699.0",
|
||||||
|
"@push.rocks/smartmime": "^2.0.4",
|
||||||
"@push.rocks/smartpath": "^5.0.18",
|
"@push.rocks/smartpath": "^5.0.18",
|
||||||
"@push.rocks/smartpromise": "^4.0.3",
|
"@push.rocks/smartpromise": "^4.0.4",
|
||||||
"@push.rocks/smartrx": "^3.0.7",
|
"@push.rocks/smartrx": "^3.0.7",
|
||||||
"@push.rocks/smartstream": "^3.0.37",
|
"@push.rocks/smartstream": "^3.2.5",
|
||||||
"@tsclass/tsclass": "^4.0.54",
|
"@push.rocks/smartstring": "^4.0.15",
|
||||||
"minio": "^8.0.0"
|
"@push.rocks/smartunique": "^3.0.9",
|
||||||
|
"@tsclass/tsclass": "^4.1.2"
|
||||||
},
|
},
|
||||||
"private": false,
|
"private": false,
|
||||||
"files": [
|
"files": [
|
||||||
@ -43,17 +46,28 @@
|
|||||||
"last 1 chrome versions"
|
"last 1 chrome versions"
|
||||||
],
|
],
|
||||||
"keywords": [
|
"keywords": [
|
||||||
|
"TypeScript",
|
||||||
"cloud storage",
|
"cloud storage",
|
||||||
"object storage",
|
"object storage",
|
||||||
"minio",
|
"bucket creation",
|
||||||
|
"file management",
|
||||||
|
"directory management",
|
||||||
|
"data streaming",
|
||||||
|
"multi-cloud",
|
||||||
|
"API",
|
||||||
|
"unified storage",
|
||||||
"S3",
|
"S3",
|
||||||
"TypeScript",
|
"minio",
|
||||||
"smartstream",
|
"file locking",
|
||||||
"smartpromise",
|
"metadata",
|
||||||
"smartpath",
|
"buffer handling",
|
||||||
"smartrx",
|
"access key",
|
||||||
"buckets",
|
"secret key",
|
||||||
"files management",
|
"cloud agnostic"
|
||||||
"directories management"
|
],
|
||||||
]
|
"homepage": "https://code.foss.global/push.rocks/smartbucket",
|
||||||
|
"repository": {
|
||||||
|
"type": "git",
|
||||||
|
"url": "https://code.foss.global/push.rocks/smartbucket.git"
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
6177
pnpm-lock.yaml
generated
6177
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
3
qenv.yml
3
qenv.yml
@ -1,3 +1,4 @@
|
|||||||
required:
|
required:
|
||||||
- S3_KEY
|
- S3_KEY
|
||||||
- S3_SECRET
|
- S3_SECRET
|
||||||
|
- S3_ENDPOINT
|
227
readme.md
227
readme.md
@ -1,9 +1,10 @@
|
|||||||
# @push.rocks/smartbucket
|
# @push.rocks/smartbucket
|
||||||
simple cloud independent object storage
|
|
||||||
|
A TypeScript library for cloud-independent object storage, providing features like bucket creation, file and directory management, and data streaming.
|
||||||
|
|
||||||
## Install
|
## Install
|
||||||
|
|
||||||
To install `@push.rocks/smartbucket`, you need to have Node.js and npm (Node Package Manager) installed on your system. If you have them installed, you can add `@push.rocks/smartbucket` to your project by running the following command in your project's root directory:
|
To install `@push.rocks/smartbucket`, you need to have Node.js and npm (Node Package Manager) installed. If they are installed, you can add `@push.rocks/smartbucket` to your project by running the following command in your project's root directory:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
npm install @push.rocks/smartbucket --save
|
npm install @push.rocks/smartbucket --save
|
||||||
@ -13,15 +14,28 @@ This command will download and install `@push.rocks/smartbucket` along with its
|
|||||||
|
|
||||||
## Usage
|
## Usage
|
||||||
|
|
||||||
`@push.rocks/smartbucket` is a module designed to provide simple cloud-independent object storage functionality. It wraps various cloud storage providers such as AWS S3, Google Cloud Storage, and others, offering a unified API to manage storage buckets and objects within those buckets.
|
`@push.rocks/smartbucket` is a TypeScript module designed to provide simple cloud-independent object storage functionality. It wraps various cloud storage providers such as AWS S3, Google Cloud Storage, and others, offering a unified API to manage storage buckets and objects within those buckets.
|
||||||
|
|
||||||
To use `@push.rocks/smartbucket` in your project, you'll need to follow these general steps:
|
In this guide, we will delve into the usage of SmartBucket, covering its full range of features from setting up the library to advanced usage scenarios.
|
||||||
|
|
||||||
|
### Table of Contents
|
||||||
|
1. [Setting Up](#setting-up)
|
||||||
|
2. [Creating a New Bucket](#creating-a-new-bucket)
|
||||||
|
3. [Listing Buckets](#listing-buckets)
|
||||||
|
4. [Working with Files](#working-with-files)
|
||||||
|
- [Uploading Files](#uploading-files)
|
||||||
|
- [Downloading Files](#downloading-files)
|
||||||
|
- [Deleting Files](#deleting-files)
|
||||||
|
- [Streaming Files](#streaming-files)
|
||||||
|
5. [Working with Directories](#working-with-directories)
|
||||||
|
6. [Advanced Features](#advanced-features)
|
||||||
|
- [Bucket Policies](#bucket-policies)
|
||||||
|
- [Object Metadata](#object-metadata)
|
||||||
|
- [Cloud Agnostic](#cloud-agnostic)
|
||||||
|
|
||||||
### Setting Up
|
### Setting Up
|
||||||
|
|
||||||
First, ensure you are using ECMAScript modules (ESM) and TypeScript in your project for best compatibility.
|
First, ensure you are using ECMAScript modules (ESM) and TypeScript in your project for best compatibility. Here's how to import and initialize SmartBucket in a TypeScript file:
|
||||||
|
|
||||||
Here's how to import and initialize smartbucket in a TypeScript file:
|
|
||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
import {
|
import {
|
||||||
@ -35,10 +49,12 @@ const mySmartBucket = new SmartBucket({
|
|||||||
accessKey: "yourAccessKey",
|
accessKey: "yourAccessKey",
|
||||||
accessSecret: "yourSecretKey",
|
accessSecret: "yourSecretKey",
|
||||||
endpoint: "yourEndpointURL",
|
endpoint: "yourEndpointURL",
|
||||||
|
port: 443, // Default is 443, can be customized for specific endpoint
|
||||||
|
useSsl: true // Defaults to true
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
Make sure to replace `"yourAccessKey"`, `"yourSecretKey"`, and `"yourEndpointURL"` with your actual credentials and endpoint URL.
|
Make sure to replace `"yourAccessKey"`, `"yourSecretKey"`, and `"yourEndpointURL"` with your actual credentials and endpoint URL. The `port` and `useSsl` options are optional and can be omitted if the defaults are acceptable.
|
||||||
|
|
||||||
### Creating a New Bucket
|
### Creating a New Bucket
|
||||||
|
|
||||||
@ -58,19 +74,15 @@ async function createBucket(bucketName: string) {
|
|||||||
createBucket("exampleBucket");
|
createBucket("exampleBucket");
|
||||||
```
|
```
|
||||||
|
|
||||||
**Important:** Bucket names must be unique across the storage service.
|
Bucket names must be unique across the storage service.
|
||||||
|
|
||||||
### Listing Buckets
|
### Listing Buckets
|
||||||
|
|
||||||
To list all buckets:
|
Currently, SmartBucket does not include a direct method to list all buckets, but you can access the underlying client provided by the cloud storage SDK to perform such operations, depending on the SDK's capabilities.
|
||||||
|
|
||||||
```typescript
|
### Working with Files
|
||||||
// Currently, SmartBucket does not include a direct method to list all buckets,
|
|
||||||
// but you can access the underlying client provided by the cloud storage SDK
|
|
||||||
// to perform such operations, depending on the SDK's capabilities.
|
|
||||||
```
|
|
||||||
|
|
||||||
### Uploading Objects to a Bucket
|
#### Uploading Files
|
||||||
|
|
||||||
To upload an object to a bucket:
|
To upload an object to a bucket:
|
||||||
|
|
||||||
@ -78,8 +90,10 @@ To upload an object to a bucket:
|
|||||||
async function uploadFile(bucketName: string, filePath: string, fileContent: Buffer | string) {
|
async function uploadFile(bucketName: string, filePath: string, fileContent: Buffer | string) {
|
||||||
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
if (myBucket) {
|
if (myBucket) {
|
||||||
await myBucket.fastStore(filePath, fileContent);
|
await myBucket.fastPut({ path: filePath, contents: fileContent });
|
||||||
console.log(`File uploaded to ${bucketName} at ${filePath}`);
|
console.log(`File uploaded to ${bucketName} at ${filePath}`);
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -87,7 +101,7 @@ async function uploadFile(bucketName: string, filePath: string, fileContent: Buf
|
|||||||
uploadFile("exampleBucket", "path/to/object.txt", "Hello, world!");
|
uploadFile("exampleBucket", "path/to/object.txt", "Hello, world!");
|
||||||
```
|
```
|
||||||
|
|
||||||
### Downloading Objects from a Bucket
|
#### Downloading Files
|
||||||
|
|
||||||
To download an object:
|
To download an object:
|
||||||
|
|
||||||
@ -95,8 +109,10 @@ To download an object:
|
|||||||
async function downloadFile(bucketName: string, filePath: string) {
|
async function downloadFile(bucketName: string, filePath: string) {
|
||||||
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
if (myBucket) {
|
if (myBucket) {
|
||||||
const fileContent: Buffer = await myBucket.fastGet(filePath);
|
const fileContent: Buffer = await myBucket.fastGet({ path: filePath });
|
||||||
console.log("Downloaded file content:", fileContent.toString());
|
console.log("Downloaded file content:", fileContent.toString());
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -104,7 +120,7 @@ async function downloadFile(bucketName: string, filePath: string) {
|
|||||||
downloadFile("exampleBucket", "path/to/object.txt");
|
downloadFile("exampleBucket", "path/to/object.txt");
|
||||||
```
|
```
|
||||||
|
|
||||||
### Deleting Objects
|
#### Deleting Files
|
||||||
|
|
||||||
To delete an object from a bucket:
|
To delete an object from a bucket:
|
||||||
|
|
||||||
@ -112,8 +128,10 @@ To delete an object from a bucket:
|
|||||||
async function deleteFile(bucketName: string, filePath: string) {
|
async function deleteFile(bucketName: string, filePath: string) {
|
||||||
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
if (myBucket) {
|
if (myBucket) {
|
||||||
await myBucket.fastRemove(filePath);
|
await myBucket.fastRemove({ path: filePath });
|
||||||
console.log(`File at ${filePath} deleted from ${bucketName}.`);
|
console.log(`File at ${filePath} deleted from ${bucketName}.`);
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -121,19 +139,174 @@ async function deleteFile(bucketName: string, filePath: string) {
|
|||||||
deleteFile("exampleBucket", "path/to/object.txt");
|
deleteFile("exampleBucket", "path/to/object.txt");
|
||||||
```
|
```
|
||||||
|
|
||||||
|
#### Streaming Files
|
||||||
|
|
||||||
|
SmartBucket allows you to work with file streams, which can be useful for handling large files.
|
||||||
|
|
||||||
|
To read a file as a stream:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { ReplaySubject } from '@push.rocks/smartrx';
|
||||||
|
|
||||||
|
async function readFileStream(bucketName: string, filePath: string) {
|
||||||
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
|
if (myBucket) {
|
||||||
|
const fileStream: ReplaySubject<Buffer> = await myBucket.fastGetStream({ path: filePath });
|
||||||
|
fileStream.subscribe({
|
||||||
|
next(chunk: Buffer) {
|
||||||
|
console.log("Chunk received:", chunk.toString());
|
||||||
|
},
|
||||||
|
complete() {
|
||||||
|
console.log("File read completed.");
|
||||||
|
},
|
||||||
|
error(err) {
|
||||||
|
console.error("Error reading file stream:", err);
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
readFileStream("exampleBucket", "path/to/object.txt");
|
||||||
|
```
|
||||||
|
|
||||||
|
To write a file as a stream:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
import { Readable } from 'stream';
|
||||||
|
|
||||||
|
async function writeFileStream(bucketName: string, filePath: string, readableStream: Readable) {
|
||||||
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
|
if (myBucket) {
|
||||||
|
await myBucket.fastPutStream({ path: filePath, dataStream: readableStream });
|
||||||
|
console.log(`File streamed to ${bucketName} at ${filePath}`);
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create a readable stream from a string
|
||||||
|
const readable = new Readable();
|
||||||
|
readable.push('Hello world streamed as a file!');
|
||||||
|
readable.push(null); // End of stream
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
writeFileStream("exampleBucket", "path/to/streamedObject.txt", readable);
|
||||||
|
```
|
||||||
|
|
||||||
### Working with Directories
|
### Working with Directories
|
||||||
|
|
||||||
`@push.rocks/smartbucket` abstracts directories within buckets for easier object management. You can create, list, and delete directories using the `Directory` class.
|
`@push.rocks/smartbucket` offers abstractions for directories within buckets for easier object management. You can create, list, and delete directories using the `Directory` class.
|
||||||
|
|
||||||
### Additional Features
|
To list the contents of a directory:
|
||||||
|
|
||||||
- **Bucket Policies:** Manage bucket policies to control access permissions.
|
```typescript
|
||||||
- **Object Metadata:** Retrieve and modify object metadata.
|
async function listDirectoryContents(bucketName: string, directoryPath: string) {
|
||||||
- **Cloud-Agnostic:** Designed to work with multiple cloud providers, allowing for easier migration or multi-cloud strategies.
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
|
if (myBucket) {
|
||||||
|
const baseDirectory: Directory = await myBucket.getBaseDirectory();
|
||||||
|
const targetDirectory: Directory = await baseDirectory.getSubDirectoryByName(directoryPath);
|
||||||
|
|
||||||
|
console.log('Listing directories:');
|
||||||
|
const directories = await targetDirectory.listDirectories();
|
||||||
|
directories.forEach(dir => {
|
||||||
|
console.log(`- ${dir.name}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
console.log('Listing files:');
|
||||||
|
const files = await targetDirectory.listFiles();
|
||||||
|
files.forEach(file => {
|
||||||
|
console.log(`- ${file.name}`);
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
listDirectoryContents("exampleBucket", "some/directory/path");
|
||||||
|
```
|
||||||
|
|
||||||
|
To create a file within a directory:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
async function createFileInDirectory(bucketName: string, directoryPath: string, fileName: string, fileContent: string) {
|
||||||
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
|
if (myBucket) {
|
||||||
|
const baseDirectory: Directory = await myBucket.getBaseDirectory();
|
||||||
|
const targetDirectory: Directory = await baseDirectory.getSubDirectoryByName(directoryPath);
|
||||||
|
await targetDirectory.createEmptyFile(fileName); // Create an empty file
|
||||||
|
const file = new File({ directoryRefArg: targetDirectory, fileName });
|
||||||
|
await file.updateWithContents({ contents: fileContent });
|
||||||
|
console.log(`File created: ${fileName}`);
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
createFileInDirectory("exampleBucket", "some/directory", "newfile.txt", "Hello, world!");
|
||||||
|
```
|
||||||
|
|
||||||
|
### Advanced Features
|
||||||
|
|
||||||
|
#### Bucket Policies
|
||||||
|
|
||||||
|
Manage bucket policies to control access permissions. This feature depends on the policies provided by the storage service (e.g., AWS S3, MinIO).
|
||||||
|
|
||||||
|
#### Object Metadata
|
||||||
|
|
||||||
|
Retrieve and modify object metadata. Metadata can be useful for storing additional information about an object.
|
||||||
|
|
||||||
|
To retrieve metadata:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
async function getObjectMetadata(bucketName: string, filePath: string) {
|
||||||
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
|
if (myBucket) {
|
||||||
|
const metadata = await mySmartBucket.minioClient.statObject(bucketName, filePath);
|
||||||
|
console.log("Object metadata:", metadata);
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
getObjectMetadata("exampleBucket", "path/to/object.txt");
|
||||||
|
```
|
||||||
|
|
||||||
|
To update metadata:
|
||||||
|
|
||||||
|
```typescript
|
||||||
|
async function updateObjectMetadata(bucketName: string, filePath: string, newMetadata: { [key: string]: string }) {
|
||||||
|
const myBucket: Bucket = await mySmartBucket.getBucketByName(bucketName);
|
||||||
|
if (myBucket) {
|
||||||
|
await myBucket.copyObject({
|
||||||
|
objectKey: filePath,
|
||||||
|
nativeMetadata: newMetadata,
|
||||||
|
deleteExistingNativeMetadata: false,
|
||||||
|
});
|
||||||
|
console.log(`Metadata updated for ${filePath}`);
|
||||||
|
} else {
|
||||||
|
console.error(`Bucket ${bucketName} does not exist.`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Use the function
|
||||||
|
updateObjectMetadata("exampleBucket", "path/to/object.txt", {
|
||||||
|
customKey: "customValue"
|
||||||
|
});
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Cloud Agnostic
|
||||||
|
|
||||||
|
`@push.rocks/smartbucket` is designed to work with multiple cloud providers, allowing for easier migration or multi-cloud strategies. This means you can switch from one provider to another with minimal changes to your codebase.
|
||||||
|
|
||||||
Remember, each cloud provider has specific features and limitations. `@push.rocks/smartbucket` aims to abstract common functionalities, but always refer to the specific cloud provider's documentation for advanced features or limitations.
|
Remember, each cloud provider has specific features and limitations. `@push.rocks/smartbucket` aims to abstract common functionalities, but always refer to the specific cloud provider's documentation for advanced features or limitations.
|
||||||
|
|
||||||
> **Note:** This document focuses on basic operations to get you started with `@push.rocks/smartbucket`. For advanced usage, including streaming data, managing bucket policies, and handling large file uploads, refer to the detailed API documentation and examples.
|
This guide covers the basic to advanced scenarios of using `@push.rocks/smartbucket`. For further details, refer to the API documentation and examples.
|
||||||
|
|
||||||
## License and Legal Information
|
## License and Legal Information
|
||||||
|
|
||||||
|
0
test/helpers/prepare.ts
Normal file
0
test/helpers/prepare.ts
Normal file
7
test/test.metadata.ts
Normal file
7
test/test.metadata.ts
Normal file
@ -0,0 +1,7 @@
|
|||||||
|
import { tap, expect } from '@push.rocks/tapbundle';
|
||||||
|
|
||||||
|
tap.test('test metadata functionality', async () => {
|
||||||
|
|
||||||
|
})
|
||||||
|
|
||||||
|
tap.start();
|
63
test/test.trash.ts
Normal file
63
test/test.trash.ts
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
|
||||||
|
import { jestExpect } from '@push.rocks/tapbundle/node';
|
||||||
|
import { Qenv } from '@push.rocks/qenv';
|
||||||
|
|
||||||
|
import * as smartbucket from '../ts/index.js';
|
||||||
|
|
||||||
|
const testQenv = new Qenv('./', './.nogit/');
|
||||||
|
|
||||||
|
let testSmartbucket: smartbucket.SmartBucket;
|
||||||
|
let myBucket: smartbucket.Bucket;
|
||||||
|
let baseDirectory: smartbucket.Directory;
|
||||||
|
|
||||||
|
tap.test('should create a valid smartbucket', async () => {
|
||||||
|
testSmartbucket = new smartbucket.SmartBucket({
|
||||||
|
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
|
||||||
|
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
|
||||||
|
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||||
|
});
|
||||||
|
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
|
||||||
|
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
|
||||||
|
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||||
|
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should clean all contents', async () => {
|
||||||
|
await myBucket.cleanAllContents();
|
||||||
|
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
|
||||||
|
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should delete a file into the normally', async () => {
|
||||||
|
const path = 'trashtest/trashme.txt';
|
||||||
|
const file = await myBucket.fastPut({
|
||||||
|
path,
|
||||||
|
contents: 'I\'m in the trash test content!',
|
||||||
|
});
|
||||||
|
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
|
||||||
|
console.log(fileMetadata.toString());
|
||||||
|
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
|
||||||
|
await file.delete({ mode: 'permanent' });
|
||||||
|
expect((await (await myBucket.getBaseDirectory()).listFiles()).length).toEqual(0);
|
||||||
|
expect((await (await myBucket.getBaseDirectory()).listDirectories()).length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should put a file into the trash', async () => {
|
||||||
|
const path = 'trashtest/trashme.txt';
|
||||||
|
const file = await myBucket.fastPut({
|
||||||
|
path,
|
||||||
|
contents: 'I\'m in the trash test content!',
|
||||||
|
});
|
||||||
|
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
|
||||||
|
console.log(fileMetadata.toString());
|
||||||
|
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
|
||||||
|
await file.delete({ mode: 'trash' });
|
||||||
|
jestExpect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({
|
||||||
|
custom_recycle: {
|
||||||
|
deletedAt: jestExpect.any(Number),
|
||||||
|
originalPath: "trashtest/trashme.txt",
|
||||||
|
},
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
40
test/test.ts
40
test/test.ts
@ -11,24 +11,28 @@ let baseDirectory: smartbucket.Directory;
|
|||||||
|
|
||||||
tap.test('should create a valid smartbucket', async () => {
|
tap.test('should create a valid smartbucket', async () => {
|
||||||
testSmartbucket = new smartbucket.SmartBucket({
|
testSmartbucket = new smartbucket.SmartBucket({
|
||||||
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
|
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
|
||||||
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
|
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
|
||||||
endpoint: 's3.eu-central-1.wasabisys.com',
|
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
|
||||||
});
|
});
|
||||||
|
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
|
||||||
|
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
|
||||||
|
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
||||||
|
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should clean all contents', async () => {
|
||||||
|
await myBucket.cleanAllContents();
|
||||||
|
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
|
||||||
|
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.skip.test('should create testbucket', async () => {
|
tap.skip.test('should create testbucket', async () => {
|
||||||
// await testSmartbucket.createBucket('testzone');
|
// await testSmartbucket.createBucket('testzone2');
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.skip.test('should remove testbucket', async () => {
|
tap.skip.test('should remove testbucket', async () => {
|
||||||
// await testSmartbucket.removeBucket('testzone');
|
// await testSmartbucket.removeBucket('testzone2');
|
||||||
});
|
|
||||||
|
|
||||||
tap.test('should get a bucket', async () => {
|
|
||||||
myBucket = await testSmartbucket.getBucketByName('testzone');
|
|
||||||
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
|
|
||||||
expect(myBucket.name).toEqual('testzone');
|
|
||||||
});
|
});
|
||||||
|
|
||||||
// Fast operations
|
// Fast operations
|
||||||
@ -43,9 +47,12 @@ tap.test('should get data in bucket', async () => {
|
|||||||
const fileString = await myBucket.fastGet({
|
const fileString = await myBucket.fastGet({
|
||||||
path: 'hithere/socool.txt',
|
path: 'hithere/socool.txt',
|
||||||
});
|
});
|
||||||
const fileStringStream = await myBucket.fastGetStream({
|
const fileStringStream = await myBucket.fastGetStream(
|
||||||
path: 'hithere/socool.txt',
|
{
|
||||||
});
|
path: 'hithere/socool.txt',
|
||||||
|
},
|
||||||
|
'nodestream'
|
||||||
|
);
|
||||||
console.log(fileString);
|
console.log(fileString);
|
||||||
});
|
});
|
||||||
|
|
||||||
@ -79,7 +86,7 @@ tap.test('prepare for directory style tests', async () => {
|
|||||||
contents: 'dir3/dir4/file1.txt content',
|
contents: 'dir3/dir4/file1.txt content',
|
||||||
});
|
});
|
||||||
await myBucket.fastPut({
|
await myBucket.fastPut({
|
||||||
path: 'file1.txt',
|
path: '/file1.txt',
|
||||||
contents: 'file1 content',
|
contents: 'file1 content',
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
@ -99,8 +106,9 @@ tap.test('should get base directory', async () => {
|
|||||||
tap.test('should correctly build paths for sub directories', async () => {
|
tap.test('should correctly build paths for sub directories', async () => {
|
||||||
const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
|
const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
|
||||||
expect(dir4).toBeInstanceOf(smartbucket.Directory);
|
expect(dir4).toBeInstanceOf(smartbucket.Directory);
|
||||||
const dir4BasePath = dir4.getBasePath();
|
const dir4BasePath = dir4?.getBasePath();
|
||||||
console.log(dir4BasePath);
|
console.log(dir4BasePath);
|
||||||
|
expect(dir4BasePath).toEqual('dir3/dir4/');
|
||||||
});
|
});
|
||||||
|
|
||||||
tap.test('clean up directory style tests', async () => {
|
tap.test('clean up directory style tests', async () => {
|
||||||
|
@ -1,8 +1,8 @@
|
|||||||
/**
|
/**
|
||||||
* autocreated commitinfo by @pushrocks/commitinfo
|
* autocreated commitinfo by @push.rocks/commitinfo
|
||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@push.rocks/smartbucket',
|
name: '@push.rocks/smartbucket',
|
||||||
version: '3.0.1',
|
version: '3.2.0',
|
||||||
description: 'A TypeScript library for simple cloud independent object storage with support for buckets, directories, and files.'
|
description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
|
||||||
}
|
}
|
||||||
|
517
ts/classes.bucket.ts
Normal file
517
ts/classes.bucket.ts
Normal file
@ -0,0 +1,517 @@
|
|||||||
|
// classes.bucket.ts
|
||||||
|
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
import { SmartBucket } from './classes.smartbucket.js';
|
||||||
|
import { Directory } from './classes.directory.js';
|
||||||
|
import { File } from './classes.file.js';
|
||||||
|
import { Trash } from './classes.trash.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The bucket class exposes the basic functionality of a bucket.
|
||||||
|
* The functions of the bucket alone are enough to
|
||||||
|
* operate in S3 basic fashion on blobs of data.
|
||||||
|
*/
|
||||||
|
export class Bucket {
|
||||||
|
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
|
||||||
|
const command = new plugins.s3.ListBucketsCommand({});
|
||||||
|
const buckets = await smartbucketRef.s3Client.send(command);
|
||||||
|
const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);
|
||||||
|
|
||||||
|
if (foundBucket) {
|
||||||
|
console.log(`bucket with name ${bucketNameArg} exists.`);
|
||||||
|
console.log(`Taking this as base for new Bucket instance`);
|
||||||
|
return new this(smartbucketRef, bucketNameArg);
|
||||||
|
} else {
|
||||||
|
console.log(`did not find bucket by name: ${bucketNameArg}`);
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
||||||
|
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
|
||||||
|
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
|
||||||
|
return new Bucket(smartbucketRef, bucketName);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
||||||
|
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
|
||||||
|
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
|
||||||
|
}
|
||||||
|
|
||||||
|
public smartbucketRef: SmartBucket;
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
constructor(smartbucketRef: SmartBucket, bucketName: string) {
|
||||||
|
this.smartbucketRef = smartbucketRef;
|
||||||
|
this.name = bucketName;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the base directory of the bucket
|
||||||
|
*/
|
||||||
|
public async getBaseDirectory(): Promise<Directory> {
|
||||||
|
return new Directory(this, null!, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets the trash directory
|
||||||
|
*/
|
||||||
|
public async getTrash(): Promise<Trash> {
|
||||||
|
const trash = new Trash(this);
|
||||||
|
return trash;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getDirectoryFromPath(
|
||||||
|
pathDescriptorArg: interfaces.IPathDecriptor
|
||||||
|
): Promise<Directory> {
|
||||||
|
if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
|
||||||
|
return this.getBaseDirectory();
|
||||||
|
}
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
|
||||||
|
const baseDirectory = await this.getBaseDirectory();
|
||||||
|
return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
|
||||||
|
getEmptyDirectory: true,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// ===============
|
||||||
|
// Fast Operations
|
||||||
|
// ===============
|
||||||
|
|
||||||
|
/**
|
||||||
|
* store file
|
||||||
|
*/
|
||||||
|
public async fastPut(
|
||||||
|
optionsArg: interfaces.IPathDecriptor & {
|
||||||
|
contents: string | Buffer;
|
||||||
|
overwrite?: boolean;
|
||||||
|
}
|
||||||
|
): Promise<File> {
|
||||||
|
try {
|
||||||
|
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
|
||||||
|
const exists = await this.fastExists({ path: reducedPath });
|
||||||
|
|
||||||
|
if (exists && !optionsArg.overwrite) {
|
||||||
|
console.error(`Object already exists at path '${reducedPath}' in bucket '${this.name}'.`);
|
||||||
|
return;
|
||||||
|
} else if (exists && optionsArg.overwrite) {
|
||||||
|
console.log(
|
||||||
|
`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const command = new plugins.s3.PutObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: reducedPath,
|
||||||
|
Body: optionsArg.contents,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
|
||||||
|
console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
|
||||||
|
const parsedPath = plugins.path.parse(reducedPath);
|
||||||
|
return new File({
|
||||||
|
directoryRefArg: await this.getDirectoryFromPath({
|
||||||
|
path: parsedPath.dir,
|
||||||
|
}),
|
||||||
|
fileName: parsedPath.base,
|
||||||
|
});
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* get file
|
||||||
|
*/
|
||||||
|
public async fastGet(optionsArg: { path: string }): Promise<Buffer> {
|
||||||
|
const done = plugins.smartpromise.defer();
|
||||||
|
let completeFile: Buffer;
|
||||||
|
const replaySubject = await this.fastGetReplaySubject(optionsArg);
|
||||||
|
const subscription = replaySubject.subscribe({
|
||||||
|
next: (chunk) => {
|
||||||
|
if (completeFile) {
|
||||||
|
completeFile = Buffer.concat([completeFile, chunk]);
|
||||||
|
} else {
|
||||||
|
completeFile = chunk;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
complete: () => {
|
||||||
|
done.resolve();
|
||||||
|
subscription.unsubscribe();
|
||||||
|
},
|
||||||
|
error: (err) => {
|
||||||
|
console.log(err);
|
||||||
|
},
|
||||||
|
});
|
||||||
|
await done.promise;
|
||||||
|
return completeFile;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* good when time to first byte is important
|
||||||
|
* and multiple subscribers are expected
|
||||||
|
* @param optionsArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async fastGetReplaySubject(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
|
||||||
|
const command = new plugins.s3.GetObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
const response = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
|
||||||
|
|
||||||
|
// Convert the stream to a format that supports piping
|
||||||
|
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||||
|
if (typeof stream.pipe === 'function') {
|
||||||
|
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
|
||||||
|
writeFunction: async (chunk) => {
|
||||||
|
replaySubject.next(chunk);
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
finalFunction: async (cb) => {
|
||||||
|
replaySubject.complete();
|
||||||
|
return;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
stream.pipe(duplexStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
return replaySubject;
|
||||||
|
}
|
||||||
|
|
||||||
|
public fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'webstream'
|
||||||
|
): Promise<ReadableStream>;
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'nodestream'
|
||||||
|
): Promise<plugins.stream.Readable>;
|
||||||
|
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: { path: string },
|
||||||
|
typeArg: 'webstream' | 'nodestream' = 'nodestream'
|
||||||
|
): Promise<ReadableStream | plugins.stream.Readable> {
|
||||||
|
const command = new plugins.s3.GetObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
const response = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||||
|
|
||||||
|
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
|
||||||
|
writeFunction: async (chunk) => {
|
||||||
|
return chunk;
|
||||||
|
},
|
||||||
|
finalFunction: async (cb) => {
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
if (typeof stream.pipe === 'function') {
|
||||||
|
stream.pipe(duplexStream);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeArg === 'nodestream') {
|
||||||
|
return duplexStream;
|
||||||
|
}
|
||||||
|
if (typeArg === 'webstream') {
|
||||||
|
return (await duplexStream.getWebStreams()).readable;
|
||||||
|
}
|
||||||
|
throw new Error('unknown typeArg');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* store file as stream
|
||||||
|
*/
|
||||||
|
public async fastPutStream(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
readableStream: plugins.stream.Readable | ReadableStream;
|
||||||
|
nativeMetadata?: { [key: string]: string };
|
||||||
|
overwrite?: boolean;
|
||||||
|
}): Promise<void> {
|
||||||
|
try {
|
||||||
|
const exists = await this.fastExists({ path: optionsArg.path });
|
||||||
|
|
||||||
|
if (exists && !optionsArg.overwrite) {
|
||||||
|
console.error(
|
||||||
|
`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
} else if (exists && optionsArg.overwrite) {
|
||||||
|
console.log(
|
||||||
|
`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const command = new plugins.s3.PutObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
Body: optionsArg.readableStream,
|
||||||
|
Metadata: optionsArg.nativeMetadata,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fastCopy(optionsArg: {
|
||||||
|
sourcePath: string;
|
||||||
|
destinationPath?: string;
|
||||||
|
targetBucket?: Bucket;
|
||||||
|
nativeMetadata?: { [key: string]: string };
|
||||||
|
deleteExistingNativeMetadata?: boolean;
|
||||||
|
}): Promise<void> {
|
||||||
|
try {
|
||||||
|
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;
|
||||||
|
|
||||||
|
// Retrieve current object information to use in copy conditions
|
||||||
|
const currentObjInfo = await this.smartbucketRef.s3Client.send(
|
||||||
|
new plugins.s3.HeadObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.sourcePath,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
|
||||||
|
// Prepare new metadata
|
||||||
|
const newNativeMetadata = {
|
||||||
|
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
|
||||||
|
...optionsArg.nativeMetadata,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Define the copy operation
|
||||||
|
const copySource = `${this.name}/${optionsArg.sourcePath}`;
|
||||||
|
const command = new plugins.s3.CopyObjectCommand({
|
||||||
|
Bucket: targetBucketName,
|
||||||
|
CopySource: copySource,
|
||||||
|
Key: optionsArg.destinationPath || optionsArg.sourcePath,
|
||||||
|
Metadata: newNativeMetadata,
|
||||||
|
MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
} catch (err) {
|
||||||
|
console.error('Error updating metadata:', err);
|
||||||
|
throw err; // rethrow to allow caller to handle
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Move object from one path to another within the same bucket or to another bucket
|
||||||
|
*/
|
||||||
|
public async fastMove(optionsArg: {
|
||||||
|
sourcePath: string;
|
||||||
|
destinationPath: string;
|
||||||
|
targetBucket?: Bucket;
|
||||||
|
overwrite?: boolean;
|
||||||
|
}): Promise<void> {
|
||||||
|
try {
|
||||||
|
const destinationBucket = optionsArg.targetBucket || this;
|
||||||
|
const exists = await destinationBucket.fastExists({
|
||||||
|
path: optionsArg.destinationPath,
|
||||||
|
});
|
||||||
|
|
||||||
|
if (exists && !optionsArg.overwrite) {
|
||||||
|
console.error(
|
||||||
|
`Object already exists at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
|
||||||
|
);
|
||||||
|
return;
|
||||||
|
} else if (exists && optionsArg.overwrite) {
|
||||||
|
console.log(
|
||||||
|
`Overwriting existing object at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
console.log(
|
||||||
|
`Moving object to path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
await this.fastCopy(optionsArg);
|
||||||
|
await this.fastRemove({ path: optionsArg.sourcePath });
|
||||||
|
|
||||||
|
console.log(
|
||||||
|
`Object '${optionsArg.sourcePath}' has been successfully moved to '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
|
||||||
|
);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Error moving object from '${optionsArg.sourcePath}' to '${optionsArg.destinationPath}':`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* removeObject
|
||||||
|
*/
|
||||||
|
public async fastRemove(optionsArg: { path: string }) {
|
||||||
|
const command = new plugins.s3.DeleteObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* check whether file exists
|
||||||
|
* @param optionsArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async fastExists(optionsArg: { path: string }): Promise<boolean> {
|
||||||
|
try {
|
||||||
|
const command = new plugins.s3.HeadObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
});
|
||||||
|
await this.smartbucketRef.s3Client.send(command);
|
||||||
|
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
|
||||||
|
return true;
|
||||||
|
} catch (error) {
|
||||||
|
if (error.name === 'NotFound') {
|
||||||
|
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
|
||||||
|
return false;
|
||||||
|
} else {
|
||||||
|
console.error('Error checking object existence:', error);
|
||||||
|
throw error; // Rethrow if it's not a NotFound error to handle unexpected issues
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* deletes this bucket
|
||||||
|
*/
|
||||||
|
public async delete() {
|
||||||
|
await this.smartbucketRef.s3Client.send(
|
||||||
|
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const command = new plugins.s3.HeadObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: checkPath,
|
||||||
|
});
|
||||||
|
return this.smartbucketRef.s3Client.send(command);
|
||||||
|
}
|
||||||
|
|
||||||
|
public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.name,
|
||||||
|
Prefix: checkPath,
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
return !!CommonPrefixes && CommonPrefixes.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
|
||||||
|
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.name,
|
||||||
|
Prefix: checkPath,
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const { Contents } = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
return !!Contents && Contents.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getMagicBytes(optionsArg: { path: string; length: number }): Promise<Buffer> {
|
||||||
|
try {
|
||||||
|
const command = new plugins.s3.GetObjectCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Key: optionsArg.path,
|
||||||
|
Range: `bytes=0-${optionsArg.length - 1}`,
|
||||||
|
});
|
||||||
|
const response = await this.smartbucketRef.s3Client.send(command);
|
||||||
|
const chunks = [];
|
||||||
|
const stream = response.Body as any; // SdkStreamMixin includes readable stream
|
||||||
|
|
||||||
|
for await (const chunk of stream) {
|
||||||
|
chunks.push(chunk);
|
||||||
|
}
|
||||||
|
return Buffer.concat(chunks);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(
|
||||||
|
`Error retrieving magic bytes from object at path '${optionsArg.path}' in bucket '${this.name}':`,
|
||||||
|
error
|
||||||
|
);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public async cleanAllContents(): Promise<void> {
|
||||||
|
try {
|
||||||
|
// Define the command type explicitly
|
||||||
|
const listCommandInput: plugins.s3.ListObjectsV2CommandInput = {
|
||||||
|
Bucket: this.name,
|
||||||
|
};
|
||||||
|
|
||||||
|
let isTruncated = true;
|
||||||
|
let continuationToken: string | undefined = undefined;
|
||||||
|
|
||||||
|
while (isTruncated) {
|
||||||
|
// Add the continuation token to the input if present
|
||||||
|
const listCommand = new plugins.s3.ListObjectsV2Command({
|
||||||
|
...listCommandInput,
|
||||||
|
ContinuationToken: continuationToken,
|
||||||
|
});
|
||||||
|
|
||||||
|
// Explicitly type the response
|
||||||
|
const response: plugins.s3.ListObjectsV2Output =
|
||||||
|
await this.smartbucketRef.s3Client.send(listCommand);
|
||||||
|
|
||||||
|
console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);
|
||||||
|
|
||||||
|
if (response.Contents && response.Contents.length > 0) {
|
||||||
|
// Delete objects in batches, mapping each item to { Key: string }
|
||||||
|
const deleteCommand = new plugins.s3.DeleteObjectsCommand({
|
||||||
|
Bucket: this.name,
|
||||||
|
Delete: {
|
||||||
|
Objects: response.Contents.map((item) => ({ Key: item.Key! })),
|
||||||
|
Quiet: true,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
|
||||||
|
await this.smartbucketRef.s3Client.send(deleteCommand);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update continuation token and truncation status
|
||||||
|
isTruncated = response.IsTruncated || false;
|
||||||
|
continuationToken = response.NextContinuationToken;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`All contents in bucket '${this.name}' have been deleted.`);
|
||||||
|
} catch (error) {
|
||||||
|
console.error(`Error cleaning contents of bucket '${this.name}':`, error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
371
ts/classes.directory.ts
Normal file
371
ts/classes.directory.ts
Normal file
@ -0,0 +1,371 @@
|
|||||||
|
// classes.directory.ts
|
||||||
|
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { Bucket } from './classes.bucket.js';
|
||||||
|
import { File } from './classes.file.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
|
||||||
|
export class Directory {
|
||||||
|
public bucketRef: Bucket;
|
||||||
|
public parentDirectoryRef: Directory;
|
||||||
|
public name: string;
|
||||||
|
|
||||||
|
public tree: string[];
|
||||||
|
public files: string[];
|
||||||
|
public folders: string[];
|
||||||
|
|
||||||
|
constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
|
||||||
|
this.bucketRef = bucketRefArg;
|
||||||
|
this.parentDirectoryRef = parentDirectory;
|
||||||
|
this.name = name;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns an array of parent directories
|
||||||
|
*/
|
||||||
|
public getParentDirectories(): Directory[] {
|
||||||
|
let parentDirectories: Directory[] = [];
|
||||||
|
if (this.parentDirectoryRef) {
|
||||||
|
parentDirectories.push(this.parentDirectoryRef);
|
||||||
|
parentDirectories = parentDirectories.concat(this.parentDirectoryRef.getParentDirectories());
|
||||||
|
}
|
||||||
|
return parentDirectories;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* returns the directory level
|
||||||
|
*/
|
||||||
|
public getDirectoryLevel(): number {
|
||||||
|
return this.getParentDirectories().length;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* updates the base path
|
||||||
|
*/
|
||||||
|
public getBasePath(): string {
|
||||||
|
const parentDirectories = this.getParentDirectories();
|
||||||
|
let basePath = '';
|
||||||
|
for (const parentDir of parentDirectories) {
|
||||||
|
if (!parentDir.name && !basePath) {
|
||||||
|
basePath = this.name + '/';
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (parentDir.name && !basePath) {
|
||||||
|
basePath = parentDir.name + '/' + this.name + '/';
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (parentDir.name && basePath) {
|
||||||
|
basePath = parentDir.name + '/' + basePath;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return basePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets a file by name
|
||||||
|
*/
|
||||||
|
public async getFile(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
createWithContents?: string | Buffer;
|
||||||
|
getFromTrash?: boolean;
|
||||||
|
}): Promise<File | null> {
|
||||||
|
const pathDescriptor = {
|
||||||
|
directory: this,
|
||||||
|
path: optionsArg.path,
|
||||||
|
};
|
||||||
|
const exists = await this.bucketRef.fastExists({
|
||||||
|
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
|
||||||
|
});
|
||||||
|
if (!exists && optionsArg.getFromTrash) {
|
||||||
|
const trash = await this.bucketRef.getTrash();
|
||||||
|
const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
|
||||||
|
return trashedFile;
|
||||||
|
}
|
||||||
|
if (!exists && !optionsArg.createWithContents) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
if (!exists && optionsArg.createWithContents) {
|
||||||
|
await File.create({
|
||||||
|
directory: this,
|
||||||
|
name: optionsArg.path,
|
||||||
|
contents: optionsArg.createWithContents,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
return new File({
|
||||||
|
directoryRefArg: this,
|
||||||
|
fileName: optionsArg.path,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets a file strictly
|
||||||
|
* @param args
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async getFileStrict(...args: Parameters<Directory['getFile']>) {
|
||||||
|
const file = await this.getFile(...args);
|
||||||
|
if (!file) {
|
||||||
|
throw new Error(`File not found at path '${args[0].path}'`);
|
||||||
|
}
|
||||||
|
return file;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* lists all files
|
||||||
|
*/
|
||||||
|
public async listFiles(): Promise<File[]> {
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.bucketRef.name,
|
||||||
|
Prefix: this.getBasePath(),
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||||
|
const fileArray: File[] = [];
|
||||||
|
|
||||||
|
response.Contents?.forEach((item) => {
|
||||||
|
if (item.Key && !item.Key.endsWith('/')) {
|
||||||
|
const subtractedPath = item.Key.replace(this.getBasePath(), '');
|
||||||
|
if (!subtractedPath.includes('/')) {
|
||||||
|
fileArray.push(
|
||||||
|
new File({
|
||||||
|
directoryRefArg: this,
|
||||||
|
fileName: subtractedPath,
|
||||||
|
})
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return fileArray;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* lists all folders
|
||||||
|
*/
|
||||||
|
public async listDirectories(): Promise<Directory[]> {
|
||||||
|
try {
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.bucketRef.name,
|
||||||
|
Prefix: this.getBasePath(),
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||||
|
const directoryArray: Directory[] = [];
|
||||||
|
|
||||||
|
if (response.CommonPrefixes) {
|
||||||
|
response.CommonPrefixes.forEach((item) => {
|
||||||
|
if (item.Prefix) {
|
||||||
|
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
|
||||||
|
if (subtractedPath.endsWith('/')) {
|
||||||
|
const dirName = subtractedPath.slice(0, -1);
|
||||||
|
// Ensure the directory name is not empty (which would indicate the base directory itself)
|
||||||
|
if (dirName) {
|
||||||
|
directoryArray.push(new Directory(this.bucketRef, this, dirName));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return directoryArray;
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Error listing directories:', error);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets an array that has all objects with a certain prefix
|
||||||
|
*/
|
||||||
|
public async getTreeArray() {
|
||||||
|
const command = new plugins.s3.ListObjectsV2Command({
|
||||||
|
Bucket: this.bucketRef.name,
|
||||||
|
Prefix: this.getBasePath(),
|
||||||
|
Delimiter: '/',
|
||||||
|
});
|
||||||
|
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
|
||||||
|
return response.Contents;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* gets a sub directory by name
|
||||||
|
*/
|
||||||
|
public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
|
||||||
|
getEmptyDirectory?: boolean;
|
||||||
|
createWithInitializerFile?: boolean;
|
||||||
|
} = {}): Promise<Directory | null> {
|
||||||
|
const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");
|
||||||
|
|
||||||
|
optionsArg = {
|
||||||
|
getEmptyDirectory: false,
|
||||||
|
createWithInitializerFile: false,
|
||||||
|
...optionsArg,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
const getDirectory = async (directoryArg: Directory, dirNameToSearch: string, isFinalDirectory: boolean) => {
|
||||||
|
const directories = await directoryArg.listDirectories();
|
||||||
|
let returnDirectory = directories.find((directory) => {
|
||||||
|
return directory.name === dirNameToSearch;
|
||||||
|
});
|
||||||
|
if (returnDirectory) {
|
||||||
|
return returnDirectory;
|
||||||
|
}
|
||||||
|
if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
|
||||||
|
returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
|
||||||
|
}
|
||||||
|
if (isFinalDirectory && optionsArg.createWithInitializerFile) {
|
||||||
|
returnDirectory?.createEmptyFile('00init.txt');
|
||||||
|
}
|
||||||
|
return returnDirectory || null;
|
||||||
|
};
|
||||||
|
|
||||||
|
let wantedDirectory: Directory | null = null;
|
||||||
|
let counter = 0;
|
||||||
|
for (const dirNameToSearch of dirNameArray) {
|
||||||
|
counter++;
|
||||||
|
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
|
||||||
|
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
|
||||||
|
}
|
||||||
|
|
||||||
|
return wantedDirectory || null;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
|
||||||
|
const directory = await this.getSubDirectoryByName(...args);
|
||||||
|
if (!directory) {
|
||||||
|
throw new Error(`Directory not found at path '${args[0]}'`);
|
||||||
|
}
|
||||||
|
return directory;
|
||||||
|
}
|
||||||
|
|
||||||
|
  /**
   * moves the directory
   * currently a stub: always throws until directory moving is implemented
   * @throws Error unconditionally
   */
  public async move() {
    // TODO
    throw new Error('Moving a directory is not yet implemented');
  }
|
||||||
|
|
||||||
|
/**
|
||||||
|
* creates an empty file within this directory
|
||||||
|
* @param relativePathArg
|
||||||
|
*/
|
||||||
|
public async createEmptyFile(relativePathArg: string) {
|
||||||
|
const emptyFile = await File.create({
|
||||||
|
directory: this,
|
||||||
|
name: relativePathArg,
|
||||||
|
contents: '',
|
||||||
|
});
|
||||||
|
return emptyFile;
|
||||||
|
}
|
||||||
|
|
||||||
|
// file operations
|
||||||
|
public async fastPut(optionsArg: { path: string; contents: string | Buffer }) {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
await this.bucketRef.fastPut({
|
||||||
|
path,
|
||||||
|
contents: optionsArg.contents,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
public async fastGet(optionsArg: { path: string }) {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
const result = await this.bucketRef.fastGet({
|
||||||
|
path,
|
||||||
|
});
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
public fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'webstream'
|
||||||
|
): Promise<ReadableStream>;
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: {
|
||||||
|
path: string;
|
||||||
|
},
|
||||||
|
typeArg: 'nodestream'
|
||||||
|
): Promise<plugins.stream.Readable>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* fastGetStream
|
||||||
|
* @param optionsArg
|
||||||
|
* @returns
|
||||||
|
*/
|
||||||
|
public async fastGetStream(
|
||||||
|
optionsArg: { path: string },
|
||||||
|
typeArg: 'webstream' | 'nodestream'
|
||||||
|
): Promise<ReadableStream | plugins.stream.Readable> {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
const result = await this.bucketRef.fastGetStream(
|
||||||
|
{
|
||||||
|
path,
|
||||||
|
},
|
||||||
|
typeArg as any
|
||||||
|
);
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* fast put stream
|
||||||
|
*/
|
||||||
|
public async fastPutStream(optionsArg: {
|
||||||
|
path: string;
|
||||||
|
stream: plugins.stream.Readable;
|
||||||
|
}): Promise<void> {
|
||||||
|
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
||||||
|
await this.bucketRef.fastPutStream({
|
||||||
|
path,
|
||||||
|
readableStream: optionsArg.stream,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* removes a file within the directory
|
||||||
|
* uses file class to make sure effects for metadata etc. are handled correctly
|
||||||
|
* @param optionsArg
|
||||||
|
*/
|
||||||
|
public async fastRemove(optionsArg: {
|
||||||
|
path: string
|
||||||
|
/**
|
||||||
|
* wether the file should be placed into trash. Default is false.
|
||||||
|
*/
|
||||||
|
mode?: 'permanent' | 'trash';
|
||||||
|
}) {
|
||||||
|
const file = await this.getFile({
|
||||||
|
path: optionsArg.path,
|
||||||
|
});
|
||||||
|
await file.delete({
|
||||||
|
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* deletes the directory with all its contents
|
||||||
|
*/
|
||||||
|
public async delete(optionsArg: {
|
||||||
|
mode?: 'permanent' | 'trash';
|
||||||
|
}) {
|
||||||
|
const deleteDirectory = async (directoryArg: Directory) => {
|
||||||
|
const childDirectories = await directoryArg.listDirectories();
|
||||||
|
if (childDirectories.length === 0) {
|
||||||
|
console.log('Directory empty! Path complete!');
|
||||||
|
} else {
|
||||||
|
for (const childDir of childDirectories) {
|
||||||
|
await deleteDirectory(childDir);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
const files = await directoryArg.listFiles();
|
||||||
|
for (const file of files) {
|
||||||
|
await file.delete({
|
||||||
|
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
|
||||||
|
})
|
||||||
|
}
|
||||||
|
};
|
||||||
|
await deleteDirectory(this);
|
||||||
|
}
|
||||||
|
}
|
273
ts/classes.file.ts
Normal file
273
ts/classes.file.ts
Normal file
@ -0,0 +1,273 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
import { Directory } from './classes.directory.js';
|
||||||
|
import { MetaData } from './classes.metadata.js';
|
||||||
|
|
||||||
|
/**
 * represents a file in a directory
 */
export class File {
  // STATIC

  /**
   * creates a file in draft mode
   * you need to call .save() to store it in s3
   * NOTE(review): contents are actually uploaded right here via fastPut /
   * fastPutStream, so the ".save()" remark looks stale — confirm
   * @param optionsArg
   */
  public static async create(optionsArg: {
    directory: Directory;
    name: string;
    contents: Buffer | string | plugins.stream.Readable;
    /**
     * if contents are of type string, you can specify the encoding here
     */
    encoding?: 'utf8' | 'binary';
  }): Promise<File> {
    // strings are converted to a Buffer first so both branches below only
    // deal with Buffer or stream payloads
    const contents =
      typeof optionsArg.contents === 'string'
        ? Buffer.from(optionsArg.contents, optionsArg.encoding)
        : optionsArg.contents;
    const file = new File({
      directoryRefArg: optionsArg.directory,
      fileName: optionsArg.name,
    });
    if (contents instanceof plugins.stream.Readable) {
      await optionsArg.directory.fastPutStream({
        path: optionsArg.name,
        stream: contents,
      });
    } else {
      await optionsArg.directory.fastPut({
        path: optionsArg.name,
        contents: contents,
      });
    }
    return file;
  }

  // INSTANCE
  // the directory this file lives in
  public parentDirectoryRef: Directory;
  // file name relative to parentDirectoryRef
  public name: string;

  /**
   * get the full path to the file
   * @returns the full path to the file
   */
  public getBasePath(): string {
    return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
  }

  constructor(optionsArg: { directoryRefArg: Directory; fileName: string }) {
    this.parentDirectoryRef = optionsArg.directoryRefArg;
    this.name = optionsArg.fileName;
  }

  /**
   * downloads the file and decodes it as a string (default encoding)
   */
  public async getContentsAsString(): Promise<string> {
    const fileBuffer = await this.getContents();
    return fileBuffer.toString();
  }

  /**
   * downloads the complete file contents as a Buffer
   */
  public async getContents(): Promise<Buffer> {
    const resultBuffer = await this.parentDirectoryRef.bucketRef.fastGet({
      path: this.getBasePath(),
    });
    return resultBuffer;
  }

  public async getReadStream(typeArg: 'webstream'): Promise<ReadableStream>;
  public async getReadStream(typeArg: 'nodestream'): Promise<plugins.stream.Readable>;
  /**
   * opens a read stream for the file, either as a web ReadableStream or a
   * node Readable depending on typeArg
   */
  public async getReadStream(
    typeArg: 'nodestream' | 'webstream'
  ): Promise<ReadableStream | plugins.stream.Readable> {
    const readStream = this.parentDirectoryRef.bucketRef.fastGetStream(
      {
        path: this.getBasePath(),
      },
      typeArg as any
    );
    return readStream;
  }

  /**
   * deletes this file
   * mode 'permanent' (default): removes the object and, if present, its
   * '.metadata' sidecar; mode 'trash': records recycle info in the metadata
   * and moves the file into the bucket's trash directory
   */
  public async delete(optionsArg?: { mode: 'trash' | 'permanent' }) {
    optionsArg = {
      ...{
        mode: 'permanent',
      },
      ...optionsArg,
    };

    if (optionsArg.mode === 'permanent') {
      await this.parentDirectoryRef.bucketRef.fastRemove({
        path: this.getBasePath(),
      });
      // metadata sidecars have no metadata of their own, so only look for a
      // sidecar when this file is not itself a '.metadata' file
      if (!this.name.endsWith('.metadata')) {
        if (await this.hasMetaData()) {
          const metadata = await this.getMetaData();
          await metadata.metadataFile.delete(optionsArg);
        }
      }
    } else if (optionsArg.mode === 'trash') {
      // remember when and from where the file was deleted so it can be restored
      const metadata = await this.getMetaData();
      await metadata.storeCustomMetaData({
        key: 'recycle',
        value: {
          deletedAt: Date.now(),
          originalPath: this.getBasePath(),
        },
      });
      const trash = await this.parentDirectoryRef.bucketRef.getTrash();
      const trashDir = await trash.getTrashDir();
      await this.move({
        directory: trashDir,
        path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
      });
    }

    // result is discarded — presumably refreshes the parent listing; confirm intent
    await this.parentDirectoryRef.listFiles();
  }

  /**
   * allows locking the file
   * @param optionsArg timeoutMillis: lock validity window, defaults to 1000ms
   */
  public async lock(optionsArg?: { timeoutMillis?: number }) {
    const metadata = await this.getMetaData();
    await metadata.setLock({
      lock: 'locked',
      expires: Date.now() + (optionsArg?.timeoutMillis || 1000),
    });
  }

  /**
   * actively unlocks a file
   *
   */
  public async unlock(optionsArg?: {
    /**
     * unlock the file even if not locked from this instance
     */
    force?: boolean;
  }) {
    const metadata = await this.getMetaData();
    await metadata.removeLock({
      force: optionsArg?.force,
    });
  }

  /**
   * overwrites the file with new contents
   * accepts Buffer, string (with optional encoding), a node Readable or a web
   * ReadableStream; all branches pass overwrite: true to the bucket
   */
  public async updateWithContents(optionsArg: {
    contents: Buffer | string | plugins.stream.Readable | ReadableStream;
    encoding?: 'utf8' | 'binary';
  }) {
    if (
      optionsArg.contents instanceof plugins.stream.Readable ||
      optionsArg.contents instanceof ReadableStream
    ) {
      await this.parentDirectoryRef.bucketRef.fastPutStream({
        path: this.getBasePath(),
        readableStream: optionsArg.contents,
        overwrite: true,
      });
    } else if (Buffer.isBuffer(optionsArg.contents)) {
      await this.parentDirectoryRef.bucketRef.fastPut({
        path: this.getBasePath(),
        contents: optionsArg.contents,
        overwrite: true,
      });
    } else if (typeof optionsArg.contents === 'string') {
      await this.parentDirectoryRef.bucketRef.fastPut({
        path: this.getBasePath(),
        contents: Buffer.from(optionsArg.contents, optionsArg.encoding),
        overwrite: true,
      });
    }
  }

  /**
   * moves the file to another directory
   * moves the object itself and (unless this is a sidecar) its '.metadata'
   * companion, then re-points this instance at the new location
   */
  public async move(pathDescriptorArg: interfaces.IPathDecriptor) {
    let moveToPath: string = '';
    const isDirectory = await this.parentDirectoryRef.bucketRef.isDirectory(pathDescriptorArg);
    if (isDirectory) {
      // target is a directory: keep the current file name inside it
      moveToPath = await helpers.reducePathDescriptorToPath({
        ...pathDescriptorArg,
        path: plugins.path.join(pathDescriptorArg.path!, this.name),
      });
    } else {
      moveToPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
    }
    // lets move the file
    await this.parentDirectoryRef.bucketRef.fastMove({
      sourcePath: this.getBasePath(),
      destinationPath: moveToPath,
      overwrite: true,
    });
    // lets move the metadatafile
    if (!this.name.endsWith('.metadata')) {
      const metadata = await this.getMetaData();
      await this.parentDirectoryRef.bucketRef.fastMove({
        sourcePath: metadata.metadataFile.getBasePath(),
        destinationPath: moveToPath + '.metadata',
        overwrite: true,
      });
    }

    // lets update references of this
    // NOTE(review): this assumes the descriptor carries both a directory and a
    // path; with a path-only descriptor the non-null assertions below misbehave — confirm
    const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
    this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
      pathDescriptorArg.directory?.getBasePath()!
    );
    this.name = pathDescriptorArg.path!;
  }

  /**
   * checks whether a '.metadata' sidecar exists for this file
   * (always false for '.metadata' files themselves)
   */
  public async hasMetaData(): Promise<boolean> {
    if (!this.name.endsWith('.metadata')) {
      const hasMetadataBool = MetaData.hasMetaData({
        file: this,
      });
      return hasMetadataBool;
    } else {
      return false;
    }
  }

  /**
   * returns the MetaData accessor for this file
   * (MetaData.createForFile materializes the sidecar when missing)
   * @throws when called on a '.metadata' file itself
   */
  public async getMetaData() {
    if (this.name.endsWith('.metadata')) {
      throw new Error('metadata files cannot have metadata');
    }
    const metadata = await MetaData.createForFile({
      file: this,
    });
    return metadata;
  }

  /**
   * gets the contents as json
   */
  public async getJsonData() {
    const json = await this.getContentsAsString();
    // note: JSON.parse is synchronous; the await is a no-op kept for byte-identity
    const parsed = await JSON.parse(json);
    return parsed;
  }

  /**
   * serializes dataArg as JSON and writes it as the file contents
   */
  public async writeJsonData(dataArg: any) {
    await this.updateWithContents({
      contents: JSON.stringify(dataArg),
    });
  }

  /**
   * fetches the first optionsArg.length bytes of the file (e.g. for type sniffing)
   */
  public async getMagicBytes(optionsArg: { length: number }): Promise<Buffer> {
    return this.parentDirectoryRef.bucketRef.getMagicBytes({
      path: this.getBasePath(),
      length: optionsArg.length,
    });
  }
}
|
122
ts/classes.metadata.ts
Normal file
122
ts/classes.metadata.ts
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
|
||||||
|
import { File } from './classes.file.js';
|
||||||
|
|
||||||
|
export class MetaData {
  /**
   * checks whether a '<name>.metadata' sidecar file exists for the given file
   */
  public static async hasMetaData(optionsArg: { file: File }) {
    // lets find the existing metadata file
    const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
      path: optionsArg.file.name + '.metadata',
    });
    return !!existingFile;
  }

  // static
  /**
   * creates the MetaData accessor for a file,
   * materializing an empty '{}' metadata sidecar when none exists yet
   */
  public static async createForFile(optionsArg: { file: File }) {
    const metaData = new MetaData();
    metaData.fileRef = optionsArg.file;

    // lets find the existing metadata file
    metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
      path: metaData.fileRef.name + '.metadata',
      createWithContents: '{}',
    });

    return metaData;
  }

  // instance
  /**
   * the file that contains the metadata
   */
  metadataFile!: File;

  /**
   * the file that the metadata is for
   */
  fileRef!: File;

  /**
   * detects the file type of the referenced file
   * default (no options argument) falls back to the file-extension path;
   * NOTE(review): an empty options object hits the final throw even though
   * neither flag was explicitly set to false — confirm intended contract
   */
  public async getFileType(optionsArg?: {
    useFileExtension?: boolean;
    useMagicBytes?: boolean;
  }): Promise<plugins.smartmime.IFileTypeResult | undefined> {
    if ((optionsArg && optionsArg.useFileExtension) || !optionsArg) {
      const fileType = await plugins.smartmime.detectMimeType({
        path: this.fileRef.name,
      });

      return fileType;
    }
    if (optionsArg && optionsArg.useMagicBytes) {
      // sample the first 100 bytes and let smartmime sniff the type
      const fileType = await plugins.smartmime.detectMimeType({
        buffer: await this.fileRef.getMagicBytes({
          length: 100,
        })
      });

      return fileType;
    }
    throw new Error('optionsArg.useFileExtension and optionsArg.useMagicBytes cannot both be false');
  }

  /**
   * gets the size of the fileRef
   * @returns the object size in bytes as reported by the storage backend
   */
  public async getSizeInBytes(): Promise<number> {
    const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
      path: this.fileRef.getBasePath(),
    });
    return stat.ContentLength!;
  }

  // key prefix that separates user-defined entries from internal ones (lock, lockExpires)
  private prefixCustomMetaData = 'custom_';

  /**
   * stores a custom key/value pair in the metadata sidecar
   */
  public async storeCustomMetaData<T = any>(optionsArg: { key: string; value: T }) {
    const data = await this.metadataFile.getJsonData();
    data[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
    await this.metadataFile.writeJsonData(data);
  }

  /**
   * reads a custom value previously stored under the given key
   */
  public async getCustomMetaData<T = any>(optionsArg: { key: string }): Promise<T> {
    const data = await this.metadataFile.getJsonData();
    return data[this.prefixCustomMetaData + optionsArg.key];
  }

  /**
   * removes a custom key from the metadata sidecar
   */
  public async deleteCustomMetaData(optionsArg: { key: string }) {
    const data = await this.metadataFile.getJsonData();
    delete data[this.prefixCustomMetaData + optionsArg.key];
    await this.metadataFile.writeJsonData(data);
  }

  /**
   * set a lock on the ref file
   * @param optionsArg
   */
  public async setLock(optionsArg: { lock: string; expires: number }) {
    const data = await this.metadataFile.getJsonData();
    data.lock = optionsArg.lock;
    data.lockExpires = optionsArg.expires;
    await this.metadataFile.writeJsonData(data);
  }

  /**
   * remove the lock on the ref file
   * NOTE(review): optionsArg.force is currently ignored — the lock is removed
   * unconditionally; confirm whether an ownership check is intended
   * @param optionsArg
   */
  public async removeLock(optionsArg: { force: boolean }) {
    const data = await this.metadataFile.getJsonData();
    delete data.lock;
    delete data.lockExpires;
    await this.metadataFile.writeJsonData(data);
  }

  /**
   * reports whether a lock is present and not yet expired
   */
  public async checkLocked(): Promise<boolean> {
    const data = await this.metadataFile.getJsonData();
    return data.lock && data.lockExpires > Date.now();
  }

  /**
   * returns the raw lock value and its expiry timestamp
   */
  public async getLockInfo(): Promise<{ lock: string; expires: number }> {
    const data = await this.metadataFile.getJsonData();
    return { lock: data.lock, expires: data.lockExpires };
  }
}
|
55
ts/classes.smartbucket.ts
Normal file
55
ts/classes.smartbucket.ts
Normal file
@ -0,0 +1,55 @@
|
|||||||
|
// classes.smartbucket.ts
|
||||||
|
|
||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import { Bucket } from './classes.bucket.js';
|
||||||
|
|
||||||
|
export class SmartBucket {
  // the S3 connection settings this instance was created with
  public config: plugins.tsclass.storage.IS3Descriptor;

  // shared AWS SDK client used for all operations of this instance
  public s3Client: plugins.s3.S3Client;

  /**
   * the constructor of SmartBucket
   * derives the endpoint URL from the descriptor and sets up the S3 client
   */
  constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
    this.config = configArg;

    // https is the default unless useSsl is explicitly false
    const protocol = configArg.useSsl === false ? 'http' : 'https';
    const port = configArg.port ? `:${configArg.port}` : '';
    const endpoint = `${protocol}://${configArg.endpoint}${port}`;

    this.s3Client = new plugins.s3.S3Client({
      endpoint,
      region: configArg.region || 'us-east-1',
      credentials: {
        accessKeyId: configArg.accessKey,
        secretAccessKey: configArg.accessSecret,
      },
      forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
    });
  }

  /**
   * creates a new bucket and returns a Bucket instance for it
   */
  public async createBucket(bucketNameArg: string) {
    const bucket = await Bucket.createBucketByName(this, bucketNameArg);
    return bucket;
  }

  /**
   * removes the named bucket
   */
  public async removeBucket(bucketName: string) {
    await Bucket.removeBucketByName(this, bucketName);
  }

  /**
   * looks up a bucket by name (may resolve to a falsy value when absent —
   * see getBucketByNameStrict)
   */
  public async getBucketByName(bucketNameArg: string) {
    return Bucket.getBucketByName(this, bucketNameArg);
  }

  /**
   * like getBucketByName, but throws when the bucket does not exist
   */
  public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
    const bucket = await this.getBucketByName(...args);
    if (!bucket) {
      throw new Error(`Bucket ${args[0]} does not exist.`);
    }
    return bucket;
  }
}
|
30
ts/classes.trash.ts
Normal file
30
ts/classes.trash.ts
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
import * as helpers from './helpers.js';
|
||||||
|
import type { Bucket } from './classes.bucket.js';
|
||||||
|
import type { Directory } from './classes.directory.js';
|
||||||
|
import type { File } from './classes.file.js';
|
||||||
|
|
||||||
|
|
||||||
|
export class Trash {
|
||||||
|
public bucketRef: Bucket;
|
||||||
|
|
||||||
|
constructor(bucketRefArg: Bucket) {
|
||||||
|
this.bucketRef = bucketRefArg;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrashDir() {
|
||||||
|
return this.bucketRef.getDirectoryFromPath({ path: '.trash' });
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrashedFileByOriginalName(pathDescriptor: interfaces.IPathDecriptor): Promise<File> {
|
||||||
|
const trashDir = await this.getTrashDir();
|
||||||
|
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
|
||||||
|
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
|
||||||
|
return trashDir.getFileStrict({ path: trashKey });
|
||||||
|
}
|
||||||
|
|
||||||
|
public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
|
||||||
|
return plugins.smartstring.base64.encode(originalPath);
|
||||||
|
}
|
||||||
|
}
|
22
ts/helpers.ts
Normal file
22
ts/helpers.ts
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
import * as plugins from './plugins.js';
|
||||||
|
import * as interfaces from './interfaces.js';
|
||||||
|
|
||||||
|
export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.IPathDecriptor): Promise<string> => {
|
||||||
|
let returnPath = ``
|
||||||
|
if (pathDescriptorArg.directory) {
|
||||||
|
if (pathDescriptorArg.path && plugins.path.isAbsolute(pathDescriptorArg.path)) {
|
||||||
|
console.warn('Directory is being ignored when path is absolute.');
|
||||||
|
returnPath = pathDescriptorArg.path;
|
||||||
|
} else if (pathDescriptorArg.path) {
|
||||||
|
returnPath = plugins.path.join(pathDescriptorArg.directory.getBasePath(), pathDescriptorArg.path);
|
||||||
|
}
|
||||||
|
} else if (pathDescriptorArg.path) {
|
||||||
|
returnPath = pathDescriptorArg.path;
|
||||||
|
} else {
|
||||||
|
throw new Error('You must specify either a path or a directory.');
|
||||||
|
}
|
||||||
|
if (returnPath.startsWith('/')) {
|
||||||
|
returnPath = returnPath.substring(1);
|
||||||
|
}
|
||||||
|
return returnPath;
|
||||||
|
}
|
@ -1,4 +1,4 @@
|
|||||||
export * from './smartbucket.classes.smartbucket.js';
|
export * from './classes.smartbucket.js';
|
||||||
export * from './smartbucket.classes.bucket.js';
|
export * from './classes.bucket.js';
|
||||||
export * from './smartbucket.classes.directory.js';
|
export * from './classes.directory.js';
|
||||||
export * from './smartbucket.classes.file.js';
|
export * from './classes.file.js';
|
||||||
|
6
ts/interfaces.ts
Normal file
6
ts/interfaces.ts
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
import type { Directory } from "./classes.directory.js";
|
||||||
|
|
||||||
|
/**
 * describes a location inside a bucket: a path, a directory reference,
 * or a directory combined with a relative path
 * (see helpers.reducePathDescriptorToPath for how the fields are combined)
 * NOTE(review): the name is misspelled ('Decriptor'); renaming would break importers
 */
export interface IPathDecriptor {
  // bucket-relative or absolute path; may be omitted when a directory is given
  path?: string;
  // directory the path is relative to; may be omitted when path stands alone
  directory?: Directory;
}
|
@ -1,3 +1,5 @@
|
|||||||
|
// plugins.ts
|
||||||
|
|
||||||
// node native
|
// node native
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
import * as stream from 'stream';
|
import * as stream from 'stream';
|
||||||
@ -5,12 +7,15 @@ import * as stream from 'stream';
|
|||||||
export { path, stream };
|
export { path, stream };
|
||||||
|
|
||||||
// @push.rocks scope
|
// @push.rocks scope
|
||||||
|
import * as smartmime from '@push.rocks/smartmime';
|
||||||
import * as smartpath from '@push.rocks/smartpath';
|
import * as smartpath from '@push.rocks/smartpath';
|
||||||
import * as smartpromise from '@push.rocks/smartpromise';
|
import * as smartpromise from '@push.rocks/smartpromise';
|
||||||
import * as smartrx from '@push.rocks/smartrx';
|
import * as smartrx from '@push.rocks/smartrx';
|
||||||
import * as smartstream from '@push.rocks/smartstream';
|
import * as smartstream from '@push.rocks/smartstream';
|
||||||
|
import * as smartstring from '@push.rocks/smartstring';
|
||||||
|
import * as smartunique from '@push.rocks/smartunique';
|
||||||
|
|
||||||
export { smartpath, smartpromise, smartrx, smartstream };
|
export { smartmime, smartpath, smartpromise, smartrx, smartstream, smartstring, smartunique };
|
||||||
|
|
||||||
// @tsclass
|
// @tsclass
|
||||||
import * as tsclass from '@tsclass/tsclass';
|
import * as tsclass from '@tsclass/tsclass';
|
||||||
@ -20,6 +25,8 @@ export {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// third party scope
|
// third party scope
|
||||||
import * as minio from 'minio';
|
import * as s3 from '@aws-sdk/client-s3';
|
||||||
|
|
||||||
export { minio };
|
export {
|
||||||
|
s3,
|
||||||
|
}
|
@ -1,218 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { SmartBucket } from './smartbucket.classes.smartbucket.js';
|
|
||||||
import { Directory } from './smartbucket.classes.directory.js';
|
|
||||||
|
|
||||||
export class Bucket {
|
|
||||||
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
|
|
||||||
const buckets = await smartbucketRef.minioClient.listBuckets();
|
|
||||||
const foundBucket = buckets.find((bucket) => {
|
|
||||||
return bucket.name === bucketNameArg;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (foundBucket) {
|
|
||||||
console.log(`bucket with name ${bucketNameArg} exists.`);
|
|
||||||
console.log(`Taking this as base for new Bucket instance`);
|
|
||||||
return new this(smartbucketRef, bucketNameArg);
|
|
||||||
} else {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
|
||||||
await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch((e) => console.log(e));
|
|
||||||
return new Bucket(smartbucketRef, bucketName);
|
|
||||||
}
|
|
||||||
|
|
||||||
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
|
|
||||||
await smartbucketRef.minioClient.removeBucket(bucketName).catch((e) => console.log(e));
|
|
||||||
}
|
|
||||||
|
|
||||||
public smartbucketRef: SmartBucket;
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
constructor(smartbucketRef: SmartBucket, bucketName: string) {
|
|
||||||
this.smartbucketRef = smartbucketRef;
|
|
||||||
this.name = bucketName;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets the base directory of the bucket
|
|
||||||
*/
|
|
||||||
public async getBaseDirectory() {
|
|
||||||
return new Directory(this, null, '');
|
|
||||||
}
|
|
||||||
|
|
||||||
// ===============
|
|
||||||
// Fast Operations
|
|
||||||
// ===============
|
|
||||||
|
|
||||||
/**
|
|
||||||
* store file
|
|
||||||
*/
|
|
||||||
public async fastPut(optionsArg: {
|
|
||||||
path: string;
|
|
||||||
contents: string | Buffer;
|
|
||||||
}): Promise<void> {
|
|
||||||
const streamIntake = new plugins.smartstream.StreamIntake();
|
|
||||||
const putPromise = this.smartbucketRef.minioClient
|
|
||||||
.putObject(this.name, optionsArg.path, streamIntake)
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
streamIntake.pushData(optionsArg.contents);
|
|
||||||
streamIntake.signalEnd();
|
|
||||||
const response = await putPromise;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* get file
|
|
||||||
*/
|
|
||||||
public async fastGet(optionsArg: Parameters<typeof this.fastGetStream>[0]): Promise<Buffer> {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
let completeFile: Buffer;
|
|
||||||
const replaySubject = await this.fastGetStream(optionsArg);
|
|
||||||
const subscription = replaySubject.subscribe({
|
|
||||||
next: (chunk) => {
|
|
||||||
if (completeFile) {
|
|
||||||
completeFile = Buffer.concat([completeFile, chunk]);
|
|
||||||
} else {
|
|
||||||
completeFile = chunk;
|
|
||||||
}
|
|
||||||
},
|
|
||||||
complete: () => {
|
|
||||||
done.resolve();
|
|
||||||
subscription.unsubscribe();
|
|
||||||
},
|
|
||||||
error: (err) => {
|
|
||||||
console.log(err);
|
|
||||||
},
|
|
||||||
});
|
|
||||||
await done.promise;
|
|
||||||
return completeFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastGetStream(optionsArg: {
|
|
||||||
path: string;
|
|
||||||
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
|
|
||||||
const fileStream = await this.smartbucketRef.minioClient
|
|
||||||
.getObject(this.name, optionsArg.path)
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
|
|
||||||
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
|
|
||||||
writeFunction: async (chunk) => {
|
|
||||||
replaySubject.next(chunk);
|
|
||||||
return;
|
|
||||||
},
|
|
||||||
finalFunction: async (cb) => {
|
|
||||||
replaySubject.complete();
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!fileStream) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const smartstream = new plugins.smartstream.StreamWrapper([
|
|
||||||
fileStream,
|
|
||||||
duplexStream,
|
|
||||||
]);
|
|
||||||
smartstream.run();
|
|
||||||
return replaySubject;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* store file as stream
|
|
||||||
*/
|
|
||||||
public async fastPutStream(optionsArg: {
|
|
||||||
path: string;
|
|
||||||
dataStream: plugins.stream.Readable;
|
|
||||||
nativeMetadata?: { [key: string]: string };
|
|
||||||
}): Promise<void> {
|
|
||||||
await this.smartbucketRef.minioClient.putObject(
|
|
||||||
this.name,
|
|
||||||
optionsArg.path,
|
|
||||||
optionsArg.dataStream,
|
|
||||||
null,
|
|
||||||
...(optionsArg.nativeMetadata
|
|
||||||
? (() => {
|
|
||||||
const returnObject: any = {};
|
|
||||||
return returnObject;
|
|
||||||
})()
|
|
||||||
: {})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Copies an object, optionally merging new native metadata into it.
 * NOTE(review): metadata merging is currently ineffective — see inline notes.
 */
public async copyObject(optionsArg: {
  /**
   * key of the object to copy
   */
  objectKey: string;
  /**
   * in case you want to copy to another bucket specify it here
   */
  targetBucket?: Bucket;
  // NOTE(review): targetBucketKey is never read anywhere in this method —
  // the copy always reuses objectKey; confirm intended behavior.
  targetBucketKey?: string;
  /**
   * metadata will be merged with existing metadata
   */
  nativeMetadata?: { [key: string]: string };
  deleteExistingNativeMetadata?: boolean;
}): Promise<void> {
  try {
    const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;

    // Retrieve current object information to use in copy conditions
    // NOTE(review): stats the object in the TARGET bucket, not necessarily
    // the source — confirm this is intended when targetBucket differs.
    const currentObjInfo = await this.smartbucketRef.minioClient.statObject(
      targetBucketName,
      optionsArg.objectKey
    );

    // Setting up copy conditions
    const copyConditions = new plugins.minio.CopyConditions();

    // Prepare new metadata
    // NOTE(review): newNativeMetadata is computed but never passed to
    // copyObject below, so metadata is NOT actually updated — blocked by the
    // minio-js issue referenced in the TODO.
    const newNativeMetadata = {
      ...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.metaData),
      ...optionsArg.nativeMetadata,
    };

    // Define the copy operation as a Promise
    // TODO: check on issue here: https://github.com/minio/minio-js/issues/1286
    await this.smartbucketRef.minioClient.copyObject(
      this.name,
      optionsArg.objectKey,
      `/${targetBucketName}/${optionsArg.objectKey}`,
      copyConditions
    );
  } catch (err) {
    console.error('Error updating metadata:', err);
    throw err; // rethrow to allow caller to handle
  }
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* removeObject
|
|
||||||
*/
|
|
||||||
public async fastRemove(optionsArg: {
|
|
||||||
path: string;
|
|
||||||
}) {
|
|
||||||
await this.smartbucketRef.minioClient.removeObject(this.name, optionsArg.path);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async doesObjectExist(optionsArg: {
|
|
||||||
path: string;
|
|
||||||
}): Promise<boolean> {
|
|
||||||
try {
|
|
||||||
await this.smartbucketRef.minioClient.statObject(this.name, optionsArg.path);
|
|
||||||
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
|
|
||||||
return true;
|
|
||||||
} catch (error) {
|
|
||||||
if (error.code === 'NotFound') {
|
|
||||||
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
|
|
||||||
return false;
|
|
||||||
} else {
|
|
||||||
console.error('Error checking object existence:', error);
|
|
||||||
throw error; // Rethrow if it's not a NotFound error to handle unexpected issues
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,248 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { Bucket } from './smartbucket.classes.bucket.js';
|
|
||||||
import { File } from './smartbucket.classes.file.js';
|
|
||||||
|
|
||||||
export class Directory {
|
|
||||||
public bucketRef: Bucket;
|
|
||||||
public parentDirectoryRef: Directory;
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
public tree: string[];
|
|
||||||
public files: string[];
|
|
||||||
public folders: string[];
|
|
||||||
|
|
||||||
constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) {
|
|
||||||
this.bucketRef = bucketRefArg;
|
|
||||||
this.parentDirectoryRef = parentDiretory;
|
|
||||||
this.name = name;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* returns an array of parent directories
|
|
||||||
*/
|
|
||||||
public getParentDirectories(): Directory[] {
|
|
||||||
let parentDirectories: Directory[] = [];
|
|
||||||
if (this.parentDirectoryRef) {
|
|
||||||
parentDirectories.push(this.parentDirectoryRef);
|
|
||||||
parentDirectories = parentDirectories.concat(this.parentDirectoryRef.getParentDirectories());
|
|
||||||
}
|
|
||||||
return parentDirectories;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* returns the directory level
|
|
||||||
*/
|
|
||||||
public getDirectoryLevel(): number {
|
|
||||||
return this.getParentDirectories().length;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* updates the base path
|
|
||||||
*/
|
|
||||||
public getBasePath(): string {
|
|
||||||
const parentDirectories = this.getParentDirectories();
|
|
||||||
let basePath = '';
|
|
||||||
for (const parentDir of parentDirectories) {
|
|
||||||
if (!parentDir.name && !basePath) {
|
|
||||||
basePath = this.name + '/';
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (parentDir.name && !basePath) {
|
|
||||||
basePath = parentDir.name + '/' + this.name + '/';
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
if (parentDir.name && basePath) {
|
|
||||||
basePath = parentDir.name + '/' + basePath;
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return basePath;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* lists all files
|
|
||||||
*/
|
|
||||||
public async listFiles(): Promise<File[]> {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
|
|
||||||
this.bucketRef.name,
|
|
||||||
this.getBasePath(),
|
|
||||||
false
|
|
||||||
);
|
|
||||||
const fileArray: File[] = [];
|
|
||||||
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
|
|
||||||
objectMode: true,
|
|
||||||
writeFunction: async (bucketItem) => {
|
|
||||||
if (bucketItem.prefix) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (!bucketItem.name) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
|
|
||||||
if (subtractedPath.startsWith('/')) {
|
|
||||||
subtractedPath = subtractedPath.substr(1);
|
|
||||||
}
|
|
||||||
if (!subtractedPath.includes('/')) {
|
|
||||||
fileArray.push(
|
|
||||||
new File({
|
|
||||||
directoryRefArg: this,
|
|
||||||
fileName: subtractedPath,
|
|
||||||
})
|
|
||||||
);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
finalFunction: async (tools) => {
|
|
||||||
done.resolve();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
fileNameStream.pipe(duplexStream);
|
|
||||||
await done.promise;
|
|
||||||
return fileArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* lists all folders
|
|
||||||
*/
|
|
||||||
public async listDirectories(): Promise<Directory[]> {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
const basePath = this.getBasePath();
|
|
||||||
const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
|
|
||||||
this.bucketRef.name,
|
|
||||||
this.getBasePath(),
|
|
||||||
false
|
|
||||||
);
|
|
||||||
const directoryArray: Directory[] = [];
|
|
||||||
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
|
|
||||||
objectMode: true,
|
|
||||||
writeFunction: async (bucketItem) => {
|
|
||||||
if (bucketItem.name) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
let subtractedPath = bucketItem.prefix.replace(this.getBasePath(), '');
|
|
||||||
if (subtractedPath.startsWith('/')) {
|
|
||||||
subtractedPath = subtractedPath.substr(1);
|
|
||||||
}
|
|
||||||
if (subtractedPath.includes('/')) {
|
|
||||||
const dirName = subtractedPath.split('/')[0];
|
|
||||||
if (directoryArray.find((directory) => directory.name === dirName)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
directoryArray.push(new Directory(this.bucketRef, this, dirName));
|
|
||||||
}
|
|
||||||
},
|
|
||||||
finalFunction: async (tools) => {
|
|
||||||
done.resolve();
|
|
||||||
}
|
|
||||||
});
|
|
||||||
completeDirStream.pipe(duplexStream);
|
|
||||||
await done.promise;
|
|
||||||
return directoryArray;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets an array that has all objects with a certain prefix;
|
|
||||||
*/
|
|
||||||
public async getTreeArray() {
|
|
||||||
const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
|
|
||||||
this.bucketRef.name,
|
|
||||||
this.getBasePath(),
|
|
||||||
true
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* gets a sub directory
|
|
||||||
*/
|
|
||||||
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
|
|
||||||
const dirNameArray = dirNameArg.split('/');
|
|
||||||
|
|
||||||
const getDirectory = async (directoryArg: Directory, dirNameToSearch: string) => {
|
|
||||||
const directories = await directoryArg.listDirectories();
|
|
||||||
return directories.find((directory) => {
|
|
||||||
return directory.name === dirNameToSearch;
|
|
||||||
});
|
|
||||||
};
|
|
||||||
let wantedDirectory: Directory;
|
|
||||||
for (const dirNameToSearch of dirNameArray) {
|
|
||||||
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
|
|
||||||
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
|
|
||||||
}
|
|
||||||
return wantedDirectory;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* moves the directory
|
|
||||||
*/
|
|
||||||
public async move() {
|
|
||||||
// TODO
|
|
||||||
throw new Error('moving a directory is not yet implemented');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* creates a file within this directory
|
|
||||||
* @param relativePathArg
|
|
||||||
*/
|
|
||||||
public async createEmptyFile(relativePathArg: string) {
|
|
||||||
const emtpyFile = await File.create({
|
|
||||||
directory: this,
|
|
||||||
name: relativePathArg,
|
|
||||||
contents: '',
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
// file operations
|
|
||||||
public async fastPut(optionsArg: { path: string; contents: string | Buffer }) {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
|
||||||
await this.bucketRef.fastPut({
|
|
||||||
path,
|
|
||||||
contents: optionsArg.contents,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastGet(optionsArg: { path: string }) {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
|
||||||
const result = await this.bucketRef.fastGet({
|
|
||||||
path,
|
|
||||||
});
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastGetStream(pathArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), pathArg);
|
|
||||||
const result = await this.bucketRef.fastGetStream({
|
|
||||||
path,
|
|
||||||
});
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async fastRemove(optionsArg: { path: string }) {
|
|
||||||
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
|
|
||||||
await this.bucketRef.fastRemove({
|
|
||||||
path,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* deletes the directory with all its contents
|
|
||||||
*/
|
|
||||||
public async delete() {
|
|
||||||
const deleteDirectory = async (directoryArg: Directory) => {
|
|
||||||
const childDirectories = await directoryArg.listDirectories();
|
|
||||||
if (childDirectories.length === 0) {
|
|
||||||
console.log('directory empty! Path complete!');
|
|
||||||
} else {
|
|
||||||
for (const childDir of childDirectories) {
|
|
||||||
await deleteDirectory(childDir);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
const files = await directoryArg.listFiles();
|
|
||||||
for (const file of files) {
|
|
||||||
await directoryArg.fastRemove({
|
|
||||||
path: file.name,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
await deleteDirectory(this);
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,140 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { Directory } from './smartbucket.classes.directory.js';
|
|
||||||
|
|
||||||
/**
 * Metadata describing a file.
 * NOTE(review): size is typed as string — presumably a formatted/stringified
 * byte count; confirm against the producer of this metadata.
 */
export interface IFileMetaData {
  name: string;
  fileType: string;
  size: string;
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* represents a file in a directory
|
|
||||||
*/
|
|
||||||
export class File {
|
|
||||||
// STATIC
|
|
||||||
|
|
||||||
/**
|
|
||||||
* creates a file in draft mode
|
|
||||||
* you need to call .save() to store it in s3
|
|
||||||
* @param optionsArg
|
|
||||||
*/
|
|
||||||
public static async create(optionsArg: {
|
|
||||||
directory: Directory;
|
|
||||||
name: string;
|
|
||||||
contents: Buffer | string | plugins.stream.Readable;
|
|
||||||
/**
|
|
||||||
* if contents are of type string, you can specify the encoding here
|
|
||||||
*/
|
|
||||||
encoding?: 'utf8' | 'binary';
|
|
||||||
}): Promise<File> {
|
|
||||||
const contents =
|
|
||||||
typeof optionsArg.contents === 'string'
|
|
||||||
? Buffer.from(optionsArg.contents, optionsArg.encoding)
|
|
||||||
: optionsArg.contents;
|
|
||||||
const file = new File({
|
|
||||||
directoryRefArg: optionsArg.directory,
|
|
||||||
fileName: optionsArg.name,
|
|
||||||
});
|
|
||||||
if (contents instanceof plugins.stream.Readable) {} else {
|
|
||||||
await optionsArg.directory.fastPut({
|
|
||||||
path: optionsArg.name,
|
|
||||||
contents: contents,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
return file;
|
|
||||||
}
|
|
||||||
|
|
||||||
// INSTANCE
|
|
||||||
public parentDirectoryRef: Directory;
|
|
||||||
public name: string;
|
|
||||||
|
|
||||||
public path: string;
|
|
||||||
public metaData: IFileMetaData;
|
|
||||||
|
|
||||||
constructor(optionsArg: { directoryRefArg: Directory; fileName: string }) {
|
|
||||||
this.parentDirectoryRef = optionsArg.directoryRefArg;
|
|
||||||
this.name = optionsArg.fileName;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async getContentAsString() {
|
|
||||||
const fileBuffer = await this.getContentAsBuffer();
|
|
||||||
return fileBuffer.toString();
|
|
||||||
}
|
|
||||||
|
|
||||||
public async getContentAsBuffer() {
|
|
||||||
const done = plugins.smartpromise.defer();
|
|
||||||
const fileStream = await this.parentDirectoryRef.bucketRef.smartbucketRef.minioClient
|
|
||||||
.getObject(this.parentDirectoryRef.bucketRef.name, this.path)
|
|
||||||
.catch((e) => console.log(e));
|
|
||||||
let completeFile = Buffer.from('');
|
|
||||||
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>(
|
|
||||||
{
|
|
||||||
writeFunction: async (chunk) => {
|
|
||||||
completeFile = Buffer.concat([chunk]);
|
|
||||||
return chunk;
|
|
||||||
},
|
|
||||||
finalFunction: async (cb) => {
|
|
||||||
done.resolve();
|
|
||||||
return Buffer.from('');
|
|
||||||
},
|
|
||||||
}
|
|
||||||
);
|
|
||||||
|
|
||||||
if (!fileStream) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
fileStream.pipe(duplexStream);
|
|
||||||
await done.promise;
|
|
||||||
return completeFile;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async readStreaming() {
|
|
||||||
// TODO
|
|
||||||
throw new Error('not yet implemented');
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* removes this file
|
|
||||||
*/
|
|
||||||
public async remove() {
|
|
||||||
await this.parentDirectoryRef.bucketRef.smartbucketRef.minioClient.removeObject(
|
|
||||||
this.parentDirectoryRef.bucketRef.name,
|
|
||||||
this.path
|
|
||||||
);
|
|
||||||
await this.parentDirectoryRef.listFiles();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* deletes the file
|
|
||||||
*/
|
|
||||||
public async delete() {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* allows locking the file
|
|
||||||
* @param optionsArg
|
|
||||||
*/
|
|
||||||
public async lock(optionsArg?: { timeoutMillis?: number }) {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* actively unlocks a file
|
|
||||||
*
|
|
||||||
*/
|
|
||||||
public async unlock(optionsArg?: {
|
|
||||||
/**
|
|
||||||
* unlock the file even if not locked from this instance
|
|
||||||
*/
|
|
||||||
force?: boolean;
|
|
||||||
}) {}
|
|
||||||
|
|
||||||
public async updateWithContents(optionsArg: {
|
|
||||||
contents: Buffer | string | plugins.stream.Readable;
|
|
||||||
encoding?: 'utf8' | 'binary';
|
|
||||||
}) {}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* allows updating the metadata of a file
|
|
||||||
* @param updatedMetadata
|
|
||||||
*/
|
|
||||||
public async updateMetaData(updatedMetadata: any) {}
|
|
||||||
}
|
|
@ -1,35 +0,0 @@
|
|||||||
import * as plugins from './smartbucket.plugins.js';
|
|
||||||
import { Bucket } from './smartbucket.classes.bucket.js';
|
|
||||||
|
|
||||||
export class SmartBucket {
|
|
||||||
public config: plugins.tsclass.storage.IS3Descriptor;
|
|
||||||
|
|
||||||
public minioClient: plugins.minio.Client;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* the constructor of SmartBucket
|
|
||||||
*/
|
|
||||||
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
|
|
||||||
this.config = configArg;
|
|
||||||
this.minioClient = new plugins.minio.Client({
|
|
||||||
endPoint: this.config.endpoint,
|
|
||||||
port: configArg.port || 443,
|
|
||||||
useSSL: configArg.useSsl !== undefined ? configArg.useSsl : true,
|
|
||||||
accessKey: this.config.accessKey,
|
|
||||||
secretKey: this.config.accessSecret,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
public async createBucket(bucketNameArg: string) {
|
|
||||||
const bucket = await Bucket.createBucketByName(this, bucketNameArg);
|
|
||||||
return bucket;
|
|
||||||
}
|
|
||||||
|
|
||||||
public async removeBucket(bucketName: string) {
|
|
||||||
await Bucket.removeBucketByName(this, bucketName);
|
|
||||||
}
|
|
||||||
|
|
||||||
public async getBucketByName(bucketName: string) {
|
|
||||||
return Bucket.getBucketByName(this, bucketName);
|
|
||||||
}
|
|
||||||
}
|
|
@ -6,7 +6,8 @@
|
|||||||
"module": "NodeNext",
|
"module": "NodeNext",
|
||||||
"moduleResolution": "NodeNext",
|
"moduleResolution": "NodeNext",
|
||||||
"esModuleInterop": true,
|
"esModuleInterop": true,
|
||||||
"verbatimModuleSyntax": true
|
"verbatimModuleSyntax": true,
|
||||||
|
"strict": true
|
||||||
},
|
},
|
||||||
"exclude": [
|
"exclude": [
|
||||||
"dist_*/**/*.d.ts"
|
"dist_*/**/*.d.ts"
|
||||||
|
Reference in New Issue
Block a user