Compare commits


50 Commits

SHA1 Message Date
0b396f19cf 3.2.1 2024-11-24 02:28:48 +01:00
6ab77ece6e fix(metadata): Fix metadata handling for deleted files 2024-11-24 02:28:48 +01:00
b7a1f2087c Merge pull request 'fix: use overwrite to make metadata files work' (#2) from fix/smartbucket-trash into master
Reviewed-on: #2
2024-11-24 01:27:58 +00:00
b0d41fa9a0 3.2.0 2024-11-24 02:25:08 +01:00
34082c38a7 feat(bucket): Enhanced SmartBucket with trash management and metadata handling 2024-11-24 02:25:08 +01:00
8d160cefb0 fix: use overwrite to make metadata files work
During a delete, the metadata file is updated. Because the overwrite property was not set, the metadata
could not be updated, which caused issues.
2024-11-18 21:08:39 +00:00
cec9c07b7c 3.1.0 2024-11-18 15:07:47 +01:00
383a5204f4 feat(file): Added functionality to retrieve magic bytes from files and detect file types using magic bytes. 2024-11-18 15:07:46 +01:00
c7f0c97341 3.0.24 2024-11-18 11:24:11 +01:00
e7f60465ff fix(metadata): Fix metadata handling to address type assertion and data retrieval. 2024-11-18 11:24:11 +01:00
7db4d24817 3.0.23 2024-10-16 10:27:27 +02:00
dc599585b8 fix(dependencies): Update package dependencies for improved functionality and security. 2024-10-16 10:27:27 +02:00
a22e32cd32 3.0.22 2024-07-28 12:46:39 +02:00
4647181807 fix(dependencies): Update dependencies and improve bucket retrieval logging 2024-07-28 12:46:39 +02:00
99c3935d0c 3.0.21 2024-07-04 18:39:28 +02:00
05523dc7a1 fix(test): Update endpoint configuration in tests to use environment variable 2024-07-04 18:39:27 +02:00
dc99cfa229 3.0.20 2024-06-19 18:28:53 +02:00
23f8dc55d0 fix(core): update 2024-06-19 18:28:52 +02:00
ffaf0fc97a 3.0.19 2024-06-18 18:44:59 +02:00
2a0425ff54 fix(delete functions): ensure more consistency between methods and trash behaviour 2024-06-18 18:44:58 +02:00
9adcdee0a0 3.0.18 2024-06-17 20:00:58 +02:00
786f8d4365 fix(core): update 2024-06-17 20:00:57 +02:00
67244ba5cf 3.0.17 2024-06-17 19:57:56 +02:00
a9bb31c2a2 fix(core): update 2024-06-17 19:57:56 +02:00
bd8b05920f 3.0.16 2024-06-17 16:01:36 +02:00
535d9f8520 fix(core): update 2024-06-17 16:01:35 +02:00
8401fe1c0c 3.0.15 2024-06-11 17:21:22 +02:00
08c3f674bf fix(core): update 2024-06-11 17:21:22 +02:00
df0a439def 3.0.14 2024-06-11 17:20:49 +02:00
7245b49c31 fix(core): update 2024-06-11 17:20:48 +02:00
4b70edb947 finish trash 2024-06-10 16:47:20 +02:00
9629a04da6 3.0.13 2024-06-09 16:32:33 +02:00
963463d40d fix(core): update 2024-06-09 16:32:32 +02:00
ce58b99fc7 3.0.12 2024-06-09 16:02:34 +02:00
591c99736d fix(core): update 2024-06-09 16:02:33 +02:00
559e3da47b 3.0.11 2024-06-08 19:13:25 +02:00
a7ac870e05 fix(core): update 2024-06-08 19:13:24 +02:00
d48c5e229a 3.0.10 2024-06-03 21:35:08 +02:00
b9c384dd08 fix(core): update 2024-06-03 21:35:08 +02:00
91c04b2364 update description 2024-05-29 14:11:54 +02:00
b5dcc131e2 3.0.9 2024-05-27 17:34:27 +02:00
cb0ab2c9db fix(core): update 2024-05-27 17:34:26 +02:00
2a17ee542e 3.0.8 2024-05-27 14:34:13 +02:00
95e9d2f0ff fix(core): update 2024-05-27 14:34:12 +02:00
1a71c76da3 3.0.7 2024-05-27 12:56:26 +02:00
e924511147 fix(s3 paths): pathing differences now correctly handled in a reducePath method. 2024-05-27 12:56:25 +02:00
645ebbdd4d 3.0.6 2024-05-21 18:47:00 +02:00
168148b2c9 fix(core): update 2024-05-21 18:46:59 +02:00
1293fc4ca6 3.0.5 2024-05-21 18:42:55 +02:00
b040120813 fix(core): update 2024-05-21 18:42:55 +02:00
22 changed files with 5421 additions and 1892 deletions

changelog.md Normal file

@ -0,0 +1,100 @@
# Changelog
## 2024-11-24 - 3.2.1 - fix(metadata)
Fix metadata handling for deleted files
- Ensured metadata is correctly stored and managed when files are moved to the trash.
## 2024-11-24 - 3.2.0 - feat(bucket)
Enhanced SmartBucket with trash management and metadata handling
- Added functionality to move files to a trash directory.
- Introduced methods to handle file metadata more robustly.
- Implemented a method to clean all contents from a bucket.
- Enhanced directory retrieval to handle non-existent directories with options.
- Improved handling of file paths and metadata within the storage system.
## 2024-11-18 - 3.1.0 - feat(file)
Added functionality to retrieve magic bytes from files and detect file types using magic bytes.
- Introduced method `getMagicBytes` in `File` and `Bucket` classes to retrieve a specific number of bytes from a file.
- Enhanced file type detection by utilizing magic bytes in `MetaData` class.
- Updated dependencies for better performance and compatibility.
## 2024-11-18 - 3.0.24 - fix(metadata)
Fix metadata handling to address type assertion and data retrieval.
- Fixed type assertion issues in `MetaData` class properties with type non-null assertions.
- Corrected the handling of JSON data retrieval in `MetaData.storeCustomMetaData` function.
## 2024-10-16 - 3.0.23 - fix(dependencies)
Update package dependencies for improved functionality and security.
- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
- Updated @push.rocks/smartstream to version ^3.2.4.
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.
## 2024-07-28 - 3.0.22 - fix(dependencies)
Update dependencies and improve bucket retrieval logging
- Updated @aws-sdk/client-s3 to ^3.620.0
- Updated @git.zone/tsbuild to ^2.1.84
- Updated @git.zone/tsrun to ^1.2.49
- Updated @push.rocks/smartpromise to ^4.0.4
- Updated @tsclass/tsclass to ^4.1.2
- Added a log for when a bucket is not found by name in getBucketByName method
## 2024-07-04 - 3.0.21 - fix(test)
Update endpoint configuration in tests to use environment variable
- Modified `qenv.yml` to include `S3_ENDPOINT` as a required environment variable.
- Updated test files to fetch `S3_ENDPOINT` from environment instead of hardcoding.
## 2024-06-19 - 3.0.20 - Fix and Stability Updates
Improved overall stability and consistency.
## 2024-06-18 - 3.0.18 - Delete Functions Consistency
Ensured more consistency between delete methods and trash behavior.
## 2024-06-17 - 3.0.17 to 3.0.16 - Fix and Update
Routine updates and fixes performed.
## 2024-06-11 - 3.0.15 to 3.0.14 - Fix and Update
Routine updates and fixes performed.
## 2024-06-10 - 3.0.13 - Trash Feature Completion
Finished work on trash feature.
## 2024-06-09 - 3.0.12 - Fix and Update
Routine updates and fixes performed.
## 2024-06-08 - 3.0.11 to 3.0.10 - Fix and Update
Routine updates and fixes performed.
## 2024-06-03 - 3.0.10 - Fix and Update
Routine updates and fixes performed.
## 2024-05-29 - 3.0.9 - Update Description
Updated project description.
## 2024-05-27 - 3.0.8 to 3.0.6 - Pathing and Core Updates
Routine updates and fixes performed.
- S3 paths' pathing differences now correctly handled with a reducePath method.
## 2024-05-21 - 3.0.5 to 3.0.4 - Fix and Update
Routine updates and fixes performed.
## 2024-05-17 - 3.0.3 to 3.0.2 - Fix and Update
Routine updates and fixes performed.
## 2024-05-17 - 3.0.0 - Major Release
Introduced breaking changes in core and significant improvements.
## 2024-05-05 - 2.0.5 - Breaking Changes
Introduced breaking changes in core functionality.
## 2024-04-14 - 2.0.4 - TSConfig Update
Updated TypeScript configuration.
## 2024-01-01 - 2.0.2 - Organization Scheme Update
Switched to the new organizational scheme.
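The 3.2.x and 3.1.0 entries above describe trash handling, bucket cleaning, and magic-byte detection. A minimal hedged sketch of how those APIs fit together, using method names taken from the source and test diffs further down (credentials, endpoint, bucket name, and paths are placeholders):

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

const storage = new smartbucket.SmartBucket({
  accessKey: 'yourAccessKey',
  accessSecret: 'yourSecretKey',
  endpoint: 's3.example.com', // placeholder endpoint
});

const bucket = await storage.getBucketByNameStrict('my-bucket'); // placeholder bucket name

// fastPut now returns a File instance (3.2.0)
const file = await bucket.fastPut({
  path: 'docs/report.pdf',
  contents: Buffer.from('%PDF-1.7 ...'),
});

// read the first bytes of an object to detect its file type (3.1.0)
const magicBytes = await bucket.getMagicBytes({ path: 'docs/report.pdf', length: 4 });

// move the file into the trash instead of deleting it permanently (3.2.0)
await file.delete({ mode: 'trash' });

// or wipe the bucket entirely (3.2.0)
await bucket.cleanAllContents();
```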


@ -8,25 +8,28 @@
"githost": "code.foss.global", "githost": "code.foss.global",
"gitscope": "push.rocks", "gitscope": "push.rocks",
"gitrepo": "smartbucket", "gitrepo": "smartbucket",
"description": "A TypeScript library that offers simple, cloud-independent object storage with features like bucket creation, file management, and directory management.", "description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
"npmPackagename": "@push.rocks/smartbucket", "npmPackagename": "@push.rocks/smartbucket",
"license": "MIT", "license": "MIT",
"keywords": [ "keywords": [
"TypeScript",
"cloud storage", "cloud storage",
"object storage", "object storage",
"TypeScript", "bucket creation",
"S3",
"minio",
"file management", "file management",
"directory management", "directory management",
"bucket creation",
"data streaming", "data streaming",
"multi-cloud", "multi-cloud",
"API", "API",
"unified storage", "unified storage",
"S3",
"minio",
"file locking",
"metadata",
"buffer handling", "buffer handling",
"access key", "access key",
"secret key" "secret key",
"cloud agnostic"
] ]
} }
}, },

package-lock.json generated

@ -1,12 +1,12 @@
{ {
"name": "@push.rocks/smartbucket", "name": "@push.rocks/smartbucket",
"version": "3.0.4", "version": "3.2.1",
"lockfileVersion": 3, "lockfileVersion": 3,
"requires": true, "requires": true,
"packages": { "packages": {
"": { "": {
"name": "@push.rocks/smartbucket", "name": "@push.rocks/smartbucket",
"version": "3.0.4", "version": "3.2.1",
"license": "UNLICENSED", "license": "UNLICENSED",
"dependencies": { "dependencies": {
"@push.rocks/smartpath": "^5.0.18", "@push.rocks/smartpath": "^5.0.18",


@ -1,7 +1,7 @@
{ {
"name": "@push.rocks/smartbucket", "name": "@push.rocks/smartbucket",
"version": "3.0.4", "version": "3.2.1",
"description": "A TypeScript library that offers simple, cloud-independent object storage with features like bucket creation, file management, and directory management.", "description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
"main": "dist_ts/index.js", "main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts", "typings": "dist_ts/index.d.ts",
"type": "module", "type": "module",
@ -12,20 +12,22 @@
"build": "(tsbuild --web --allowimplicitany)" "build": "(tsbuild --web --allowimplicitany)"
}, },
"devDependencies": { "devDependencies": {
"@git.zone/tsbuild": "^2.1.80", "@git.zone/tsbuild": "^2.1.84",
"@git.zone/tsrun": "^1.2.46", "@git.zone/tsrun": "^1.2.49",
"@git.zone/tstest": "^1.0.90", "@git.zone/tstest": "^1.0.90",
"@push.rocks/qenv": "^6.0.5", "@push.rocks/qenv": "^6.1.0",
"@push.rocks/tapbundle": "^5.0.23" "@push.rocks/tapbundle": "^5.5.3"
}, },
"dependencies": { "dependencies": {
"@push.rocks/smartmime": "^2.0.0", "@aws-sdk/client-s3": "^3.699.0",
"@push.rocks/smartmime": "^2.0.4",
"@push.rocks/smartpath": "^5.0.18", "@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.3", "@push.rocks/smartpromise": "^4.0.4",
"@push.rocks/smartrx": "^3.0.7", "@push.rocks/smartrx": "^3.0.7",
"@push.rocks/smartstream": "^3.0.38", "@push.rocks/smartstream": "^3.2.5",
"@tsclass/tsclass": "^4.0.54", "@push.rocks/smartstring": "^4.0.15",
"minio": "^8.0.0" "@push.rocks/smartunique": "^3.0.9",
"@tsclass/tsclass": "^4.1.2"
}, },
"private": false, "private": false,
"files": [ "files": [
@ -44,20 +46,28 @@
"last 1 chrome versions" "last 1 chrome versions"
], ],
"keywords": [ "keywords": [
"TypeScript",
"cloud storage", "cloud storage",
"object storage", "object storage",
"TypeScript", "bucket creation",
"S3",
"minio",
"file management", "file management",
"directory management", "directory management",
"bucket creation",
"data streaming", "data streaming",
"multi-cloud", "multi-cloud",
"API", "API",
"unified storage", "unified storage",
"S3",
"minio",
"file locking",
"metadata",
"buffer handling", "buffer handling",
"access key", "access key",
"secret key" "secret key",
] "cloud agnostic"
],
"homepage": "https://code.foss.global/push.rocks/smartbucket",
"repository": {
"type": "git",
"url": "https://code.foss.global/push.rocks/smartbucket.git"
}
} }

pnpm-lock.yaml generated

File diff suppressed because it is too large


@ -1,3 +1,4 @@
required: required:
- S3_KEY - S3_KEY
- S3_SECRET - S3_SECRET
- S3_ENDPOINT


@ -1,9 +1,10 @@
# @push.rocks/smartbucket # @push.rocks/smartbucket
A TypeScript library for simple cloud independent object storage with support for buckets, directories, and files.
A TypeScript library for cloud-independent object storage, providing features like bucket creation, file and directory management, and data streaming.
## Install ## Install
To install `@push.rocks/smartbucket`, you need to have Node.js and npm (Node Package Manager) installed on your system. If you have them installed, you can add `@push.rocks/smartbucket` to your project by running the following command in your project's root directory: To install `@push.rocks/smartbucket`, you need to have Node.js and npm (Node Package Manager) installed. If they are installed, you can add `@push.rocks/smartbucket` to your project by running the following command in your project's root directory:
```bash ```bash
npm install @push.rocks/smartbucket --save npm install @push.rocks/smartbucket --save
@ -13,7 +14,7 @@ This command will download and install `@push.rocks/smartbucket` along with its
## Usage ## Usage
`@push.rocks/smartbucket` is a TypeScript module designed to provide simple cloud-independent object storage functionality. It wraps various cloud storage providers such as AWS S3, Google Cloud Storage, and others, offering a unified API to manage storage buckets and objects within those buckets. `@push.rocks/smartbucket` is a TypeScript module designed to provide simple cloud-independent object storage functionality. It wraps various cloud storage providers such as AWS S3, Google Cloud Storage, and others, offering a unified API to manage storage buckets and objects within those buckets.
In this guide, we will delve into the usage of SmartBucket, covering its full range of features from setting up the library to advanced usage scenarios. In this guide, we will delve into the usage of SmartBucket, covering its full range of features from setting up the library to advanced usage scenarios.
@ -48,7 +49,7 @@ const mySmartBucket = new SmartBucket({
accessKey: "yourAccessKey", accessKey: "yourAccessKey",
accessSecret: "yourSecretKey", accessSecret: "yourSecretKey",
endpoint: "yourEndpointURL", endpoint: "yourEndpointURL",
port: 443, // Default is 443, could be customized for specific endpoint port: 443, // Default is 443, can be customized for specific endpoint
useSsl: true // Defaults to true useSsl: true // Defaults to true
}); });
``` ```
@ -189,7 +190,7 @@ async function writeFileStream(bucketName: string, filePath: string, readableStr
// Create a readable stream from a string // Create a readable stream from a string
const readable = new Readable(); const readable = new Readable();
readable.push('Hello world streamed as a file!'); readable.push('Hello world streamed as a file!');
readable.push(null); // Indicates end of the stream readable.push(null); // End of stream
// Use the function // Use the function
writeFileStream("exampleBucket", "path/to/streamedObject.txt", readable); writeFileStream("exampleBucket", "path/to/streamedObject.txt", readable);
@ -197,7 +198,9 @@ writeFileStream("exampleBucket", "path/to/streamedObject.txt", readable);
### Working with Directories ### Working with Directories
`@push.rocks/smartbucket` abstracts directories within buckets for easier object management. You can create, list, and delete directories using the `Directory` class. `@push.rocks/smartbucket` offers abstractions for directories within buckets for easier object management. You can create, list, and delete directories using the `Directory` class.
To list the contents of a directory:
```typescript ```typescript
async function listDirectoryContents(bucketName: string, directoryPath: string) { async function listDirectoryContents(bucketName: string, directoryPath: string) {
@ -205,6 +208,7 @@ async function listDirectoryContents(bucketName: string, directoryPath: string)
if (myBucket) { if (myBucket) {
const baseDirectory: Directory = await myBucket.getBaseDirectory(); const baseDirectory: Directory = await myBucket.getBaseDirectory();
const targetDirectory: Directory = await baseDirectory.getSubDirectoryByName(directoryPath); const targetDirectory: Directory = await baseDirectory.getSubDirectoryByName(directoryPath);
console.log('Listing directories:'); console.log('Listing directories:');
const directories = await targetDirectory.listDirectories(); const directories = await targetDirectory.listDirectories();
directories.forEach(dir => { directories.forEach(dir => {
@ -250,11 +254,11 @@ createFileInDirectory("exampleBucket", "some/directory", "newfile.txt", "Hello,
#### Bucket Policies #### Bucket Policies
Manage bucket policies to control access permissions. This feature is dependent on the policies provided by the storage service (e.g., AWS S3, MinIO). Manage bucket policies to control access permissions. This feature depends on the policies provided by the storage service (e.g., AWS S3, MinIO).
#### Object Metadata #### Object Metadata
You can retrieve and modify object metadata. Metadata can be useful for storing additional information about an object. Retrieve and modify object metadata. Metadata can be useful for storing additional information about an object.
To retrieve metadata: To retrieve metadata:
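The readme excerpt above breaks off at "To retrieve metadata:". As a hedged sketch inferred from the test code added in test/test.trash.ts below (not the readme's own snippet), metadata retrieval looks roughly like this:

```typescript
// assumes `myBucket` is a Bucket instance obtained via getBucketByNameStrict()
const baseDirectory = await myBucket.getBaseDirectory();
const file = await baseDirectory.getFile({ path: 'some/object.txt' }); // placeholder path
if (file) {
  const metaData = await file.getMetaData();
  const customMetadata = await metaData.metadataFile.getJsonData();
  // after a trash delete this contains { custom_recycle: { deletedAt, originalPath } }
  console.log(customMetadata);
}
```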

test/helpers/prepare.ts Normal file

test/test.metadata.ts Normal file

@ -0,0 +1,7 @@
import { tap, expect } from '@push.rocks/tapbundle';
tap.test('test metadata functionality', async () => {
})
tap.start();

test/test.trash.ts Normal file

@ -0,0 +1,63 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import { jestExpect } from '@push.rocks/tapbundle/node';
import { Qenv } from '@push.rocks/qenv';
import * as smartbucket from '../ts/index.js';
const testQenv = new Qenv('./', './.nogit/');
let testSmartbucket: smartbucket.SmartBucket;
let myBucket: smartbucket.Bucket;
let baseDirectory: smartbucket.Directory;
tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
});
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
});
tap.test('should clean all contents', async () => {
await myBucket.cleanAllContents();
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
});
tap.test('should delete a file into the normally', async () => {
const path = 'trashtest/trashme.txt';
const file = await myBucket.fastPut({
path,
contents: 'I\'m in the trash test content!',
});
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
console.log(fileMetadata.toString());
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
await file.delete({ mode: 'permanent' });
expect((await (await myBucket.getBaseDirectory()).listFiles()).length).toEqual(0);
expect((await (await myBucket.getBaseDirectory()).listDirectories()).length).toEqual(0);
});
tap.test('should put a file into the trash', async () => {
const path = 'trashtest/trashme.txt';
const file = await myBucket.fastPut({
path,
contents: 'I\'m in the trash test content!',
});
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
console.log(fileMetadata.toString());
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
await file.delete({ mode: 'trash' });
jestExpect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({
custom_recycle: {
deletedAt: jestExpect.any(Number),
originalPath: "trashtest/trashme.txt",
},
});
});
export default tap.start();


@ -11,24 +11,28 @@ let baseDirectory: smartbucket.Directory;
tap.test('should create a valid smartbucket', async () => { tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({ testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'), accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'), accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
endpoint: 's3.eu-central-1.wasabisys.com', endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
}); });
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
});
tap.test('should clean all contents', async () => {
await myBucket.cleanAllContents();
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
}); });
tap.skip.test('should create testbucket', async () => { tap.skip.test('should create testbucket', async () => {
// await testSmartbucket.createBucket('testzone'); // await testSmartbucket.createBucket('testzone2');
}); });
tap.skip.test('should remove testbucket', async () => { tap.skip.test('should remove testbucket', async () => {
// await testSmartbucket.removeBucket('testzone'); // await testSmartbucket.removeBucket('testzone2');
});
tap.test('should get a bucket', async () => {
myBucket = await testSmartbucket.getBucketByName('testzone');
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
}); });
// Fast operations // Fast operations
@ -43,9 +47,12 @@ tap.test('should get data in bucket', async () => {
const fileString = await myBucket.fastGet({ const fileString = await myBucket.fastGet({
path: 'hithere/socool.txt', path: 'hithere/socool.txt',
}); });
const fileStringStream = await myBucket.fastGetStream({ const fileStringStream = await myBucket.fastGetStream(
path: 'hithere/socool.txt', {
}); path: 'hithere/socool.txt',
},
'nodestream'
);
console.log(fileString); console.log(fileString);
}); });
@ -79,7 +86,7 @@ tap.test('prepare for directory style tests', async () => {
contents: 'dir3/dir4/file1.txt content', contents: 'dir3/dir4/file1.txt content',
}); });
await myBucket.fastPut({ await myBucket.fastPut({
path: 'file1.txt', path: '/file1.txt',
contents: 'file1 content', contents: 'file1 content',
}); });
}); });
@ -99,8 +106,9 @@ tap.test('should get base directory', async () => {
tap.test('should correctly build paths for sub directories', async () => { tap.test('should correctly build paths for sub directories', async () => {
const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4'); const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
expect(dir4).toBeInstanceOf(smartbucket.Directory); expect(dir4).toBeInstanceOf(smartbucket.Directory);
const dir4BasePath = dir4.getBasePath(); const dir4BasePath = dir4?.getBasePath();
console.log(dir4BasePath); console.log(dir4BasePath);
expect(dir4BasePath).toEqual('dir3/dir4/');
}); });
tap.test('clean up directory style tests', async () => { tap.test('clean up directory style tests', async () => {


@ -1,8 +1,8 @@
/** /**
* autocreated commitinfo by @pushrocks/commitinfo * autocreated commitinfo by @push.rocks/commitinfo
*/ */
export const commitinfo = { export const commitinfo = {
name: '@push.rocks/smartbucket', name: '@push.rocks/smartbucket',
version: '3.0.4', version: '3.2.1',
description: 'A TypeScript library that offers simple, cloud-independent object storage with features like bucket creation, file management, and directory management.' description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
} }


@ -1,30 +1,43 @@
// classes.bucket.ts
import * as plugins from './plugins.js'; import * as plugins from './plugins.js';
import * as helpers from './helpers.js';
import * as interfaces from './interfaces.js';
import { SmartBucket } from './classes.smartbucket.js'; import { SmartBucket } from './classes.smartbucket.js';
import { Directory } from './classes.directory.js'; import { Directory } from './classes.directory.js';
import { File } from './classes.file.js';
import { Trash } from './classes.trash.js';
/**
* The bucket class exposes the basic functionality of a bucket.
* The functions of the bucket alone are enough to
* operate in S3 basic fashion on blobs of data.
*/
export class Bucket { export class Bucket {
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) { public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
const buckets = await smartbucketRef.minioClient.listBuckets(); const command = new plugins.s3.ListBucketsCommand({});
const foundBucket = buckets.find((bucket) => { const buckets = await smartbucketRef.s3Client.send(command);
return bucket.name === bucketNameArg; const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);
});
if (foundBucket) { if (foundBucket) {
console.log(`bucket with name ${bucketNameArg} exists.`); console.log(`bucket with name ${bucketNameArg} exists.`);
console.log(`Taking this as base for new Bucket instance`); console.log(`Taking this as base for new Bucket instance`);
return new this(smartbucketRef, bucketNameArg); return new this(smartbucketRef, bucketNameArg);
} else { } else {
console.log(`did not find bucket by name: ${bucketNameArg}`);
return null; return null;
} }
} }
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) { public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch((e) => console.log(e)); const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
return new Bucket(smartbucketRef, bucketName); return new Bucket(smartbucketRef, bucketName);
} }
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) { public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.removeBucket(bucketName).catch((e) => console.log(e)); const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
} }
public smartbucketRef: SmartBucket; public smartbucketRef: SmartBucket;
@ -38,8 +51,29 @@ export class Bucket {
/** /**
* gets the base directory of the bucket * gets the base directory of the bucket
*/ */
public async getBaseDirectory() { public async getBaseDirectory(): Promise<Directory> {
return new Directory(this, null, ''); return new Directory(this, null!, '');
}
/**
* gets the trash directory
*/
public async getTrash(): Promise<Trash> {
const trash = new Trash(this);
return trash;
}
public async getDirectoryFromPath(
pathDescriptorArg: interfaces.IPathDecriptor
): Promise<Directory> {
if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
return this.getBaseDirectory();
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const baseDirectory = await this.getBaseDirectory();
return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
getEmptyDirectory: true,
});
} }
// =============== // ===============
@ -49,46 +83,58 @@ export class Bucket {
/** /**
* store file * store file
*/ */
public async fastPut(optionsArg: { public async fastPut(
path: string; optionsArg: interfaces.IPathDecriptor & {
contents: string | Buffer; contents: string | Buffer;
overwrite?: boolean; overwrite?: boolean;
}): Promise<void> { }
): Promise<File> {
try { try {
// Check if the object already exists const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
const exists = await this.fastExists({ path: optionsArg.path }); const exists = await this.fastExists({ path: reducedPath });
if (exists && !optionsArg.overwrite) { if (exists && !optionsArg.overwrite) {
console.error(`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`); console.error(`Object already exists at path '${reducedPath}' in bucket '${this.name}'.`);
return; return;
} else if (exists && optionsArg.overwrite) { } else if (exists && optionsArg.overwrite) {
console.log(`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`); console.log(
`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
);
} else { } else {
console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`); console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
} }
// Proceed with putting the object const command = new plugins.s3.PutObjectCommand({
const streamIntake = new plugins.smartstream.StreamIntake(); Bucket: this.name,
const putPromise = this.smartbucketRef.minioClient.putObject(this.name, optionsArg.path, streamIntake); Key: reducedPath,
streamIntake.pushData(optionsArg.contents); Body: optionsArg.contents,
streamIntake.signalEnd(); });
await putPromise; await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`); console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
const parsedPath = plugins.path.parse(reducedPath);
return new File({
directoryRefArg: await this.getDirectoryFromPath({
path: parsedPath.dir,
}),
fileName: parsedPath.base,
});
} catch (error) { } catch (error) {
console.error(`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`, error); console.error(
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
error
);
throw error; throw error;
} }
} }
/** /**
* get file * get file
*/ */
public async fastGet(optionsArg: Parameters<typeof this.fastGetStream>[0]): Promise<Buffer> { public async fastGet(optionsArg: { path: string }): Promise<Buffer> {
const done = plugins.smartpromise.defer(); const done = plugins.smartpromise.defer();
let completeFile: Buffer; let completeFile: Buffer;
const replaySubject = await this.fastGetStream(optionsArg); const replaySubject = await this.fastGetReplaySubject(optionsArg);
const subscription = replaySubject.subscribe({ const subscription = replaySubject.subscribe({
next: (chunk) => { next: (chunk) => {
if (completeFile) { if (completeFile) {
@ -109,93 +155,137 @@ export class Bucket {
return completeFile; return completeFile;
} }
public async fastGetStream(optionsArg: { /**
* good when time to first byte is important
* and multiple subscribers are expected
* @param optionsArg
* @returns
*/
public async fastGetReplaySubject(optionsArg: {
path: string; path: string;
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> { }): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
const fileStream = await this.smartbucketRef.minioClient const command = new plugins.s3.GetObjectCommand({
.getObject(this.name, optionsArg.path) Bucket: this.name,
.catch((e) => console.log(e)); Key: optionsArg.path,
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
return;
},
finalFunction: async (cb) => {
replaySubject.complete();
return;
}
}); });
const response = await this.smartbucketRef.s3Client.send(command);
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
if (!fileStream) { // Convert the stream to a format that supports piping
return null; const stream = response.Body as any; // SdkStreamMixin includes readable stream
if (typeof stream.pipe === 'function') {
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
return;
},
finalFunction: async (cb) => {
replaySubject.complete();
return;
},
});
stream.pipe(duplexStream);
} }
const smartstream = new plugins.smartstream.StreamWrapper([
fileStream,
duplexStream,
]);
smartstream.run();
return replaySubject; return replaySubject;
} }
public fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'webstream'
): Promise<ReadableStream>;
public async fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'nodestream'
): Promise<plugins.stream.Readable>;
public async fastGetStream(
optionsArg: { path: string },
typeArg: 'webstream' | 'nodestream' = 'nodestream'
): Promise<ReadableStream | plugins.stream.Readable> {
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const stream = response.Body as any; // SdkStreamMixin includes readable stream
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
writeFunction: async (chunk) => {
return chunk;
},
finalFunction: async (cb) => {
return null;
},
});
if (typeof stream.pipe === 'function') {
stream.pipe(duplexStream);
}
if (typeArg === 'nodestream') {
return duplexStream;
}
if (typeArg === 'webstream') {
return (await duplexStream.getWebStreams()).readable;
}
throw new Error('unknown typeArg');
}
/** /**
* store file as stream * store file as stream
*/ */
public async fastPutStream(optionsArg: { public async fastPutStream(optionsArg: {
path: string; path: string;
dataStream: plugins.stream.Readable; readableStream: plugins.stream.Readable | ReadableStream;
nativeMetadata?: { [key: string]: string }; nativeMetadata?: { [key: string]: string };
overwrite?: boolean; overwrite?: boolean;
}): Promise<void> { }): Promise<void> {
try { try {
// Check if the object already exists
const exists = await this.fastExists({ path: optionsArg.path }); const exists = await this.fastExists({ path: optionsArg.path });
if (exists && !optionsArg.overwrite) { if (exists && !optionsArg.overwrite) {
console.error(`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`); console.error(
`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`
);
return; return;
} else if (exists && optionsArg.overwrite) { } else if (exists && optionsArg.overwrite) {
console.log(`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`); console.log(
`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
);
} else { } else {
console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`); console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
} }
// Proceed with putting the object const command = new plugins.s3.PutObjectCommand({
await this.smartbucketRef.minioClient.putObject( Bucket: this.name,
this.name, Key: optionsArg.path,
optionsArg.path, Body: optionsArg.readableStream,
optionsArg.dataStream, Metadata: optionsArg.nativeMetadata,
null, });
...(optionsArg.nativeMetadata await this.smartbucketRef.s3Client.send(command);
? (() => {
const returnObject: any = {}; console.log(
return returnObject; `Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
})()
: {})
); );
console.log(`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`);
} catch (error) { } catch (error) {
console.error(`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`, error); console.error(
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
error
);
throw error; throw error;
} }
} }
public async copyObject(optionsArg: { public async fastCopy(optionsArg: {
/** sourcePath: string;
* the destinationPath?: string;
*/
objectKey: string;
/**
* in case you want to copy to another bucket specify it here
*/
targetBucket?: Bucket; targetBucket?: Bucket;
targetBucketKey?: string;
/**
* metadata will be merged with existing metadata
*/
nativeMetadata?: { [key: string]: string }; nativeMetadata?: { [key: string]: string };
deleteExistingNativeMetadata?: boolean; deleteExistingNativeMetadata?: boolean;
}): Promise<void> { }): Promise<void> {
@ -203,28 +293,29 @@ export class Bucket {
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name; const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;
// Retrieve current object information to use in copy conditions // Retrieve current object information to use in copy conditions
const currentObjInfo = await this.smartbucketRef.minioClient.statObject( const currentObjInfo = await this.smartbucketRef.s3Client.send(
targetBucketName, new plugins.s3.HeadObjectCommand({
optionsArg.objectKey Bucket: this.name,
Key: optionsArg.sourcePath,
})
); );
// Setting up copy conditions
const copyConditions = new plugins.minio.CopyConditions();
// Prepare new metadata // Prepare new metadata
const newNativeMetadata = { const newNativeMetadata = {
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.metaData), ...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
...optionsArg.nativeMetadata, ...optionsArg.nativeMetadata,
}; };
// Define the copy operation as a Promise // Define the copy operation
// TODO: check on issue here: https://github.com/minio/minio-js/issues/1286 const copySource = `${this.name}/${optionsArg.sourcePath}`;
await this.smartbucketRef.minioClient.copyObject( const command = new plugins.s3.CopyObjectCommand({
this.name, Bucket: targetBucketName,
optionsArg.objectKey, CopySource: copySource,
`/${targetBucketName}/${optionsArg.objectKey}`, Key: optionsArg.destinationPath || optionsArg.sourcePath,
copyConditions Metadata: newNativeMetadata,
); MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
});
await this.smartbucketRef.s3Client.send(command);
} catch (err) { } catch (err) {
console.error('Error updating metadata:', err); console.error('Error updating metadata:', err);
throw err; // rethrow to allow caller to handle throw err; // rethrow to allow caller to handle
@ -232,28 +323,77 @@ export class Bucket {
} }
/** /**
* removeObject * Move object from one path to another within the same bucket or to another bucket
*/ */
public async fastRemove(optionsArg: { public async fastMove(optionsArg: {
path: string; sourcePath: string;
}) { destinationPath: string;
await this.smartbucketRef.minioClient.removeObject(this.name, optionsArg.path); targetBucket?: Bucket;
overwrite?: boolean;
}): Promise<void> {
try {
const destinationBucket = optionsArg.targetBucket || this;
const exists = await destinationBucket.fastExists({
path: optionsArg.destinationPath,
});
if (exists && !optionsArg.overwrite) {
console.error(
`Object already exists at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
return;
} else if (exists && optionsArg.overwrite) {
console.log(
`Overwriting existing object at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
} else {
console.log(
`Moving object to path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
}
await this.fastCopy(optionsArg);
await this.fastRemove({ path: optionsArg.sourcePath });
console.log(
`Object '${optionsArg.sourcePath}' has been successfully moved to '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
} catch (error) {
console.error(
`Error moving object from '${optionsArg.sourcePath}' to '${optionsArg.destinationPath}':`,
error
);
throw error;
}
} }
/** /**
* check wether file exists * removeObject
* @param optionsArg
* @returns
*/ */
public async fastExists(optionsArg: { public async fastRemove(optionsArg: { path: string }) {
path: string; const command = new plugins.s3.DeleteObjectCommand({
}): Promise<boolean> { Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
}
/**
* check whether file exists
* @param optionsArg
* @returns
*/
public async fastExists(optionsArg: { path: string }): Promise<boolean> {
try { try {
await this.smartbucketRef.minioClient.statObject(this.name, optionsArg.path); const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`); console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
return true; return true;
} catch (error) { } catch (error) {
if (error.code === 'NotFound') { if (error.name === 'NotFound') {
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`); console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
return false; return false;
} else { } else {
@ -263,9 +403,115 @@ export class Bucket {
} }
} }
public async fastStat(optionsArg: { /**
path: string; * deletes this bucket
}) { */
return this.smartbucketRef.minioClient.statObject(this.name, optionsArg.path); public async delete() {
await this.smartbucketRef.s3Client.send(
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
);
}
public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: checkPath,
});
return this.smartbucketRef.s3Client.send(command);
}
public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});
const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
return !!CommonPrefixes && CommonPrefixes.length > 0;
}
public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});
const { Contents } = await this.smartbucketRef.s3Client.send(command);
return !!Contents && Contents.length > 0;
}
public async getMagicBytes(optionsArg: { path: string; length: number }): Promise<Buffer> {
try {
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
Range: `bytes=0-${optionsArg.length - 1}`,
});
const response = await this.smartbucketRef.s3Client.send(command);
const chunks = [];
const stream = response.Body as any; // SdkStreamMixin includes readable stream
for await (const chunk of stream) {
chunks.push(chunk);
}
return Buffer.concat(chunks);
} catch (error) {
console.error(
`Error retrieving magic bytes from object at path '${optionsArg.path}' in bucket '${this.name}':`,
error
);
throw error;
}
}
public async cleanAllContents(): Promise<void> {
try {
// Define the command type explicitly
const listCommandInput: plugins.s3.ListObjectsV2CommandInput = {
Bucket: this.name,
};
let isTruncated = true;
let continuationToken: string | undefined = undefined;
while (isTruncated) {
// Add the continuation token to the input if present
const listCommand = new plugins.s3.ListObjectsV2Command({
...listCommandInput,
ContinuationToken: continuationToken,
});
// Explicitly type the response
const response: plugins.s3.ListObjectsV2Output =
await this.smartbucketRef.s3Client.send(listCommand);
console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);
if (response.Contents && response.Contents.length > 0) {
// Delete objects in batches, mapping each item to { Key: string }
const deleteCommand = new plugins.s3.DeleteObjectsCommand({
Bucket: this.name,
Delete: {
Objects: response.Contents.map((item) => ({ Key: item.Key! })),
Quiet: true,
},
});
await this.smartbucketRef.s3Client.send(deleteCommand);
}
// Update continuation token and truncation status
isTruncated = response.IsTruncated || false;
continuationToken = response.NextContinuationToken;
}
console.log(`All contents in bucket '${this.name}' have been deleted.`);
} catch (error) {
console.error(`Error cleaning contents of bucket '${this.name}':`, error);
throw error;
}
} }
} }
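The diff above swaps the minio client for @aws-sdk/client-s3 and reshapes several Bucket methods: fastGetStream now takes a stream type, copyObject becomes fastCopy, and fastMove is new. A hedged sketch against those new signatures (the helper function and all paths are hypothetical):

```typescript
import * as fs from 'fs';
import type { Bucket } from '@push.rocks/smartbucket';

// hypothetical helper showing the new Bucket method signatures; paths are placeholders
async function archiveVideo(bucket: Bucket) {
  // stream an object out as a Node.js readable stream ('webstream' would yield a web ReadableStream)
  const nodeStream = await bucket.fastGetStream({ path: 'videos/intro.mp4' }, 'nodestream');
  nodeStream.pipe(fs.createWriteStream('./intro.mp4'));

  // copying and moving now take explicit source/destination paths
  await bucket.fastCopy({ sourcePath: 'videos/intro.mp4', destinationPath: 'archive/intro.mp4' });
  await bucket.fastMove({
    sourcePath: 'archive/intro.mp4',
    destinationPath: 'archive/2024/intro.mp4',
    overwrite: true,
  });
}
```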


@ -1,6 +1,9 @@
// classes.directory.ts
import * as plugins from './plugins.js'; import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js'; import { Bucket } from './classes.bucket.js';
import { File } from './classes.file.js'; import { File } from './classes.file.js';
import * as helpers from './helpers.js';
export class Directory { export class Directory {
public bucketRef: Bucket; public bucketRef: Bucket;
@ -11,9 +14,9 @@ export class Directory {
public files: string[]; public files: string[];
public folders: string[]; public folders: string[];
constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) { constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
this.bucketRef = bucketRefArg; this.bucketRef = bucketRefArg;
this.parentDirectoryRef = parentDiretory; this.parentDirectoryRef = parentDirectory;
this.name = name; this.name = name;
} }
@ -63,52 +66,66 @@ export class Directory {
* gets a file by name * gets a file by name
*/ */
public async getFile(optionsArg: { public async getFile(optionsArg: {
name: string; path: string;
createWithContents?: string | Buffer; createWithContents?: string | Buffer;
}): Promise<File> { getFromTrash?: boolean;
// check wether the file exists }): Promise<File | null> {
const pathDescriptor = {
directory: this,
path: optionsArg.path,
};
const exists = await this.bucketRef.fastExists({ const exists = await this.bucketRef.fastExists({
path: this.getBasePath() + optionsArg.name, path: await helpers.reducePathDescriptorToPath(pathDescriptor),
}); });
if (!exists && optionsArg.getFromTrash) {
const trash = await this.bucketRef.getTrash();
const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
return trashedFile;
}
if (!exists && !optionsArg.createWithContents) { if (!exists && !optionsArg.createWithContents) {
return null; return null;
} }
if (!exists && optionsArg.createWithContents) { if (!exists && optionsArg.createWithContents) {
await this.fastPut({ await File.create({
path: optionsArg.name, directory: this,
name: optionsArg.path,
contents: optionsArg.createWithContents, contents: optionsArg.createWithContents,
}); });
} }
return new File({ return new File({
directoryRefArg: this, directoryRefArg: this,
fileName: optionsArg.name, fileName: optionsArg.path,
}) });
}
/**
* gets a file strictly
* @param args
* @returns
*/
public async getFileStrict(...args: Parameters<Directory['getFile']>) {
const file = await this.getFile(...args);
if (!file) {
throw new Error(`File not found at path '${args[0].path}'`);
}
return file;
} }
/** /**
* lists all files * lists all files
*/ */
public async listFiles(): Promise<File[]> { public async listFiles(): Promise<File[]> {
const done = plugins.smartpromise.defer(); const command = new plugins.s3.ListObjectsV2Command({
const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2( Bucket: this.bucketRef.name,
this.bucketRef.name, Prefix: this.getBasePath(),
this.getBasePath(), Delimiter: '/',
false });
); const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const fileArray: File[] = []; const fileArray: File[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true, response.Contents?.forEach((item) => {
writeFunction: async (bucketItem) => { if (item.Key && !item.Key.endsWith('/')) {
if (bucketItem.prefix) { const subtractedPath = item.Key.replace(this.getBasePath(), '');
return;
}
if (!bucketItem.name) {
return;
}
let subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}
if (!subtractedPath.includes('/')) { if (!subtractedPath.includes('/')) {
fileArray.push( fileArray.push(
new File({ new File({
@ -117,13 +134,9 @@ export class Directory {
}) })
); );
} }
},
finalFunction: async (tools) => {
done.resolve();
} }
}); });
fileNameStream.pipe(duplexStream);
await done.promise;
return fileArray; return fileArray;
} }
@ -131,70 +144,100 @@ export class Directory {
* lists all folders * lists all folders
*/ */
public async listDirectories(): Promise<Directory[]> { public async listDirectories(): Promise<Directory[]> {
const done = plugins.smartpromise.defer(); try {
const basePath = this.getBasePath(); const command = new plugins.s3.ListObjectsV2Command({
const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2( Bucket: this.bucketRef.name,
this.bucketRef.name, Prefix: this.getBasePath(),
this.getBasePath(), Delimiter: '/',
false });
); const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const directoryArray: Directory[] = []; const directoryArray: Directory[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true, if (response.CommonPrefixes) {
writeFunction: async (bucketItem) => { response.CommonPrefixes.forEach((item) => {
if (bucketItem.name) { if (item.Prefix) {
return; const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
} if (subtractedPath.endsWith('/')) {
let subtractedPath = bucketItem.prefix.replace(this.getBasePath(), ''); const dirName = subtractedPath.slice(0, -1);
if (subtractedPath.startsWith('/')) { // Ensure the directory name is not empty (which would indicate the base directory itself)
subtractedPath = subtractedPath.substr(1); if (dirName) {
} directoryArray.push(new Directory(this.bucketRef, this, dirName));
if (subtractedPath.includes('/')) { }
const dirName = subtractedPath.split('/')[0]; }
if (directoryArray.find((directory) => directory.name === dirName)) {
return;
} }
directoryArray.push(new Directory(this.bucketRef, this, dirName)); });
}
},
finalFunction: async (tools) => {
done.resolve();
} }
});
completeDirStream.pipe(duplexStream); return directoryArray;
await done.promise; } catch (error) {
return directoryArray; console.error('Error listing directories:', error);
throw error;
}
} }
/** /**
* gets an array that has all objects with a certain prefix; * gets an array that has all objects with a certain prefix
*/ */
public async getTreeArray() { public async getTreeArray() {
const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2( const command = new plugins.s3.ListObjectsV2Command({
this.bucketRef.name, Bucket: this.bucketRef.name,
this.getBasePath(), Prefix: this.getBasePath(),
true Delimiter: '/',
); });
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
return response.Contents;
} }
/** /**
* gets a sub directory * gets a sub directory by name
*/ */
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> { public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
const dirNameArray = dirNameArg.split('/'); getEmptyDirectory?: boolean;
createWithInitializerFile?: boolean;
} = {}): Promise<Directory | null> {
const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");
const getDirectory = async (directoryArg: Directory, dirNameToSearch: string) => { optionsArg = {
getEmptyDirectory: false,
createWithInitializerFile: false,
...optionsArg,
}
const getDirectory = async (directoryArg: Directory, dirNameToSearch: string, isFinalDirectory: boolean) => {
const directories = await directoryArg.listDirectories(); const directories = await directoryArg.listDirectories();
return directories.find((directory) => { let returnDirectory = directories.find((directory) => {
return directory.name === dirNameToSearch; return directory.name === dirNameToSearch;
}); });
if (returnDirectory) {
return returnDirectory;
}
if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
}
if (isFinalDirectory && optionsArg.createWithInitializerFile) {
returnDirectory?.createEmptyFile('00init.txt');
}
return returnDirectory || null;
}; };
let wantedDirectory: Directory;
let wantedDirectory: Directory | null = null;
let counter = 0;
for (const dirNameToSearch of dirNameArray) { for (const dirNameToSearch of dirNameArray) {
counter++;
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this; const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch); wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
} }
return wantedDirectory;
return wantedDirectory || null;
}
public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
const directory = await this.getSubDirectoryByName(...args);
if (!directory) {
throw new Error(`Directory not found at path '${args[0]}'`);
}
return directory;
} }
/** /**
@ -202,19 +245,20 @@ export class Directory {
*/ */
public async move() { public async move() {
// TODO // TODO
throw new Error('moving a directory is not yet implemented'); throw new Error('Moving a directory is not yet implemented');
} }
/** /**
* creates a file within this directory * creates an empty file within this directory
* @param relativePathArg * @param relativePathArg
*/ */
public async createEmptyFile(relativePathArg: string) { public async createEmptyFile(relativePathArg: string) {
const emtpyFile = await File.create({ const emptyFile = await File.create({
directory: this, directory: this,
name: relativePathArg, name: relativePathArg,
contents: '', contents: '',
}); });
return emptyFile;
} }
// file operations // file operations
@ -234,29 +278,82 @@ export class Directory {
return result; return result;
} }
public async fastGetStream(pathArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> { public fastGetStream(
const path = plugins.path.join(this.getBasePath(), pathArg); optionsArg: {
const result = await this.bucketRef.fastGetStream({ path: string;
path, },
}); typeArg: 'webstream'
): Promise<ReadableStream>;
public async fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'nodestream'
): Promise<plugins.stream.Readable>;
/**
* fastGetStream
* @param optionsArg
* @returns
*/
public async fastGetStream(
optionsArg: { path: string },
typeArg: 'webstream' | 'nodestream'
): Promise<ReadableStream | plugins.stream.Readable> {
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
const result = await this.bucketRef.fastGetStream(
{
path,
},
typeArg as any
);
return result; return result;
} }
public async fastRemove(optionsArg: { path: string }) { /**
* fast put stream
*/
public async fastPutStream(optionsArg: {
path: string;
stream: plugins.stream.Readable;
}): Promise<void> {
const path = plugins.path.join(this.getBasePath(), optionsArg.path); const path = plugins.path.join(this.getBasePath(), optionsArg.path);
await this.bucketRef.fastRemove({ await this.bucketRef.fastPutStream({
path, path,
readableStream: optionsArg.stream,
});
}
/**
* removes a file within the directory
* uses file class to make sure effects for metadata etc. are handled correctly
* @param optionsArg
*/
public async fastRemove(optionsArg: {
path: string
/**
* whether the file should be placed into the trash. Default is 'permanent'.
*/
mode?: 'permanent' | 'trash';
}) {
const file = await this.getFile({
path: optionsArg.path,
});
await file.delete({
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
}); });
} }
   /**
    * deletes the directory with all its contents
    */
-  public async delete() {
+  public async delete(optionsArg: {
+    mode?: 'permanent' | 'trash';
+  }) {
     const deleteDirectory = async (directoryArg: Directory) => {
       const childDirectories = await directoryArg.listDirectories();
       if (childDirectories.length === 0) {
-        console.log('directory empty! Path complete!');
+        console.log('Directory empty! Path complete!');
       } else {
         for (const childDir of childDirectories) {
           await deleteDirectory(childDir);
@@ -264,9 +361,9 @@ export class Directory {
       }
       const files = await directoryArg.listFiles();
       for (const file of files) {
-        await directoryArg.fastRemove({
-          path: file.name,
+        await file.delete({
+          mode: optionsArg.mode ? optionsArg.mode : 'permanent',
         });
       }
     };
     await deleteDirectory(this);
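A short usage sketch of the reworked recursive delete (assuming `dir` is a Directory instance):

await dir.delete({ mode: 'trash' });     // walks child directories and trashes every file
await dir.delete({ mode: 'permanent' }); // removes everything outright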
ts/classes.file.ts
@@ -1,8 +1,9 @@
 import * as plugins from './plugins.js';
+import * as helpers from './helpers.js';
+import * as interfaces from './interfaces.js';
 import { Directory } from './classes.directory.js';
 import { MetaData } from './classes.metadata.js';
 /**
  * represents a file in a directory
  */
@@ -31,7 +32,12 @@ export class File {
       directoryRefArg: optionsArg.directory,
       fileName: optionsArg.name,
     });
-    if (contents instanceof plugins.stream.Readable) {} else {
+    if (contents instanceof plugins.stream.Readable) {
+      await optionsArg.directory.fastPutStream({
+        path: optionsArg.name,
+        stream: contents,
+      });
+    } else {
       await optionsArg.directory.fastPut({
         path: optionsArg.name,
         contents: contents,
@@ -44,9 +50,13 @@ export class File {
   public parentDirectoryRef: Directory;
   public name: string;
+  /**
+   * get the full path to the file
+   * @returns the full path to the file
+   */
   public getBasePath(): string {
     return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
-  };
+  }
   constructor(optionsArg: { directoryRefArg: Directory; fileName: string }) {
     this.parentDirectoryRef = optionsArg.directoryRefArg;
@@ -65,33 +75,59 @@ export class File {
     return resultBuffer;
   }
-  public async getReadStream() {
-    const readStream = this.parentDirectoryRef.bucketRef.fastGetStream({
-      path: this.getBasePath(),
-    });
+  public async getReadStream(typeArg: 'webstream'): Promise<ReadableStream>;
+  public async getReadStream(typeArg: 'nodestream'): Promise<plugins.stream.Readable>;
+  public async getReadStream(
+    typeArg: 'nodestream' | 'webstream'
+  ): Promise<ReadableStream | plugins.stream.Readable> {
+    const readStream = this.parentDirectoryRef.bucketRef.fastGetStream(
+      {
+        path: this.getBasePath(),
+      },
+      typeArg as any
+    );
+    return readStream;
   }
   /**
-   * removes this file
-   * for using recycling mechanics use .delete()
+   * deletes this file
    */
-  public async remove() {
-    await this.parentDirectoryRef.bucketRef.fastRemove({
-      path: this.getBasePath(),
-    });
-    if (!this.name.endsWith('.metadata')) {
+  public async delete(optionsArg?: { mode: 'trash' | 'permanent' }) {
+    optionsArg = {
+      ...{
+        mode: 'permanent',
+      },
+      ...optionsArg,
+    };
+    if (optionsArg.mode === 'permanent') {
       await this.parentDirectoryRef.bucketRef.fastRemove({
-        path: this.getBasePath() + '.metadata',
+        path: this.getBasePath(),
+      });
+      if (!this.name.endsWith('.metadata')) {
+        if (await this.hasMetaData()) {
+          const metadata = await this.getMetaData();
+          await metadata.metadataFile.delete(optionsArg);
+        }
+      }
+    } else if (optionsArg.mode === 'trash') {
+      const metadata = await this.getMetaData();
+      await metadata.storeCustomMetaData({
+        key: 'recycle',
+        value: {
+          deletedAt: Date.now(),
+          originalPath: this.getBasePath(),
+        },
+      });
+      const trash = await this.parentDirectoryRef.bucketRef.getTrash();
+      const trashDir = await trash.getTrashDir();
+      await this.move({
+        directory: trashDir,
+        path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
       });
     }
-    await this.parentDirectoryRef.listFiles();
-  }
-  /**
-   * deletes the file with recycling mechanics
-   */
-  public async delete() {
-    await this.remove();
+    await this.parentDirectoryRef.listFiles();
   }
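Sketch of the trash path (assuming `dir` is a Directory and the file exists):

const file = await dir.getFileStrict({ path: 'invoice.pdf' });
await file.delete({ mode: 'trash' });
// the original path and deletion time are recorded under the 'recycle' custom metadata key,
// then the object is moved into the bucket's .trash directory under a trash key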
   /**
@@ -102,7 +138,7 @@ export class File {
     const metadata = await this.getMetaData();
     await metadata.setLock({
       lock: 'locked',
-      expires: new Date(Date.now() + (optionsArg?.timeoutMillis || 1000)),
+      expires: Date.now() + (optionsArg?.timeoutMillis || 1000),
     });
   }
@@ -116,39 +152,122 @@ export class File {
      */
     force?: boolean;
   }) {
+    const metadata = await this.getMetaData();
+    await metadata.removeLock({
+      force: optionsArg?.force,
+    });
   }
   public async updateWithContents(optionsArg: {
-    contents: Buffer | string | plugins.stream.Readable;
+    contents: Buffer | string | plugins.stream.Readable | ReadableStream;
     encoding?: 'utf8' | 'binary';
   }) {
-    if (optionsArg.contents instanceof plugins.stream.Readable) {
+    if (
+      optionsArg.contents instanceof plugins.stream.Readable ||
+      optionsArg.contents instanceof ReadableStream
+    ) {
       await this.parentDirectoryRef.bucketRef.fastPutStream({
         path: this.getBasePath(),
-        dataStream: optionsArg.contents,
+        readableStream: optionsArg.contents,
+        overwrite: true,
       });
     } else if (Buffer.isBuffer(optionsArg.contents)) {
       await this.parentDirectoryRef.bucketRef.fastPut({
         path: this.getBasePath(),
         contents: optionsArg.contents,
+        overwrite: true,
       });
     } else if (typeof optionsArg.contents === 'string') {
       await this.parentDirectoryRef.bucketRef.fastPut({
         path: this.getBasePath(),
         contents: Buffer.from(optionsArg.contents, optionsArg.encoding),
+        overwrite: true,
       });
     }
   }
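Examples of the three accepted content types (illustrative; `file` is an existing File instance):

await file.updateWithContents({ contents: 'hello world', encoding: 'utf8' });
await file.updateWithContents({ contents: Buffer.from([0x00, 0x01, 0x02]) });
// Node Readable and WHATWG ReadableStream values are forwarded to fastPutStream with overwrite: true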
+  /**
+   * moves the file to another directory
+   */
+  public async move(pathDescriptorArg: interfaces.IPathDecriptor) {
+    let moveToPath: string = '';
+    const isDirectory = await this.parentDirectoryRef.bucketRef.isDirectory(pathDescriptorArg);
+    if (isDirectory) {
+      moveToPath = await helpers.reducePathDescriptorToPath({
+        ...pathDescriptorArg,
+        path: plugins.path.join(pathDescriptorArg.path!, this.name),
+      });
+    } else {
+      moveToPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
+    }
+    // lets move the file
+    await this.parentDirectoryRef.bucketRef.fastMove({
+      sourcePath: this.getBasePath(),
+      destinationPath: moveToPath,
+      overwrite: true,
+    });
+    // lets move the metadatafile
+    if (!this.name.endsWith('.metadata')) {
+      const metadata = await this.getMetaData();
+      await this.parentDirectoryRef.bucketRef.fastMove({
+        sourcePath: metadata.metadataFile.getBasePath(),
+        destinationPath: moveToPath + '.metadata',
+        overwrite: true,
+      });
+    }
+    // lets update references of this
+    const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
+    this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
+      pathDescriptorArg.directory?.getBasePath()!
+    );
+    this.name = pathDescriptorArg.path!;
+  }
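A sketch of a move call (illustrative; `archiveDir` is an assumed Directory instance, and the { directory, path } descriptor mirrors the form used by delete() when trashing a file):

await file.move({ directory: archiveDir, path: 'archived-invoice.pdf' });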
+  public async hasMetaData(): Promise<boolean> {
+    if (!this.name.endsWith('.metadata')) {
+      const hasMetadataBool = MetaData.hasMetaData({
+        file: this,
+      });
+      return hasMetadataBool;
+    } else {
+      return false;
+    }
+  }
   /**
    * allows updating the metadata of a file
    * @param updatedMetadata
    */
   public async getMetaData() {
+    if (this.name.endsWith('.metadata')) {
+      throw new Error('metadata files cannot have metadata');
+    }
     const metadata = await MetaData.createForFile({
       file: this,
     });
     return metadata;
   }
+  /**
+   * gets the contents as json
+   */
+  public async getJsonData() {
+    const json = await this.getContentsAsString();
+    const parsed = await JSON.parse(json);
+    return parsed;
+  }
+  public async writeJsonData(dataArg: any) {
+    await this.updateWithContents({
+      contents: JSON.stringify(dataArg),
+    });
+  }
+  public async getMagicBytes(optionsArg: { length: number }): Promise<Buffer> {
+    return this.parentDirectoryRef.bucketRef.getMagicBytes({
+      path: this.getBasePath(),
+      length: optionsArg.length,
+    });
+  }
 }
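JSON round trip, as a sketch (assumes `dir` is a Directory; the file name is a placeholder):

const settingsFile = await dir.getFileStrict({ path: 'settings.json', createWithContents: '{}' });
await settingsFile.writeJsonData({ theme: 'dark' });
const settings = await settingsFile.getJsonData(); // -> { theme: 'dark' }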
ts/classes.metadata.ts
@@ -3,16 +3,22 @@ import * as plugins from './plugins.js';
 import { File } from './classes.file.js';
 export class MetaData {
+  public static async hasMetaData(optionsArg: { file: File }) {
+    // lets find the existing metadata file
+    const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
+      path: optionsArg.file.name + '.metadata',
+    });
+    return !!existingFile;
+  }
+
   // static
-  public static async createForFile(optionsArg: {
-    file: File;
-  }) {
+  public static async createForFile(optionsArg: { file: File }) {
     const metaData = new MetaData();
     metaData.fileRef = optionsArg.file;
     // lets find the existing metadata file
-    metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
-      name: metaData.fileRef.name + '.metadata',
+    metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
+      path: metaData.fileRef.name + '.metadata',
       createWithContents: '{}',
     });
@@ -23,22 +29,36 @@ export class MetaData {
   /**
    * the file that contains the metadata
    */
-  metadataFile: File;
+  metadataFile!: File;
   /**
    * the file that the metadata is for
    */
-  fileRef: File;
+  fileRef!: File;
   public async getFileType(optionsArg?: {
     useFileExtension?: boolean;
     useMagicBytes?: boolean;
-  }): Promise<string> {
-    if (optionsArg && optionsArg.useFileExtension || optionsArg.useFileExtension === undefined) {
-      return plugins.path.extname(this.fileRef.name);
+  }): Promise<plugins.smartmime.IFileTypeResult | undefined> {
+    if ((optionsArg && optionsArg.useFileExtension) || !optionsArg) {
+      const fileType = await plugins.smartmime.detectMimeType({
+        path: this.fileRef.name,
+      });
+      return fileType;
     }
-  };
+    if (optionsArg && optionsArg.useMagicBytes) {
+      const fileType = await plugins.smartmime.detectMimeType({
+        buffer: await this.fileRef.getMagicBytes({
+          length: 100,
+        }),
+      });
+      return fileType;
+    }
+    throw new Error('optionsArg.useFileExtension and optionsArg.useMagicBytes cannot both be false');
+  }
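Hedged usage of the two detection paths (the exact shape of IFileTypeResult comes from @push.rocks/smartmime and is not shown in this diff):

const meta = await file.getMetaData();
const byName = await meta.getFileType();                         // no options: falls back to the file name
const byBytes = await meta.getFileType({ useMagicBytes: true }); // sniffs the first 100 bytes
console.log(byName, byBytes);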
   /**
    * gets the size of the fileRef
    */
@@ -46,60 +66,57 @@ export class MetaData {
     const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
       path: this.fileRef.getBasePath(),
     });
-    return stat.size;
-  };
+    return stat.ContentLength!;
+  }
   private prefixCustomMetaData = 'custom_';
-  public async storeCustomMetaData<T = any>(optionsArg: {
-    key: string;
-    value: T;
-  }) {
-    const json = await this.metadataFile.getContentsAsString();
-    const parsed = await JSON.parse(json);
-    parsed[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
-    await this.metadataFile.updateWithContents({
-      contents: JSON.stringify(parsed),
-    });
+  public async storeCustomMetaData<T = any>(optionsArg: { key: string; value: T }) {
+    const data = await this.metadataFile.getJsonData();
+    data[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
+    await this.metadataFile.writeJsonData(data);
   }
-  public async getCustomMetaData<T = any>(optionsArg: {
-    key: string;
-  }): Promise<T> {
-    const json = await this.metadataFile.getContentsAsString();
-    const parsed = await JSON.parse(json);
-    return parsed[this.prefixCustomMetaData + optionsArg.key];
+  public async getCustomMetaData<T = any>(optionsArg: { key: string }): Promise<T> {
+    const data = await this.metadataFile.getJsonData();
+    return data[this.prefixCustomMetaData + optionsArg.key];
   }
-  public async deleteCustomMetaData(optionsArg: {
-    key: string;
-  }) {
-    const json = await this.metadataFile.getContentsAsString();
-    const parsed = await JSON.parse(json);
-    delete parsed[this.prefixCustomMetaData + optionsArg.key];
-    await this.metadataFile.updateWithContents({
-      contents: JSON.stringify(parsed),
-    });
+  public async deleteCustomMetaData(optionsArg: { key: string }) {
+    const data = await this.metadataFile.getJsonData();
+    delete data[this.prefixCustomMetaData + optionsArg.key];
+    await this.metadataFile.writeJsonData(data);
   }
   /**
    * set a lock on the ref file
    * @param optionsArg
    */
-  public async setLock(optionsArg: {
-    lock: string;
-    expires: Date;
-  }) {
+  public async setLock(optionsArg: { lock: string; expires: number }) {
+    const data = await this.metadataFile.getJsonData();
+    data.lock = optionsArg.lock;
+    data.lockExpires = optionsArg.expires;
+    await this.metadataFile.writeJsonData(data);
   }
   /**
    * remove the lock on the ref file
    * @param optionsArg
    */
-  public async removeLock(optionsArg: {
-    force: boolean;
-  }) {
+  public async removeLock(optionsArg: { force: boolean }) {
+    const data = await this.metadataFile.getJsonData();
+    delete data.lock;
+    delete data.lockExpires;
+    await this.metadataFile.writeJsonData(data);
   }
+  public async checkLocked(): Promise<boolean> {
+    const data = await this.metadataFile.getJsonData();
+    return data.lock && data.lockExpires > Date.now();
+  }
+  public async getLockInfo(): Promise<{ lock: string; expires: number }> {
+    const data = await this.metadataFile.getJsonData();
+    return { lock: data.lock, expires: data.lockExpires };
+  }
 }
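Putting the metadata helpers together (illustrative; `file` is an existing File instance):

const meta = await file.getMetaData();
await meta.storeCustomMetaData({ key: 'owner', value: 'reporting-service' });
const owner = await meta.getCustomMetaData<string>({ key: 'owner' }); // read back from <name>.metadata
await meta.setLock({ lock: 'locked', expires: Date.now() + 60_000 });
const isLocked = await meta.checkLocked(); // true until lockExpires has passed
await meta.removeLock({ force: true });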
ts/classes.smartbucket.ts
@@ -1,22 +1,34 @@
+// classes.smartbucket.ts
 import * as plugins from './plugins.js';
 import { Bucket } from './classes.bucket.js';
 export class SmartBucket {
   public config: plugins.tsclass.storage.IS3Descriptor;
-  public minioClient: plugins.minio.Client;
+  public s3Client: plugins.s3.S3Client;
+  /**
+   * the constructor of SmartBucket
+   */
   /**
    * the constructor of SmartBucket
    */
   constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
     this.config = configArg;
-    this.minioClient = new plugins.minio.Client({
-      endPoint: this.config.endpoint,
-      port: configArg.port || 443,
-      useSSL: configArg.useSsl !== undefined ? configArg.useSsl : true,
-      accessKey: this.config.accessKey,
-      secretKey: this.config.accessSecret,
+
+    const protocol = configArg.useSsl === false ? 'http' : 'https';
+    const port = configArg.port ? `:${configArg.port}` : '';
+    const endpoint = `${protocol}://${configArg.endpoint}${port}`;
+
+    this.s3Client = new plugins.s3.S3Client({
+      endpoint,
+      region: configArg.region || 'us-east-1',
+      credentials: {
+        accessKeyId: configArg.accessKey,
+        secretAccessKey: configArg.accessSecret,
+      },
+      forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
     });
   }
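A construction sketch; the field names mirror those read in the constructor above, the values are placeholders, and any further IS3Descriptor fields required by @tsclass/tsclass are omitted here:

const smartbucket = new SmartBucket({
  endpoint: 's3.example.com',
  port: 9000,
  useSsl: false,
  accessKey: 'myAccessKey',
  accessSecret: 'myAccessSecret',
  region: 'us-east-1',
});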
@@ -29,7 +41,15 @@ export class SmartBucket {
     await Bucket.removeBucketByName(this, bucketName);
   }
-  public async getBucketByName(bucketName: string) {
-    return Bucket.getBucketByName(this, bucketName);
+  public async getBucketByName(bucketNameArg: string) {
+    return Bucket.getBucketByName(this, bucketNameArg);
+  }
+
+  public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
+    const bucket = await this.getBucketByName(...args);
+    if (!bucket) {
+      throw new Error(`Bucket ${args[0]} does not exist.`);
+    }
+    return bucket;
   }
 }
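Usage sketch for the strict variant (assumes `smartbucket` is a SmartBucket instance):

const bucket = await smartbucket.getBucketByNameStrict('my-bucket');   // throws if the bucket is missing
const maybeBucket = await smartbucket.getBucketByName('other-bucket'); // may resolve to nothing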
ts/classes.trash.ts Normal file
@@ -0,0 +1,30 @@
+import * as plugins from './plugins.js';
+import * as interfaces from './interfaces.js';
+import * as helpers from './helpers.js';
+import type { Bucket } from './classes.bucket.js';
+import type { Directory } from './classes.directory.js';
+import type { File } from './classes.file.js';
+
+export class Trash {
+  public bucketRef: Bucket;
+
+  constructor(bucketRefArg: Bucket) {
+    this.bucketRef = bucketRefArg;
+  }
+
+  public async getTrashDir() {
+    return this.bucketRef.getDirectoryFromPath({ path: '.trash' });
+  }
+
+  public async getTrashedFileByOriginalName(pathDescriptor: interfaces.IPathDecriptor): Promise<File> {
+    const trashDir = await this.getTrashDir();
+    const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
+    const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
+    return trashDir.getFileStrict({ path: trashKey });
+  }
+
+  public async getTrashKeyByOriginalBasePath(originalPath: string): Promise<string> {
+    return plugins.smartstring.base64.encode(originalPath);
+  }
+}
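How the trash pieces fit together, as a sketch (`bucket.getTrash()` is the accessor referenced from File.delete() above; the example path is a placeholder and the key shown assumes plain base64 output from smartstring):

const trash = await bucket.getTrash();
const key = await trash.getTrashKeyByOriginalBasePath('reports/invoice.pdf'); // 'cmVwb3J0cy9pbnZvaWNlLnBkZg=='
const trashedFile = await trash.getTrashedFileByOriginalName({ path: 'reports/invoice.pdf' });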
ts/helpers.ts Normal file
@@ -0,0 +1,22 @@
+import * as plugins from './plugins.js';
+import * as interfaces from './interfaces.js';
+
+export const reducePathDescriptorToPath = async (pathDescriptorArg: interfaces.IPathDecriptor): Promise<string> => {
+  let returnPath = ``;
+  if (pathDescriptorArg.directory) {
+    if (pathDescriptorArg.path && plugins.path.isAbsolute(pathDescriptorArg.path)) {
+      console.warn('Directory is being ignored when path is absolute.');
+      returnPath = pathDescriptorArg.path;
+    } else if (pathDescriptorArg.path) {
+      returnPath = plugins.path.join(pathDescriptorArg.directory.getBasePath(), pathDescriptorArg.path);
+    }
+  } else if (pathDescriptorArg.path) {
+    returnPath = pathDescriptorArg.path;
+  } else {
+    throw new Error('You must specify either a path or a directory.');
+  }
+  if (returnPath.startsWith('/')) {
+    returnPath = returnPath.substring(1);
+  }
+  return returnPath;
+};
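A few reductions, as a sketch (assuming `docsDir.getBasePath()` returns 'docs'):

await reducePathDescriptorToPath({ path: '/readme.md' });                  // 'readme.md' (leading slash stripped)
await reducePathDescriptorToPath({ directory: docsDir, path: 'a/b.txt' }); // 'docs/a/b.txt'
await reducePathDescriptorToPath({});                                      // throws: a path or a directory is required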
ts/interfaces.ts Normal file
@@ -0,0 +1,6 @@
+import type { Directory } from "./classes.directory.js";
+
+export interface IPathDecriptor {
+  path?: string;
+  directory?: Directory;
+}
ts/plugins.ts
@@ -1,3 +1,5 @@
+// plugins.ts
 // node native
 import * as path from 'path';
 import * as stream from 'stream';
@@ -10,8 +12,10 @@ import * as smartpath from '@push.rocks/smartpath';
 import * as smartpromise from '@push.rocks/smartpromise';
 import * as smartrx from '@push.rocks/smartrx';
 import * as smartstream from '@push.rocks/smartstream';
+import * as smartstring from '@push.rocks/smartstring';
+import * as smartunique from '@push.rocks/smartunique';
-export { smartmime, smartpath, smartpromise, smartrx, smartstream };
+export { smartmime, smartpath, smartpromise, smartrx, smartstream, smartstring, smartunique };
 // @tsclass
 import * as tsclass from '@tsclass/tsclass';
@@ -21,6 +25,8 @@ export {
 }
 // third party scope
-import * as minio from 'minio';
-export { minio };
+import * as s3 from '@aws-sdk/client-s3';
+
+export {
+  s3,
+}
tsconfig.json
@@ -6,7 +6,8 @@
     "module": "NodeNext",
     "moduleResolution": "NodeNext",
     "esModuleInterop": true,
-    "verbatimModuleSyntax": true
+    "verbatimModuleSyntax": true,
+    "strict": true
   },
   "exclude": [
     "dist_*/**/*.d.ts"