Compare commits


26 Commits

Author SHA1 Message Date
dc99cfa229 3.0.20 2024-06-19 18:28:53 +02:00
23f8dc55d0 fix(core): update 2024-06-19 18:28:52 +02:00
ffaf0fc97a 3.0.19 2024-06-18 18:44:59 +02:00
2a0425ff54 fix(delete functions): ensure more consistency between methods and trash behaviour 2024-06-18 18:44:58 +02:00
9adcdee0a0 3.0.18 2024-06-17 20:00:58 +02:00
786f8d4365 fix(core): update 2024-06-17 20:00:57 +02:00
67244ba5cf 3.0.17 2024-06-17 19:57:56 +02:00
a9bb31c2a2 fix(core): update 2024-06-17 19:57:56 +02:00
bd8b05920f 3.0.16 2024-06-17 16:01:36 +02:00
535d9f8520 fix(core): update 2024-06-17 16:01:35 +02:00
8401fe1c0c 3.0.15 2024-06-11 17:21:22 +02:00
08c3f674bf fix(core): update 2024-06-11 17:21:22 +02:00
df0a439def 3.0.14 2024-06-11 17:20:49 +02:00
7245b49c31 fix(core): update 2024-06-11 17:20:48 +02:00
4b70edb947 finish trash 2024-06-10 16:47:20 +02:00
9629a04da6 3.0.13 2024-06-09 16:32:33 +02:00
963463d40d fix(core): update 2024-06-09 16:32:32 +02:00
ce58b99fc7 3.0.12 2024-06-09 16:02:34 +02:00
591c99736d fix(core): update 2024-06-09 16:02:33 +02:00
559e3da47b 3.0.11 2024-06-08 19:13:25 +02:00
a7ac870e05 fix(core): update 2024-06-08 19:13:24 +02:00
d48c5e229a 3.0.10 2024-06-03 21:35:08 +02:00
b9c384dd08 fix(core): update 2024-06-03 21:35:08 +02:00
91c04b2364 update description 2024-05-29 14:11:54 +02:00
b5dcc131e2 3.0.9 2024-05-27 17:34:27 +02:00
cb0ab2c9db fix(core): update 2024-05-27 17:34:26 +02:00
15 changed files with 1837 additions and 570 deletions

npmextra.json

@ -8,29 +8,27 @@
"githost": "code.foss.global",
"gitscope": "push.rocks",
"gitrepo": "smartbucket",
"description": "A TypeScript library for cloud-independent object storage, providing features like bucket creation, file and directory management, and data streaming.",
"description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
"npmPackagename": "@push.rocks/smartbucket",
"license": "MIT",
"keywords": [
"TypeScript",
"cloud storage",
"object storage",
"TypeScript",
"S3",
"minio",
"bucket creation",
"file management",
"directory management",
"bucket creation",
"data streaming",
"multi-cloud",
"API",
"unified storage",
"S3",
"minio",
"file locking",
"metadata",
"buffer handling",
"access key",
"secret key",
"metadata",
"file locking",
"file streaming",
"directory listing",
"cloud agnostic"
]
}

package-lock.json generated

@ -1,12 +1,12 @@
{
"name": "@push.rocks/smartbucket",
"version": "3.0.8",
"version": "3.0.20",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@push.rocks/smartbucket",
"version": "3.0.8",
"version": "3.0.20",
"license": "UNLICENSED",
"dependencies": {
"@push.rocks/smartpath": "^5.0.18",

package.json

@ -1,7 +1,7 @@
{
"name": "@push.rocks/smartbucket",
"version": "3.0.8",
"description": "A TypeScript library for cloud-independent object storage, providing features like bucket creation, file and directory management, and data streaming.",
"version": "3.0.20",
"description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
"type": "module",
@ -19,13 +19,15 @@
"@push.rocks/tapbundle": "^5.0.23"
},
"dependencies": {
"@push.rocks/smartmime": "^2.0.0",
"@aws-sdk/client-s3": "^3.600.0",
"@push.rocks/smartmime": "^2.0.2",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.3",
"@push.rocks/smartrx": "^3.0.7",
"@push.rocks/smartstream": "^3.0.38",
"@tsclass/tsclass": "^4.0.54",
"minio": "^8.0.0"
"@push.rocks/smartstream": "^3.0.44",
"@push.rocks/smartstring": "^4.0.15",
"@push.rocks/smartunique": "^3.0.9",
"@tsclass/tsclass": "^4.0.60"
},
"private": false,
"files": [
@ -44,25 +46,28 @@
"last 1 chrome versions"
],
"keywords": [
"TypeScript",
"cloud storage",
"object storage",
"TypeScript",
"S3",
"minio",
"bucket creation",
"file management",
"directory management",
"bucket creation",
"data streaming",
"multi-cloud",
"API",
"unified storage",
"S3",
"minio",
"file locking",
"metadata",
"buffer handling",
"access key",
"secret key",
"metadata",
"file locking",
"file streaming",
"directory listing",
"cloud agnostic"
]
],
"homepage": "https://code.foss.global/push.rocks/smartbucket",
"repository": {
"type": "git",
"url": "https://code.foss.global/push.rocks/smartbucket.git"
}
}

pnpm-lock.yaml generated

File diff suppressed because it is too large

readme.md

@ -1,10 +1,10 @@
# @push.rocks/smartbucket
A TypeScript library that offers simple, cloud-independent object storage with features like bucket creation, file management, and directory management.
A TypeScript library for cloud-independent object storage, providing features like bucket creation, file and directory management, and data streaming.
## Install
To install `@push.rocks/smartbucket`, you need to have Node.js and npm (Node Package Manager) installed on your system. If you have them installed, you can add `@push.rocks/smartbucket` to your project by running the following command in your project's root directory:
To install `@push.rocks/smartbucket`, you need to have Node.js and npm (Node Package Manager) installed. If they are installed, you can add `@push.rocks/smartbucket` to your project by running the following command in your project's root directory:
```bash
npm install @push.rocks/smartbucket --save
@ -14,7 +14,7 @@ This command will download and install `@push.rocks/smartbucket` along with its
## Usage
`@push.rocks/smartbucket` is a TypeScript module designed to provide simple cloud-independent object storage functionality. It wraps various cloud storage providers such as AWS S3, Google Cloud Storage, and others, offering a unified API to manage storage buckets and objects within those buckets.
`@push.rocks/smartbucket` is a TypeScript module designed to provide simple cloud-independent object storage functionality. It wraps various cloud storage providers such as AWS S3, Google Cloud Storage, and others, offering a unified API to manage storage buckets and objects within those buckets.
In this guide, we will delve into the usage of SmartBucket, covering its full range of features from setting up the library to advanced usage scenarios.
@ -49,7 +49,7 @@ const mySmartBucket = new SmartBucket({
accessKey: "yourAccessKey",
accessSecret: "yourSecretKey",
endpoint: "yourEndpointURL",
port: 443, // Default is 443, could be customized for specific endpoint
port: 443, // Default is 443, can be customized for specific endpoint
useSsl: true // Defaults to true
});
```
@ -190,7 +190,7 @@ async function writeFileStream(bucketName: string, filePath: string, readableStr
// Create a readable stream from a string
const readable = new Readable();
readable.push('Hello world streamed as a file!');
readable.push(null); // Indicates end of the stream
readable.push(null); // End of stream
// Use the function
writeFileStream("exampleBucket", "path/to/streamedObject.txt", readable);
@ -198,9 +198,9 @@ writeFileStream("exampleBucket", "path/to/streamedObject.txt", readable);
### Working with Directories
`@push.rocks/smartbucket` abstracts directories within buckets for easier object management. You can create, list, and delete directories using the `Directory` class.
`@push.rocks/smartbucket` offers abstractions for directories within buckets for easier object management. You can create, list, and delete directories using the `Directory` class.
Here's how to list the contents of a directory:
To list the contents of a directory:
```typescript
async function listDirectoryContents(bucketName: string, directoryPath: string) {
@ -254,11 +254,11 @@ createFileInDirectory("exampleBucket", "some/directory", "newfile.txt", "Hello,
#### Bucket Policies
Manage bucket policies to control access permissions. This feature is dependent on the policies provided by the storage service (e.g., AWS S3, MinIO).
Manage bucket policies to control access permissions. This feature depends on the policies provided by the storage service (e.g., AWS S3, MinIO).
#### Object Metadata
You can retrieve and modify object metadata. Metadata can be useful for storing additional information about an object.
Retrieve and modify object metadata. Metadata can be useful for storing additional information about an object.
To retrieve metadata:
@ -308,8 +308,6 @@ Remember, each cloud provider has specific features and limitations. `@push.rock
This guide covers the basic to advanced scenarios of using `@push.rocks/smartbucket`. For further details, refer to the API documentation and examples.
## License and Legal Information
This repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository.
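
Taken together, the API documented in this readme covers connecting to an S3-compatible endpoint, fetching a bucket, writing an object, and reading it back as a stream. A minimal end-to-end sketch along those lines (endpoint, credentials, bucket and object names are placeholders, not values from this repository):

```typescript
import { SmartBucket } from '@push.rocks/smartbucket';

async function roundTrip() {
  const smartbucket = new SmartBucket({
    accessKey: 'yourAccessKey',    // placeholder
    accessSecret: 'yourSecretKey', // placeholder
    endpoint: 's3.example.com',    // any S3-compatible endpoint
    port: 443,
    useSsl: true,
  });

  const bucket = await smartbucket.getBucketByName('example-bucket');

  // store a small text object
  await bucket.fastPut({
    path: 'docs/hello.txt',
    contents: 'Hello from smartbucket!',
  });

  // read it back as a Node.js stream (see the typed overloads introduced below)
  const readable = await bucket.fastGetStream({ path: 'docs/hello.txt' }, 'nodestream');
  readable.pipe(process.stdout);
}

roundTrip();
```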

test/test.trash.ts Normal file

@ -0,0 +1,24 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as smartbucket from '../ts/index.js';
const testQenv = new Qenv('./', './.nogit/');
let testSmartbucket: smartbucket.SmartBucket;
let myBucket: smartbucket.Bucket;
let baseDirectory: smartbucket.Directory;
tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
endpoint: 's3.eu-central-1.wasabisys.com',
});
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByName('testzone');
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
});
export default tap.start();
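
The new test file above only verifies the connection and bucket lookup; a trash round-trip test appended to it, using the `delete({ mode: 'trash' })` and `getFromTrash` APIs added in this compare, could look roughly like the following sketch (paths and contents are made-up fixtures, and it assumes `File` is exported from the package index):

```typescript
tap.test('should soft-delete a file and find it in the trash', async () => {
  baseDirectory = await myBucket.getBaseDirectory();

  // create a file, then move it to the trash instead of deleting it permanently
  await baseDirectory.fastPut({ path: 'trashtest/hello.txt', contents: 'hi there!' });
  const file = await baseDirectory.getFile({ path: 'trashtest/hello.txt' });
  await file.delete({ mode: 'trash' });

  // the trashed file stays addressable by its original path
  const trashedFile = await baseDirectory.getFile({
    path: 'trashtest/hello.txt',
    getFromTrash: true,
  });
  expect(trashedFile).toBeInstanceOf(smartbucket.File); // assumes File is exported
});
```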

test/test.ts

@ -15,22 +15,20 @@ tap.test('should create a valid smartbucket', async () => {
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
endpoint: 's3.eu-central-1.wasabisys.com',
});
});
tap.skip.test('should create testbucket', async () => {
// await testSmartbucket.createBucket('testzone');
});
tap.skip.test('should remove testbucket', async () => {
// await testSmartbucket.removeBucket('testzone');
});
tap.test('should get a bucket', async () => {
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByName('testzone');
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
});
tap.skip.test('should create testbucket', async () => {
// await testSmartbucket.createBucket('testzone2');
});
tap.skip.test('should remove testbucket', async () => {
// await testSmartbucket.removeBucket('testzone2');
});
// Fast operations
tap.test('should store data in bucket fast', async () => {
await myBucket.fastPut({
@ -45,7 +43,7 @@ tap.test('should get data in bucket', async () => {
});
const fileStringStream = await myBucket.fastGetStream({
path: 'hithere/socool.txt',
});
}, 'nodestream');
console.log(fileString);
});
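
The only functional change in this test is that `fastGetStream` now takes an explicit stream flavour as a second argument. A short sketch of both overloads as they appear in the bucket class further below (the path reuses the test fixture):

```typescript
tap.test('should stream an object as node and web streams', async () => {
  // Node.js readable stream, suitable for piping into files or HTTP responses
  const nodeStream = await myBucket.fastGetStream(
    { path: 'hithere/socool.txt' },
    'nodestream'
  );
  nodeStream.pipe(process.stdout);

  // WHATWG web stream, suitable for web and edge runtimes
  const webStream = await myBucket.fastGetStream(
    { path: 'hithere/socool.txt' },
    'webstream'
  );
  console.log(webStream instanceof ReadableStream); // true
});
```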

ts/00_commitinfo_data.ts

@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartbucket',
version: '3.0.8',
description: 'A TypeScript library for cloud-independent object storage, providing features like bucket creation, file and directory management, and data streaming.'
version: '3.0.20',
description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
}

ts/classes.bucket.ts

@ -1,16 +1,23 @@
// classes.bucket.ts
import * as plugins from './plugins.js';
import * as helpers from './helpers.js';
import * as interfaces from './interfaces.js';
import { SmartBucket } from './classes.smartbucket.js';
import { Directory } from './classes.directory.js';
import { File } from './classes.file.js';
import { Trash } from './classes.trash.js';
/**
* The bucket class exposes the basic functionality of a bucket.
* The functions of the bucket alone are enough to
* operate in S3 basic fashion on blobs of data.
*/
export class Bucket {
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
const buckets = await smartbucketRef.minioClient.listBuckets();
const foundBucket = buckets.find((bucket) => {
return bucket.name === bucketNameArg;
});
const command = new plugins.s3.ListBucketsCommand({});
const buckets = await smartbucketRef.s3Client.send(command);
const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);
if (foundBucket) {
console.log(`bucket with name ${bucketNameArg} exists.`);
@ -22,12 +29,14 @@ export class Bucket {
}
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch((e) => console.log(e));
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
return new Bucket(smartbucketRef, bucketName);
}
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.removeBucket(bucketName).catch((e) => console.log(e));
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
}
public smartbucketRef: SmartBucket;
@ -45,11 +54,21 @@ export class Bucket {
return new Directory(this, null, '');
}
public async getDirectoryFromPath(pathDescriptorArg: interfaces.IPathDecriptor): Promise<Directory> {
/**
* gets the trash directory
*/
public async getTrash(): Promise<Trash> {
const trash = new Trash(this);
return trash;
}
public async getDirectoryFromPath(
pathDescriptorArg: interfaces.IPathDecriptor
): Promise<Directory> {
if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
return this.getBaseDirectory();
}
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const baseDirectory = await this.getBaseDirectory();
return await baseDirectory.getSubDirectoryByName(checkPath);
}
@ -61,34 +80,34 @@ export class Bucket {
/**
* store file
*/
public async fastPut(optionsArg: {
path: string;
contents: string | Buffer;
overwrite?: boolean;
}): Promise<File> {
public async fastPut(
optionsArg: interfaces.IPathDecriptor & {
contents: string | Buffer;
overwrite?: boolean;
}
): Promise<File> {
try {
const reducedPath = await helpers.reducePathDescriptorToPath({
path: optionsArg.path,
})
// Check if the object already exists
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
const exists = await this.fastExists({ path: reducedPath });
if (exists && !optionsArg.overwrite) {
console.error(`Object already exists at path '${reducedPath}' in bucket '${this.name}'.`);
return;
} else if (exists && optionsArg.overwrite) {
console.log(`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`);
console.log(
`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
);
} else {
console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
}
// Proceed with putting the object
const streamIntake = new plugins.smartstream.StreamIntake();
const putPromise = this.smartbucketRef.minioClient.putObject(this.name, reducedPath, streamIntake);
streamIntake.pushData(optionsArg.contents);
streamIntake.signalEnd();
await putPromise;
const command = new plugins.s3.PutObjectCommand({
Bucket: this.name,
Key: reducedPath,
Body: optionsArg.contents,
});
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
const parsedPath = plugins.path.parse(reducedPath);
return new File({
@ -98,19 +117,21 @@ export class Bucket {
fileName: parsedPath.base,
});
} catch (error) {
console.error(`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`, error);
console.error(
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
error
);
throw error;
}
}
/**
* get file
*/
public async fastGet(optionsArg: Parameters<typeof this.fastGetStream>[0]): Promise<Buffer> {
public async fastGet(optionsArg: { path: string }): Promise<Buffer> {
const done = plugins.smartpromise.defer();
let completeFile: Buffer;
const replaySubject = await this.fastGetStream(optionsArg);
const replaySubject = await this.fastGetReplaySubject(optionsArg);
const subscription = replaySubject.subscribe({
next: (chunk) => {
if (completeFile) {
@ -131,79 +152,131 @@ export class Bucket {
return completeFile;
}
public async fastGetStream(optionsArg: {
/**
* good when time to first byte is important
* and multiple subscribers are expected
* @param optionsArg
* @returns
*/
public async fastGetReplaySubject(optionsArg: {
path: string;
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
const fileStream = await this.smartbucketRef.minioClient
.getObject(this.name, optionsArg.path)
.catch((e) => console.log(e));
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
return;
},
finalFunction: async (cb) => {
replaySubject.complete();
return;
}
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
if (!fileStream) {
return null;
// Convert the stream to a format that supports piping
const stream = response.Body as any; // SdkStreamMixin includes readable stream
if (typeof stream.pipe === 'function') {
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
return;
},
finalFunction: async (cb) => {
replaySubject.complete();
return;
},
});
stream.pipe(duplexStream);
}
const smartstream = new plugins.smartstream.StreamWrapper([
fileStream,
duplexStream,
]);
smartstream.run();
return replaySubject;
}
public fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'webstream'
): Promise<ReadableStream>;
public async fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'nodestream'
): Promise<plugins.stream.Readable>;
public async fastGetStream(
optionsArg: { path: string },
typeArg: 'webstream' | 'nodestream' = 'nodestream'
): Promise<ReadableStream | plugins.stream.Readable> {
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const stream = response.Body as any; // SdkStreamMixin includes readable stream
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
writeFunction: async (chunk) => {
return chunk;
},
finalFunction: async (cb) => {
return null;
},
});
if (typeof stream.pipe === 'function') {
stream.pipe(duplexStream);
}
if (typeArg === 'nodestream') {
return duplexStream;
}
if (typeArg === 'webstream') {
return (await duplexStream.getWebStreams()).readable;
}
}
/**
* store file as stream
*/
public async fastPutStream(optionsArg: {
path: string;
dataStream: plugins.stream.Readable;
readableStream: plugins.stream.Readable | ReadableStream;
nativeMetadata?: { [key: string]: string };
overwrite?: boolean;
}): Promise<void> {
try {
// Check if the object already exists
const exists = await this.fastExists({ path: optionsArg.path });
if (exists && !optionsArg.overwrite) {
console.error(`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`);
console.error(
`Object already exists at path '${optionsArg.path}' in bucket '${this.name}'.`
);
return;
} else if (exists && optionsArg.overwrite) {
console.log(`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`);
console.log(
`Overwriting existing object at path '${optionsArg.path}' in bucket '${this.name}'.`
);
} else {
console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
}
// Proceed with putting the object
await this.smartbucketRef.minioClient.putObject(
this.name,
optionsArg.path,
optionsArg.dataStream,
null,
...(optionsArg.nativeMetadata
? (() => {
const returnObject: any = {};
return returnObject;
})()
: {})
const command = new plugins.s3.PutObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
Body: optionsArg.readableStream,
Metadata: optionsArg.nativeMetadata,
});
await this.smartbucketRef.s3Client.send(command);
console.log(
`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
);
console.log(`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`);
} catch (error) {
console.error(`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`, error);
console.error(
`Error storing object at path '${optionsArg.path}' in bucket '${this.name}':`,
error
);
throw error;
}
}
public async fastCopy(optionsArg: {
sourcePath: string;
@ -216,94 +289,105 @@ export class Bucket {
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;
// Retrieve current object information to use in copy conditions
const currentObjInfo = await this.smartbucketRef.minioClient.statObject(
targetBucketName,
optionsArg.sourcePath
const currentObjInfo = await this.smartbucketRef.s3Client.send(
new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.sourcePath,
})
);
// Setting up copy conditions
const copyConditions = new plugins.minio.CopyConditions();
// Prepare new metadata
const newNativeMetadata = {
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.metaData),
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
...optionsArg.nativeMetadata,
};
// Define the copy operation as a Promise
// TODO: check on issue here: https://github.com/minio/minio-js/issues/1286
await this.smartbucketRef.minioClient.copyObject(
this.name,
optionsArg.sourcePath,
`/${targetBucketName}/${optionsArg.destinationPath || optionsArg.sourcePath}`,
copyConditions
);
// Define the copy operation
const copySource = `${this.name}/${optionsArg.sourcePath}`;
const command = new plugins.s3.CopyObjectCommand({
Bucket: targetBucketName,
CopySource: copySource,
Key: optionsArg.destinationPath || optionsArg.sourcePath,
Metadata: newNativeMetadata,
MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
});
await this.smartbucketRef.s3Client.send(command);
} catch (err) {
console.error('Error updating metadata:', err);
throw err; // rethrow to allow caller to handle
}
}
/**
/**
* Move object from one path to another within the same bucket or to another bucket
*/
public async fastMove(optionsArg: {
sourcePath: string;
destinationPath: string;
targetBucket?: Bucket;
overwrite?: boolean;
}): Promise<void> {
try {
// Check if the destination object already exists
const destinationBucket = optionsArg.targetBucket || this;
const exists = await destinationBucket.fastExists({ path: optionsArg.destinationPath });
if (exists && !optionsArg.overwrite) {
console.error(`Object already exists at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`);
return;
} else if (exists && optionsArg.overwrite) {
console.log(`Overwriting existing object at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`);
} else {
console.log(`Moving object to path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`);
}
// Proceed with copying the object to the new path
await this.fastCopy(optionsArg);
// Remove the original object after successful copy
await this.fastRemove({ path: optionsArg.sourcePath });
console.log(`Object '${optionsArg.sourcePath}' has been successfully moved to '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`);
} catch (error) {
console.error(`Error moving object from '${optionsArg.sourcePath}' to '${optionsArg.destinationPath}':`, error);
throw error;
public async fastMove(optionsArg: {
sourcePath: string;
destinationPath: string;
targetBucket?: Bucket;
overwrite?: boolean;
}): Promise<void> {
try {
const destinationBucket = optionsArg.targetBucket || this;
const exists = await destinationBucket.fastExists({ path: optionsArg.destinationPath });
if (exists && !optionsArg.overwrite) {
console.error(
`Object already exists at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
return;
} else if (exists && optionsArg.overwrite) {
console.log(
`Overwriting existing object at destination path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
} else {
console.log(
`Moving object to path '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
}
await this.fastCopy(optionsArg);
await this.fastRemove({ path: optionsArg.sourcePath });
console.log(
`Object '${optionsArg.sourcePath}' has been successfully moved to '${optionsArg.destinationPath}' in bucket '${destinationBucket.name}'.`
);
} catch (error) {
console.error(
`Error moving object from '${optionsArg.sourcePath}' to '${optionsArg.destinationPath}':`,
error
);
throw error;
}
}
/**
* removeObject
*/
public async fastRemove(optionsArg: {
path: string;
}) {
await this.smartbucketRef.minioClient.removeObject(this.name, optionsArg.path);
public async fastRemove(optionsArg: { path: string }) {
const command = new plugins.s3.DeleteObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
}
/**
* check wether file exists
* check whether file exists
* @param optionsArg
* @returns
* @returns
*/
public async fastExists(optionsArg: {
path: string;
}): Promise<boolean> {
public async fastExists(optionsArg: { path: string }): Promise<boolean> {
try {
await this.smartbucketRef.minioClient.statObject(this.name, optionsArg.path);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
return true;
} catch (error) {
if (error.code === 'NotFound') {
if (error.name === 'NotFound') {
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
return false;
} else {
@ -313,56 +397,43 @@ export class Bucket {
}
}
/**
* deletes this bucket
*/
public async delete() {
await this.smartbucketRef.s3Client.send(
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
);
}
public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
return this.smartbucketRef.minioClient.statObject(this.name, checkPath);
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: checkPath,
});
return this.smartbucketRef.s3Client.send(command);
}
public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
// lets check if the checkPath is a directory
const stream = this.smartbucketRef.minioClient.listObjectsV2(this.name, checkPath, true);
const done = plugins.smartpromise.defer<boolean>();
stream.on('data', (dataArg) => {
stream.destroy(); // Stop the stream early if we find at least one object
if (dataArg.prefix.startsWith(checkPath + '/')) {
done.resolve(true);
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});
stream.on('end', () => {
done.resolve(false);
});
stream.on('error', (err) => {
done.reject(err);
});
return done.promise;
};
const response = await this.smartbucketRef.s3Client.send(command);
return response.CommonPrefixes.length > 0;
}
public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
// lets check if the checkPath is a directory
const stream = this.smartbucketRef.minioClient.listObjectsV2(this.name, checkPath, true);
const done = plugins.smartpromise.defer<boolean>();
stream.on('data', (dataArg) => {
stream.destroy(); // Stop the stream early if we find at least one object
if (dataArg.prefix === checkPath) {
done.resolve(true);
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});
stream.on('end', () => {
done.resolve(false);
});
stream.on('error', (err) => {
done.reject(err);
});
return done.promise;
const response = await this.smartbucketRef.s3Client.send(command);
return response.Contents.length > 0;
}
}
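
Across this file, minio client calls are replaced by `@aws-sdk/client-s3` commands sent through `s3Client.send(...)`. The existence check is a representative example of the pattern: it issues a `HeadObjectCommand` and treats an error whose `name` is `'NotFound'` as "object does not exist". A standalone sketch of that pattern outside the class (client and names are illustrative):

```typescript
import { S3Client, HeadObjectCommand } from '@aws-sdk/client-s3';

async function objectExists(
  s3Client: S3Client,
  bucketName: string,
  key: string
): Promise<boolean> {
  try {
    await s3Client.send(new HeadObjectCommand({ Bucket: bucketName, Key: key }));
    return true; // HEAD succeeded, so the object is there
  } catch (error: any) {
    if (error.name === 'NotFound') {
      return false; // the v3 SDK reports a missing key as an error named 'NotFound'
    }
    throw error; // permissions or network problems should surface to the caller
  }
}
```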

ts/classes.directory.ts

@ -1,6 +1,9 @@
// classes.directory.ts
import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
import { File } from './classes.file.js';
import * as helpers from './helpers.js';
export class Directory {
public bucketRef: Bucket;
@ -11,9 +14,9 @@ export class Directory {
public files: string[];
public folders: string[];
constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) {
constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
this.bucketRef = bucketRefArg;
this.parentDirectoryRef = parentDiretory;
this.parentDirectoryRef = parentDirectory;
this.name = name;
}
@ -63,52 +66,53 @@ export class Directory {
* gets a file by name
*/
public async getFile(optionsArg: {
name: string;
path: string;
createWithContents?: string | Buffer;
getFromTrash?: boolean;
}): Promise<File> {
// check wether the file exists
const pathDescriptor = {
directory: this,
path: optionsArg.path,
};
const exists = await this.bucketRef.fastExists({
path: this.getBasePath() + optionsArg.name,
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
});
if (!exists && optionsArg.getFromTrash) {
const trash = await this.bucketRef.getTrash();
const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
return trashedFile;
}
if (!exists && !optionsArg.createWithContents) {
return null;
}
if (!exists && optionsArg.createWithContents) {
await this.fastPut({
path: optionsArg.name,
await File.create({
directory: this,
name: optionsArg.path,
contents: optionsArg.createWithContents,
});
}
return new File({
directoryRefArg: this,
fileName: optionsArg.name,
})
fileName: optionsArg.path,
});
}
/**
* lists all files
*/
public async listFiles(): Promise<File[]> {
const done = plugins.smartpromise.defer();
const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
false
);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const fileArray: File[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true,
writeFunction: async (bucketItem) => {
if (bucketItem.prefix) {
return;
}
if (!bucketItem.name) {
return;
}
let subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}
response.Contents.forEach((item) => {
if (item.Key && !item.Key.endsWith('/')) {
const subtractedPath = item.Key.replace(this.getBasePath(), '');
if (!subtractedPath.includes('/')) {
fileArray.push(
new File({
@ -117,13 +121,9 @@ export class Directory {
})
);
}
},
finalFunction: async (tools) => {
done.resolve();
}
});
fileNameStream.pipe(duplexStream);
await done.promise;
return fileArray;
}
@ -131,54 +131,52 @@ export class Directory {
* lists all folders
*/
public async listDirectories(): Promise<Directory[]> {
const done = plugins.smartpromise.defer();
const basePath = this.getBasePath();
const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
false
);
const directoryArray: Directory[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true,
writeFunction: async (bucketItem) => {
if (bucketItem.name) {
return;
}
let subtractedPath = bucketItem.prefix.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}
if (subtractedPath.includes('/')) {
const dirName = subtractedPath.split('/')[0];
if (directoryArray.find((directory) => directory.name === dirName)) {
return;
try {
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const directoryArray: Directory[] = [];
if (response.CommonPrefixes) {
response.CommonPrefixes.forEach((item) => {
if (item.Prefix) {
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
if (subtractedPath.endsWith('/')) {
const dirName = subtractedPath.slice(0, -1);
// Ensure the directory name is not empty (which would indicate the base directory itself)
if (dirName) {
directoryArray.push(new Directory(this.bucketRef, this, dirName));
}
}
}
directoryArray.push(new Directory(this.bucketRef, this, dirName));
}
},
finalFunction: async (tools) => {
done.resolve();
});
}
});
completeDirStream.pipe(duplexStream);
await done.promise;
return directoryArray;
return directoryArray;
} catch (error) {
console.error('Error listing directories:', error);
throw error;
}
}
/**
* gets an array that has all objects with a certain prefix;
* gets an array that has all objects with a certain prefix
*/
public async getTreeArray() {
const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
true
);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
return response.Contents;
}
/**
* gets a sub directory
* gets a sub directory by name
*/
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
const dirNameArray = dirNameArg.split('/');
@ -189,11 +187,13 @@ export class Directory {
return directory.name === dirNameToSearch;
});
};
let wantedDirectory: Directory;
for (const dirNameToSearch of dirNameArray) {
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
}
return wantedDirectory;
}
@ -202,19 +202,20 @@ export class Directory {
*/
public async move() {
// TODO
throw new Error('moving a directory is not yet implemented');
throw new Error('Moving a directory is not yet implemented');
}
/**
* creates a file within this directory
* creates an empty file within this directory
* @param relativePathArg
*/
public async createEmptyFile(relativePathArg: string) {
const emtpyFile = await File.create({
const emptyFile = await File.create({
directory: this,
name: relativePathArg,
contents: '',
});
return emptyFile;
}
// file operations
@ -234,29 +235,82 @@ export class Directory {
return result;
}
public async fastGetStream(pathArg: string): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
const path = plugins.path.join(this.getBasePath(), pathArg);
const result = await this.bucketRef.fastGetStream({
path,
});
public fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'webstream'
): Promise<ReadableStream>;
public async fastGetStream(
optionsArg: {
path: string;
},
typeArg: 'nodestream'
): Promise<plugins.stream.Readable>;
/**
* fastGetStream
* @param optionsArg
* @returns
*/
public async fastGetStream(
optionsArg: { path: string },
typeArg: 'webstream' | 'nodestream'
): Promise<ReadableStream | plugins.stream.Readable> {
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
const result = await this.bucketRef.fastGetStream(
{
path,
},
typeArg as any
);
return result;
}
public async fastRemove(optionsArg: { path: string }) {
/**
* fast put stream
*/
public async fastPutStream(optionsArg: {
path: string;
stream: plugins.stream.Readable;
}): Promise<void> {
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
await this.bucketRef.fastRemove({
await this.bucketRef.fastPutStream({
path,
readableStream: optionsArg.stream,
});
}
/**
* removes a file within the directory
* uses file class to make sure effects for metadata etc. are handled correctly
* @param optionsArg
*/
public async fastRemove(optionsArg: {
path: string
/**
* wether the file should be placed into trash. Default is false.
*/
mode?: 'permanent' | 'trash';
}) {
const file = await this.getFile({
path: optionsArg.path,
});
await file.delete({
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
});
}
/**
* deletes the directory with all its contents
*/
public async delete() {
public async delete(optionsArg: {
mode?: 'permanent' | 'trash';
}) {
const deleteDirectory = async (directoryArg: Directory) => {
const childDirectories = await directoryArg.listDirectories();
if (childDirectories.length === 0) {
console.log('directory empty! Path complete!');
console.log('Directory empty! Path complete!');
} else {
for (const childDir of childDirectories) {
await deleteDirectory(childDir);
@ -264,9 +318,9 @@ export class Directory {
}
const files = await directoryArg.listFiles();
for (const file of files) {
await directoryArg.fastRemove({
path: file.name,
});
await file.delete({
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
})
}
};
await deleteDirectory(this);
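
Directory listing now goes through `ListObjectsV2Command` with a `Prefix` and a `Delimiter`: objects directly under the prefix arrive in `Contents`, while deeper paths are collapsed into `CommonPrefixes`. A standalone sketch of that split (names are illustrative; the null-coalescing guards against the SDK omitting empty arrays):

```typescript
import { S3Client, ListObjectsV2Command } from '@aws-sdk/client-s3';

async function listLevel(s3Client: S3Client, bucketName: string, prefix: string) {
  const response = await s3Client.send(
    new ListObjectsV2Command({
      Bucket: bucketName,
      Prefix: prefix, // e.g. 'some/directory/'
      Delimiter: '/', // collapse anything deeper into CommonPrefixes
    })
  );

  // files directly under the prefix
  const files = (response.Contents ?? [])
    .map((item) => item.Key)
    .filter((key): key is string => !!key && !key.endsWith('/'));

  // immediate "subdirectories", i.e. collapsed deeper prefixes
  const folders = (response.CommonPrefixes ?? [])
    .map((item) => item.Prefix)
    .filter((p): p is string => !!p);

  return { files, folders };
}
```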

ts/classes.file.ts

@ -4,7 +4,6 @@ import * as interfaces from './interfaces.js';
import { Directory } from './classes.directory.js';
import { MetaData } from './classes.metadata.js';
/**
* represents a file in a directory
*/
@ -33,7 +32,12 @@ export class File {
directoryRefArg: optionsArg.directory,
fileName: optionsArg.name,
});
if (contents instanceof plugins.stream.Readable) {} else {
if (contents instanceof plugins.stream.Readable) {
await optionsArg.directory.fastPutStream({
path: optionsArg.name,
stream: contents,
});
} else {
await optionsArg.directory.fastPut({
path: optionsArg.name,
contents: contents,
@ -48,7 +52,7 @@ export class File {
public getBasePath(): string {
return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
};
}
constructor(optionsArg: { directoryRefArg: Directory; fileName: string }) {
this.parentDirectoryRef = optionsArg.directoryRefArg;
@ -67,35 +71,61 @@ export class File {
return resultBuffer;
}
public async getReadStream() {
const readStream = this.parentDirectoryRef.bucketRef.fastGetStream({
path: this.getBasePath(),
});
public async getReadStream(typeArg: 'webstream'): Promise<ReadableStream>;
public async getReadStream(typeArg: 'nodestream'): Promise<plugins.stream.Readable>;
public async getReadStream(
typeArg: 'nodestream' | 'webstream'
): Promise<ReadableStream | plugins.stream.Readable> {
const readStream = this.parentDirectoryRef.bucketRef.fastGetStream(
{
path: this.getBasePath(),
},
typeArg as any
);
return readStream;
}
/**
* removes this file
* for using recycling mechanics use .delete()
* deletes this file
*/
public async remove() {
await this.parentDirectoryRef.bucketRef.fastRemove({
path: this.getBasePath(),
});
if (!this.name.endsWith('.metadata')) {
public async delete(optionsArg?: {
mode: 'trash' | 'permanent';
}) {
optionsArg = {
... {
mode: 'permanent',
},
...optionsArg,
}
if (optionsArg.mode === 'permanent') {
await this.parentDirectoryRef.bucketRef.fastRemove({
path: this.getBasePath() + '.metadata',
path: this.getBasePath(),
});
if (!this.name.endsWith('.metadata')) {
const metadata = await this.getMetaData();
await metadata.metadataFile.delete(optionsArg);
}
} else if (optionsArg.mode === 'trash') {
const metadata = await this.getMetaData();
await metadata.storeCustomMetaData({
key: 'recycle',
value: {
deletedAt: Date.now(),
originalPath: this.getBasePath(),
},
});
const trash = await this.parentDirectoryRef.bucketRef.getTrash();
await this.move({
directory: await trash.getTrashDir(),
path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
});
}
await this.parentDirectoryRef.listFiles();
}
/**
* deletes the file with recycling mechanics
*/
public async delete() {
await this.remove();
}
/**
* allows locking the file
* @param optionsArg
@ -125,13 +155,16 @@ export class File {
}
public async updateWithContents(optionsArg: {
contents: Buffer | string | plugins.stream.Readable;
contents: Buffer | string | plugins.stream.Readable | ReadableStream;
encoding?: 'utf8' | 'binary';
}) {
if (optionsArg.contents instanceof plugins.stream.Readable) {
if (
optionsArg.contents instanceof plugins.stream.Readable ||
optionsArg.contents instanceof ReadableStream
) {
await this.parentDirectoryRef.bucketRef.fastPutStream({
path: this.getBasePath(),
dataStream: optionsArg.contents,
readableStream: optionsArg.contents,
});
} else if (Buffer.isBuffer(optionsArg.contents)) {
await this.parentDirectoryRef.bucketRef.fastPut({
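
The former `remove()` is folded into `delete()`, which either deletes permanently (including the companion `.metadata` object) or, in `'trash'` mode, records recycle metadata and moves the object into the bucket's trash directory. The soft-delete branch corresponds roughly to this sequence, reassembled from the diff above as a sketch rather than the literal implementation:

```typescript
// assumes File is exported from the package index (ts/index.js)
import type { File } from '@push.rocks/smartbucket';

// sketch of the 'trash' branch of File.delete(), assembled from the diff above
async function moveFileToTrash(file: File) {
  const metadata = await file.getMetaData();

  // remember when the file was trashed and where it originally lived
  await metadata.storeCustomMetaData({
    key: 'recycle',
    value: {
      deletedAt: Date.now(),
      originalPath: file.getBasePath(),
    },
  });

  // relocate the object into the bucket-level trash directory,
  // keyed by an encoding of its original path
  const trash = await file.parentDirectoryRef.bucketRef.getTrash();
  await file.move({
    directory: await trash.getTrashDir(),
    path: await trash.getTrashKeyByOriginalBasePath(file.getBasePath()),
  });
}
```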

ts/classes.metadata.ts

@ -10,7 +10,7 @@ export class MetaData {
// lets find the existing metadata file
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
name: metaData.fileRef.name + '.metadata',
path: metaData.fileRef.name + '.metadata',
createWithContents: '{}',
});
@ -44,7 +44,7 @@ export class MetaData {
const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
path: this.fileRef.getBasePath(),
});
return stat.size;
return stat.ContentLength;
}
private prefixCustomMetaData = 'custom_';

ts/classes.smartbucket.ts

@ -1,22 +1,34 @@
// classes.smartbucket.ts
import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
export class SmartBucket {
public config: plugins.tsclass.storage.IS3Descriptor;
public minioClient: plugins.minio.Client;
public s3Client: plugins.s3.S3Client;
/**
* the constructor of SmartBucket
*/
/**
* the constructor of SmartBucket
*/
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
this.config = configArg;
this.minioClient = new plugins.minio.Client({
endPoint: this.config.endpoint,
port: configArg.port || 443,
useSSL: configArg.useSsl !== undefined ? configArg.useSsl : true,
accessKey: this.config.accessKey,
secretKey: this.config.accessSecret,
const protocol = configArg.useSsl === false ? 'http' : 'https';
const port = configArg.port ? `:${configArg.port}` : '';
const endpoint = `${protocol}://${configArg.endpoint}${port}`;
this.s3Client = new plugins.s3.S3Client({
endpoint,
region: configArg.region || 'us-east-1',
credentials: {
accessKeyId: configArg.accessKey,
secretAccessKey: configArg.accessSecret,
},
forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
});
}
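
The constructor now assembles a full endpoint URL from the `IS3Descriptor` fields and hands it to the AWS SDK v3 client; `forcePathStyle` keeps bucket names in the path, which S3-compatible services such as MinIO or Wasabi expect. Constructing an equivalent client by hand would look roughly like this (all values are placeholders):

```typescript
import { S3Client } from '@aws-sdk/client-s3';

const useSsl = true;                                  // placeholder config
const endpointHost = 's3.eu-central-1.wasabisys.com'; // placeholder endpoint
const port: number | undefined = undefined;           // omit to use the protocol default

const s3Client = new S3Client({
  endpoint: `${useSsl ? 'https' : 'http'}://${endpointHost}${port ? `:${port}` : ''}`,
  region: 'us-east-1', // mirrors the constructor's fallback when no region is configured
  credentials: {
    accessKeyId: 'yourAccessKey',
    secretAccessKey: 'yourSecretKey',
  },
  forcePathStyle: true, // bucket name in the path instead of the subdomain
});
```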

ts/classes.trash.ts Normal file

@ -0,0 +1,30 @@
import * as plugins from './plugins.js';
import * as interfaces from './interfaces.js';
import * as helpers from './helpers.js';
import type { Bucket } from './classes.bucket.js';
import type { Directory } from './classes.directory.js';
import type { File } from './classes.file.js';
export class Trash {
public bucketRef: Bucket;
constructor(bucketRefArg: Bucket) {
this.bucketRef = bucketRefArg;
}
public async getTrashDir() {
return this.bucketRef.getDirectoryFromPath({ path: '.trash' });
}
public async getTrashedFileByOriginalName(pathDescriptor: interfaces.IPathDecriptor): Promise<File> {
const trashDir = await this.getTrashDir();
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
return trashDir.getFile({ path: trashKey });
}
public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
return plugins.smartstring.base64.encode(originalPath);
}
}
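
The trash key is simply the base64 encoding of the object's original path, so lookups by original name are a pure string transformation. The library uses `@push.rocks/smartstring` for this; the sketch below uses Node's `Buffer` instead, assuming plain base64 without URL-safe variations:

```typescript
const originalPath = 'some/directory/report.pdf'; // illustrative path

// encode: this is the key the trashed object is stored under inside .trash/
const trashKey = Buffer.from(originalPath, 'utf8').toString('base64');

// decode: recover the original location, e.g. when restoring from trash
const recoveredPath = Buffer.from(trashKey, 'base64').toString('utf8');
console.log(recoveredPath === originalPath); // true
```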

ts/plugins.ts

@ -1,3 +1,5 @@
// plugins.ts
// node native
import * as path from 'path';
import * as stream from 'stream';
@ -10,8 +12,10 @@ import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrx from '@push.rocks/smartrx';
import * as smartstream from '@push.rocks/smartstream';
import * as smartstring from '@push.rocks/smartstring';
import * as smartunique from '@push.rocks/smartunique';
export { smartmime, smartpath, smartpromise, smartrx, smartstream };
export { smartmime, smartpath, smartpromise, smartrx, smartstream, smartstring, smartunique };
// @tsclass
import * as tsclass from '@tsclass/tsclass';
@ -21,6 +25,8 @@ export {
}
// third party scope
import * as minio from 'minio';
import * as s3 from '@aws-sdk/client-s3';
export { minio };
export {
s3,
}