Compare commits

18 Commits:

- 7db4d24817
- dc599585b8
- a22e32cd32
- 4647181807
- 99c3935d0c
- 05523dc7a1
- dc99cfa229
- 23f8dc55d0
- ffaf0fc97a
- 2a0425ff54
- 9adcdee0a0
- 786f8d4365
- 67244ba5cf
- a9bb31c2a2
- bd8b05920f
- 535d9f8520
- 8401fe1c0c
- 08c3f674bf
changelog.md (new file, +73 lines)
@@ -0,0 +1,73 @@
# Changelog

## 2024-10-16 - 3.0.23 - fix(dependencies)
Update package dependencies for improved functionality and security.

- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
- Updated @push.rocks/smartstream to version ^3.2.4.
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.

## 2024-07-28 - 3.0.22 - fix(dependencies)
Update dependencies and improve bucket retrieval logging

- Updated @aws-sdk/client-s3 to ^3.620.0
- Updated @git.zone/tsbuild to ^2.1.84
- Updated @git.zone/tsrun to ^1.2.49
- Updated @push.rocks/smartpromise to ^4.0.4
- Updated @tsclass/tsclass to ^4.1.2
- Added a log for when a bucket is not found by name in getBucketByName method

## 2024-07-04 - 3.0.21 - fix(test)
Update endpoint configuration in tests to use environment variable

- Modified `qenv.yml` to include `S3_ENDPOINT` as a required environment variable.
- Updated test files to fetch `S3_ENDPOINT` from environment instead of hardcoding.

## 2024-06-19 - 3.0.20 - Fix and Stability Updates
Improved overall stability and consistency.

## 2024-06-18 - 3.0.18 - Delete Functions Consistency
Ensured more consistency between delete methods and trash behavior.

## 2024-06-17 - 3.0.17 to 3.0.16 - Fix and Update
Routine updates and fixes performed.

## 2024-06-11 - 3.0.15 to 3.0.14 - Fix and Update
Routine updates and fixes performed.

## 2024-06-10 - 3.0.13 - Trash Feature Completion
Finished work on trash feature.

## 2024-06-09 - 3.0.12 - Fix and Update
Routine updates and fixes performed.

## 2024-06-08 - 3.0.11 to 3.0.10 - Fix and Update
Routine updates and fixes performed.

## 2024-06-03 - 3.0.10 - Fix and Update
Routine updates and fixes performed.

## 2024-05-29 - 3.0.9 - Update Description
Updated project description.

## 2024-05-27 - 3.0.8 to 3.0.6 - Pathing and Core Updates
Routine updates and fixes performed.
- S3 paths' pathing differences now correctly handled with a reducePath method.

## 2024-05-21 - 3.0.5 to 3.0.4 - Fix and Update
Routine updates and fixes performed.

## 2024-05-17 - 3.0.3 to 3.0.2 - Fix and Update
Routine updates and fixes performed.

## 2024-05-17 - 3.0.0 - Major Release
Introduced breaking changes in core and significant improvements.

## 2024-05-05 - 2.0.5 - Breaking Changes
Introduced breaking changes in core functionality.

## 2024-04-14 - 2.0.4 - TSConfig Update
Updated TypeScript configuration.

## 2024-01-01 - 2.0.2 - Organization Scheme Update
Switched to the new organizational scheme.
package-lock.json (generated, 4 lines changed)
@@ -1,12 +1,12 @@
{
"name": "@push.rocks/smartbucket",
"version": "3.0.14",
"version": "3.0.23",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@push.rocks/smartbucket",
"version": "3.0.14",
"version": "3.0.23",
"license": "UNLICENSED",
"dependencies": {
"@push.rocks/smartpath": "^5.0.18",
package.json (16 lines changed)
@@ -1,6 +1,6 @@
{
"name": "@push.rocks/smartbucket",
"version": "3.0.14",
"version": "3.0.23",
"description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
@@ -12,22 +12,22 @@
"build": "(tsbuild --web --allowimplicitany)"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.1.80",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tsbuild": "^2.1.84",
"@git.zone/tsrun": "^1.2.49",
"@git.zone/tstest": "^1.0.90",
"@push.rocks/qenv": "^6.0.5",
"@push.rocks/tapbundle": "^5.0.23"
"@push.rocks/tapbundle": "^5.3.0"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.670.0",
"@push.rocks/smartmime": "^2.0.2",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.3",
"@push.rocks/smartpromise": "^4.0.4",
"@push.rocks/smartrx": "^3.0.7",
"@push.rocks/smartstream": "^3.0.44",
"@push.rocks/smartstream": "^3.2.4",
"@push.rocks/smartstring": "^4.0.15",
"@push.rocks/smartunique": "^3.0.9",
"@tsclass/tsclass": "^4.0.55",
"minio": "^8.0.0"
"@tsclass/tsclass": "^4.1.2"
},
"private": false,
"files": [
pnpm-lock.yaml (generated, 2348 lines changed)
File diff suppressed because it is too large.
test/test.trash.ts (new file, +24 lines)
@@ -0,0 +1,24 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';

import * as smartbucket from '../ts/index.js';

const testQenv = new Qenv('./', './.nogit/');

let testSmartbucket: smartbucket.SmartBucket;
let myBucket: smartbucket.Bucket;
let baseDirectory: smartbucket.Directory;

tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
});
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByName('testzone');
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
});

export default tap.start();
test/test.ts (22 lines changed)
@@ -13,24 +13,22 @@ tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
endpoint: 's3.eu-central-1.wasabisys.com',
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
});
});

tap.skip.test('should create testbucket', async () => {
// await testSmartbucket.createBucket('testzone');
});

tap.skip.test('should remove testbucket', async () => {
// await testSmartbucket.removeBucket('testzone');
});

tap.test('should get a bucket', async () => {
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByName('testzone');
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
});

tap.skip.test('should create testbucket', async () => {
// await testSmartbucket.createBucket('testzone2');
});

tap.skip.test('should remove testbucket', async () => {
// await testSmartbucket.removeBucket('testzone2');
});

// Fast operations
tap.test('should store data in bucket fast', async () => {
await myBucket.fastPut({
@@ -1,8 +1,8 @@
/**
* autocreated commitinfo by @pushrocks/commitinfo
* autocreated commitinfo by @push.rocks/commitinfo
*/
export const commitinfo = {
name: '@push.rocks/smartbucket',
version: '3.0.14',
version: '3.0.23',
description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
}
@@ -1,3 +1,5 @@
// classes.bucket.ts

import * as plugins from './plugins.js';
import * as helpers from './helpers.js';
import * as interfaces from './interfaces.js';
@@ -7,33 +9,35 @@ import { File } from './classes.file.js';
import { Trash } from './classes.trash.js';

/**
* The bucket class exposes the basc functionality of a bucket.
* The bucket class exposes the basic functionality of a bucket.
* The functions of the bucket alone are enough to
* operate in s3 basic fashion on blobs of data.
* operate in S3 basic fashion on blobs of data.
*/
export class Bucket {
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
const buckets = await smartbucketRef.minioClient.listBuckets();
const foundBucket = buckets.find((bucket) => {
return bucket.name === bucketNameArg;
});
const command = new plugins.s3.ListBucketsCommand({});
const buckets = await smartbucketRef.s3Client.send(command);
const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);

if (foundBucket) {
console.log(`bucket with name ${bucketNameArg} exists.`);
console.log(`Taking this as base for new Bucket instance`);
return new this(smartbucketRef, bucketNameArg);
} else {
console.log(`did not find bucket by name: ${bucketNameArg}`);
return null;
}
}

public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch((e) => console.log(e));
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
return new Bucket(smartbucketRef, bucketName);
}

public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.removeBucket(bucketName).catch((e) => console.log(e));
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
}

public smartbucketRef: SmartBucket;
@@ -65,7 +69,7 @@ export class Bucket {
if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
return this.getBaseDirectory();
}
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const baseDirectory = await this.getBaseDirectory();
return await baseDirectory.getSubDirectoryByName(checkPath);
}
@@ -77,13 +81,14 @@ export class Bucket {
/**
* store file
*/
public async fastPut(optionsArg: interfaces.IPathDecriptor & {
contents: string | Buffer;
overwrite?: boolean;
}): Promise<File> {
public async fastPut(
optionsArg: interfaces.IPathDecriptor & {
contents: string | Buffer;
overwrite?: boolean;
}
): Promise<File> {
try {
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
// Check if the object already exists
const exists = await this.fastExists({ path: reducedPath });

if (exists && !optionsArg.overwrite) {
@@ -97,16 +102,12 @@ export class Bucket {
console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
}

// Proceed with putting the object
const streamIntake = new plugins.smartstream.StreamIntake();
const putPromise = this.smartbucketRef.minioClient.putObject(
this.name,
reducedPath,
streamIntake
);
streamIntake.pushData(optionsArg.contents);
streamIntake.signalEnd();
await putPromise;
const command = new plugins.s3.PutObjectCommand({
Bucket: this.name,
Key: reducedPath,
Body: optionsArg.contents,
});
await this.smartbucketRef.s3Client.send(command);

console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
const parsedPath = plugins.path.parse(reducedPath);
@@ -161,27 +162,30 @@ export class Bucket {
public async fastGetReplaySubject(optionsArg: {
path: string;
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
const fileStream = await this.smartbucketRef.minioClient
.getObject(this.name, optionsArg.path)
.catch((e) => console.log(e));
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
return;
},
finalFunction: async (cb) => {
replaySubject.complete();
return;
},
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();

if (!fileStream) {
return null;
// Convert the stream to a format that supports piping
const stream = response.Body as any; // SdkStreamMixin includes readable stream
if (typeof stream.pipe === 'function') {
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
return;
},
finalFunction: async (cb) => {
replaySubject.complete();
return;
},
});

stream.pipe(duplexStream);
}

const smartstream = new plugins.smartstream.StreamWrapper([fileStream, duplexStream]);
smartstream.run();
return replaySubject;
}

@@ -198,18 +202,17 @@ export class Bucket {
typeArg: 'nodestream'
): Promise<plugins.stream.Readable>;

/**
* fastGetStream
* @param optionsArg
* @returns
*/
public async fastGetStream(
optionsArg: { path: string },
typeArg: 'webstream' | 'nodestream' = 'nodestream'
): Promise<ReadableStream | plugins.stream.Readable> {
const fileStream = await this.smartbucketRef.minioClient
.getObject(this.name, optionsArg.path)
.catch((e) => console.log(e));
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const stream = response.Body as any; // SdkStreamMixin includes readable stream

const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
writeFunction: async (chunk) => {
return chunk;
@@ -219,12 +222,10 @@ export class Bucket {
},
});

if (!fileStream) {
return null;
if (typeof stream.pipe === 'function') {
stream.pipe(duplexStream);
}

const smartstream = new plugins.smartstream.StreamWrapper([fileStream, duplexStream]);
smartstream.run();
if (typeArg === 'nodestream') {
return duplexStream;
}
@@ -243,7 +244,6 @@ export class Bucket {
overwrite?: boolean;
}): Promise<void> {
try {
// Check if the object already exists
const exists = await this.fastExists({ path: optionsArg.path });

if (exists && !optionsArg.overwrite) {
@@ -259,18 +259,13 @@ export class Bucket {
console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
}

const streamIntake = await plugins.smartstream.StreamIntake.fromStream<Uint8Array>(
optionsArg.readableStream
);

// Proceed with putting the object
await this.smartbucketRef.minioClient.putObject(
this.name,
optionsArg.path,
streamIntake,
null,
null // TODO: Add support for custom metadata once proper support is in minio.
);
const command = new plugins.s3.PutObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
Body: optionsArg.readableStream,
Metadata: optionsArg.nativeMetadata,
});
await this.smartbucketRef.s3Client.send(command);

console.log(
`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
@@ -295,28 +290,29 @@ export class Bucket {
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;

// Retrieve current object information to use in copy conditions
const currentObjInfo = await this.smartbucketRef.minioClient.statObject(
targetBucketName,
optionsArg.sourcePath
const currentObjInfo = await this.smartbucketRef.s3Client.send(
new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.sourcePath,
})
);

// Setting up copy conditions
const copyConditions = new plugins.minio.CopyConditions();

// Prepare new metadata
const newNativeMetadata = {
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.metaData),
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
...optionsArg.nativeMetadata,
};

// Define the copy operation as a Promise
// TODO: check on issue here: https://github.com/minio/minio-js/issues/1286
await this.smartbucketRef.minioClient.copyObject(
this.name,
optionsArg.sourcePath,
`/${targetBucketName}/${optionsArg.destinationPath || optionsArg.sourcePath}`,
copyConditions
);
// Define the copy operation
const copySource = `${this.name}/${optionsArg.sourcePath}`;
const command = new plugins.s3.CopyObjectCommand({
Bucket: targetBucketName,
CopySource: copySource,
Key: optionsArg.destinationPath || optionsArg.sourcePath,
Metadata: newNativeMetadata,
MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
});
await this.smartbucketRef.s3Client.send(command);
} catch (err) {
console.error('Error updating metadata:', err);
throw err; // rethrow to allow caller to handle
@@ -333,7 +329,6 @@ export class Bucket {
overwrite?: boolean;
}): Promise<void> {
try {
// Check if the destination object already exists
const destinationBucket = optionsArg.targetBucket || this;
const exists = await destinationBucket.fastExists({ path: optionsArg.destinationPath });

@@ -352,10 +347,7 @@ export class Bucket {
);
}

// Proceed with copying the object to the new path
await this.fastCopy(optionsArg);

// Remove the original object after successful copy
await this.fastRemove({ path: optionsArg.sourcePath });

console.log(
@@ -374,21 +366,29 @@ export class Bucket {
* removeObject
*/
public async fastRemove(optionsArg: { path: string }) {
await this.smartbucketRef.minioClient.removeObject(this.name, optionsArg.path);
const command = new plugins.s3.DeleteObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
}

/**
* check wether file exists
* check whether file exists
* @param optionsArg
* @returns
*/
public async fastExists(optionsArg: { path: string }): Promise<boolean> {
try {
await this.smartbucketRef.minioClient.statObject(this.name, optionsArg.path);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
return true;
} catch (error) {
if (error.code === 'NotFound') {
if (error.name === 'NotFound') {
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
return false;
} else {
@@ -402,59 +402,39 @@ export class Bucket {
* deletes this bucket
*/
public async delete() {
await this.smartbucketRef.minioClient.removeBucket(this.name);
await this.smartbucketRef.s3Client.send(
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
);
}

public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
return this.smartbucketRef.minioClient.statObject(this.name, checkPath);
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: checkPath,
});
return this.smartbucketRef.s3Client.send(command);
}

public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);

// lets check if the checkPath is a directory
const stream = this.smartbucketRef.minioClient.listObjectsV2(this.name, checkPath, true);
const done = plugins.smartpromise.defer<boolean>();
stream.on('data', (dataArg) => {
stream.destroy(); // Stop the stream early if we find at least one object
if (dataArg.prefix.startsWith(checkPath + '/')) {
done.resolve(true);
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});

stream.on('end', () => {
done.resolve(false);
});

stream.on('error', (err) => {
done.reject(err);
});

return done.promise;
const response = await this.smartbucketRef.s3Client.send(command);
return response.CommonPrefixes.length > 0;
}

public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);

// lets check if the checkPath is a directory
const stream = this.smartbucketRef.minioClient.listObjectsV2(this.name, checkPath, true);
const done = plugins.smartpromise.defer<boolean>();
stream.on('data', (dataArg) => {
stream.destroy(); // Stop the stream early if we find at least one object
if (dataArg.prefix === checkPath) {
done.resolve(true);
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});

stream.on('end', () => {
done.resolve(false);
});

stream.on('error', (err) => {
done.reject(err);
});

return done.promise;
const response = await this.smartbucketRef.s3Client.send(command);
return response.Contents.length > 0;
}
}
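The hunks above replace the minio client calls in Bucket with the AWS SDK v3 command/send pattern and switch the not-found check from error.code to error.name. As a point of reference, here is a minimal, self-contained sketch of that pattern for an existence check; the endpoint, credentials, bucket and key are placeholders, not values taken from this repository:

```typescript
import { S3Client, HeadObjectCommand } from '@aws-sdk/client-s3';

// Hypothetical client setup; all connection values below are placeholders.
const s3Client = new S3Client({
  endpoint: 'https://s3.example.com',
  region: 'us-east-1',
  credentials: { accessKeyId: 'KEY', secretAccessKey: 'SECRET' },
  forcePathStyle: true, // path-style addressing for S3-compatible backends
});

// Existence check in the style of the reworked fastExists():
// HeadObjectCommand succeeds if the object exists and throws otherwise.
async function objectExists(bucket: string, key: string): Promise<boolean> {
  try {
    await s3Client.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
    return true;
  } catch (error) {
    // The v3 SDK reports the error type on error.name, not error.code as the minio client did.
    if (error.name === 'NotFound') {
      return false;
    }
    throw error;
  }
}
```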
@@ -1,7 +1,8 @@
// classes.directory.ts

import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
import { File } from './classes.file.js';

import * as helpers from './helpers.js';

export class Directory {
@@ -13,9 +14,9 @@ export class Directory {
public files: string[];
public folders: string[];

constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) {
constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
this.bucketRef = bucketRefArg;
this.parentDirectoryRef = parentDiretory;
this.parentDirectoryRef = parentDirectory;
this.name = name;
}

@@ -65,23 +66,20 @@ export class Directory {
* gets a file by name
*/
public async getFile(optionsArg: {
name: string;
path: string;
createWithContents?: string | Buffer;
getFromTrash?: boolean;
}): Promise<File> {
const pathDescriptor = {
directory: this,
path: optionsArg.name,
path: optionsArg.path,
};
// check wether the file exists
const exists = await this.bucketRef.fastExists({
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
});
if (!exists && optionsArg.getFromTrash) {
const trash = await this.bucketRef.getTrash();
const trashedFile = await trash.getTrashedFileByOriginalName(
pathDescriptor
)
const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
return trashedFile;
}
if (!exists && !optionsArg.createWithContents) {
@@ -90,13 +88,13 @@ export class Directory {
if (!exists && optionsArg.createWithContents) {
await File.create({
directory: this,
name: optionsArg.name,
name: optionsArg.path,
contents: optionsArg.createWithContents,
});
}
return new File({
directoryRefArg: this,
fileName: optionsArg.name,
fileName: optionsArg.path,
});
}

@@ -104,26 +102,17 @@ export class Directory {
* lists all files
*/
public async listFiles(): Promise<File[]> {
const done = plugins.smartpromise.defer();
const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
false
);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const fileArray: File[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true,
writeFunction: async (bucketItem) => {
if (bucketItem.prefix) {
return;
}
if (!bucketItem.name) {
return;
}
let subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}

response.Contents.forEach((item) => {
if (item.Key && !item.Key.endsWith('/')) {
const subtractedPath = item.Key.replace(this.getBasePath(), '');
if (!subtractedPath.includes('/')) {
fileArray.push(
new File({
@@ -132,13 +121,9 @@ export class Directory {
})
);
}
},
finalFunction: async (tools) => {
done.resolve();
},
}
});
fileNameStream.pipe(duplexStream);
await done.promise;

return fileArray;
}

@@ -146,54 +131,52 @@ export class Directory {
* lists all folders
*/
public async listDirectories(): Promise<Directory[]> {
const done = plugins.smartpromise.defer();
const basePath = this.getBasePath();
const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
false
);
const directoryArray: Directory[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true,
writeFunction: async (bucketItem) => {
if (bucketItem.name) {
return;
}
let subtractedPath = bucketItem.prefix.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}
if (subtractedPath.includes('/')) {
const dirName = subtractedPath.split('/')[0];
if (directoryArray.find((directory) => directory.name === dirName)) {
return;
try {
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const directoryArray: Directory[] = [];

if (response.CommonPrefixes) {
response.CommonPrefixes.forEach((item) => {
if (item.Prefix) {
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
if (subtractedPath.endsWith('/')) {
const dirName = subtractedPath.slice(0, -1);
// Ensure the directory name is not empty (which would indicate the base directory itself)
if (dirName) {
directoryArray.push(new Directory(this.bucketRef, this, dirName));
}
}
}
directoryArray.push(new Directory(this.bucketRef, this, dirName));
}
},
finalFunction: async (tools) => {
done.resolve();
},
});
completeDirStream.pipe(duplexStream);
await done.promise;
return directoryArray;
});
}

return directoryArray;
} catch (error) {
console.error('Error listing directories:', error);
throw error;
}
}

/**
* gets an array that has all objects with a certain prefix;
* gets an array that has all objects with a certain prefix
*/
public async getTreeArray() {
const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
true
);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
return response.Contents;
}

/**
* gets a sub directory
* gets a sub directory by name
*/
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
const dirNameArray = dirNameArg.split('/');
@@ -204,11 +187,13 @@ export class Directory {
return directory.name === dirNameToSearch;
});
};

let wantedDirectory: Directory;
for (const dirNameToSearch of dirNameArray) {
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
}

return wantedDirectory;
}

@@ -217,19 +202,20 @@ export class Directory {
*/
public async move() {
// TODO
throw new Error('moving a directory is not yet implemented');
throw new Error('Moving a directory is not yet implemented');
}

/**
* creates a file within this directory
* creates an empty file within this directory
* @param relativePathArg
*/
public async createEmptyFile(relativePathArg: string) {
const emtpyFile = await File.create({
const emptyFile = await File.create({
directory: this,
name: relativePathArg,
contents: '',
});
return emptyFile;
}

// file operations
@@ -297,23 +283,34 @@ export class Directory {

/**
* removes a file within the directory
* uses file class to make sure effects for metadata etc. are handled correctly
* @param optionsArg
*/
public async fastRemove(optionsArg: { path: string }) {
const path = plugins.path.join(this.getBasePath(), optionsArg.path);
await this.bucketRef.fastRemove({
path,
public async fastRemove(optionsArg: {
path: string
/**
* wether the file should be placed into trash. Default is false.
*/
mode?: 'permanent' | 'trash';
}) {
const file = await this.getFile({
path: optionsArg.path,
});
await file.delete({
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
});
}

/**
* deletes the directory with all its contents
*/
public async delete() {
public async delete(optionsArg: {
mode?: 'permanent' | 'trash';
}) {
const deleteDirectory = async (directoryArg: Directory) => {
const childDirectories = await directoryArg.listDirectories();
if (childDirectories.length === 0) {
console.log('directory empty! Path complete!');
console.log('Directory empty! Path complete!');
} else {
for (const childDir of childDirectories) {
await deleteDirectory(childDir);
@@ -321,9 +318,9 @@ export class Directory {
}
const files = await directoryArg.listFiles();
for (const file of files) {
await directoryArg.fastRemove({
path: file.name,
});
await file.delete({
mode: optionsArg.mode ? optionsArg.mode : 'permanent',
})
}
};
await deleteDirectory(this);
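For the listFiles() and listDirectories() rewrite above, the key idea is that a single delimited ListObjectsV2 call replaces the old minio object stream: objects directly under the prefix arrive in Contents, sub-directories arrive in CommonPrefixes. A minimal sketch under assumed placeholder values (bucket name, prefix and client settings are illustrative):

```typescript
import { S3Client, ListObjectsV2Command } from '@aws-sdk/client-s3';

// Hypothetical client; region and credentials resolution are left to the default provider chain.
const s3Client = new S3Client({ region: 'us-east-1' });

// One delimited listing yields both "files" and "folders" for a prefix.
async function listLevel(bucket: string, prefix: string) {
  const response = await s3Client.send(
    new ListObjectsV2Command({
      Bucket: bucket,
      Prefix: prefix,
      Delimiter: '/',
    })
  );
  const files = (response.Contents ?? []).map((item) => item.Key); // objects at this level
  const folders = (response.CommonPrefixes ?? []).map((item) => item.Prefix); // sub-prefixes ("directories")
  return { files, folders };
}
```

Note that Contents and CommonPrefixes are optional in the SDK response, which is why the diff guards response.CommonPrefixes before iterating over it.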
@@ -10,7 +10,7 @@ export class MetaData {

// lets find the existing metadata file
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
name: metaData.fileRef.name + '.metadata',
path: metaData.fileRef.name + '.metadata',
createWithContents: '{}',
});

@@ -44,7 +44,7 @@ export class MetaData {
const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
path: this.fileRef.getBasePath(),
});
return stat.size;
return stat.ContentLength;
}

private prefixCustomMetaData = 'custom_';
@@ -1,22 +1,34 @@
// classes.smartbucket.ts

import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';

export class SmartBucket {
public config: plugins.tsclass.storage.IS3Descriptor;

public minioClient: plugins.minio.Client;
public s3Client: plugins.s3.S3Client;

/**
* the constructor of SmartBucket
*/
/**
* the constructor of SmartBucket
*/
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
this.config = configArg;
this.minioClient = new plugins.minio.Client({
endPoint: this.config.endpoint,
port: configArg.port || 443,
useSSL: configArg.useSsl !== undefined ? configArg.useSsl : true,
accessKey: this.config.accessKey,
secretKey: this.config.accessSecret,

const protocol = configArg.useSsl === false ? 'http' : 'https';
const port = configArg.port ? `:${configArg.port}` : '';
const endpoint = `${protocol}://${configArg.endpoint}${port}`;

this.s3Client = new plugins.s3.S3Client({
endpoint,
region: configArg.region || 'us-east-1',
credentials: {
accessKeyId: configArg.accessKey,
secretAccessKey: configArg.accessSecret,
},
forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
});
}
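Unlike the minio client, which took the host, port, and SSL flag separately, S3Client expects one endpoint URL, so the new constructor assembles it from the descriptor. A small sketch of that derivation, with a simplified config shape standing in for IS3Descriptor (field names mirror the constructor above, example values are hypothetical):

```typescript
// Simplified stand-in for the IS3Descriptor fields used by the constructor above.
interface IS3ConfigSketch {
  endpoint: string; // bare host, e.g. 's3.example.com'
  port?: number;
  useSsl?: boolean;
}

// Same derivation as in the constructor: protocol from useSsl, optional port suffix.
function buildEndpointUrl(config: IS3ConfigSketch): string {
  const protocol = config.useSsl === false ? 'http' : 'https';
  const port = config.port ? `:${config.port}` : '';
  return `${protocol}://${config.endpoint}${port}`;
}

// buildEndpointUrl({ endpoint: 'minio.local', port: 9000, useSsl: false }) returns 'http://minio.local:9000'
// buildEndpointUrl({ endpoint: 's3.example.com' }) returns 'https://s3.example.com'
```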
@@ -21,7 +21,7 @@ export class Trash {
const trashDir = await this.getTrashDir();
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
return trashDir.getFile({ name: trashKey });
return trashDir.getFile({ path: trashKey });
}

public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
@@ -1,3 +1,5 @@
// plugins.ts

// node native
import * as path from 'path';
import * as stream from 'stream';
@@ -23,6 +25,8 @@ export {
}

// third party scope
import * as minio from 'minio';
import * as s3 from '@aws-sdk/client-s3';

export { minio };
export {
s3,
}