fix(core): update

Philipp Kunz 2024-06-17 16:01:35 +02:00
parent 8401fe1c0c
commit 535d9f8520
8 changed files with 1421 additions and 421 deletions

View File

@@ -19,6 +19,7 @@
"@push.rocks/tapbundle": "^5.0.23"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.598.0",
"@push.rocks/smartmime": "^2.0.2",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.3",
@@ -26,8 +27,7 @@
"@push.rocks/smartstream": "^3.0.44",
"@push.rocks/smartstring": "^4.0.15",
"@push.rocks/smartunique": "^3.0.9",
"@tsclass/tsclass": "^4.0.55",
"minio": "^8.0.0"
"@tsclass/tsclass": "^4.0.59"
},
"private": false,
"files": [

pnpm-lock.yaml (generated): 1420 changed lines

File diff suppressed because it is too large.

View File

@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartbucket',
version: '3.0.15',
version: '3.0.16',
description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
}

View File

@@ -1,3 +1,5 @@
// classes.bucket.ts
import * as plugins from './plugins.js';
import * as helpers from './helpers.js';
import * as interfaces from './interfaces.js';
@@ -7,16 +9,15 @@ import { File } from './classes.file.js';
import { Trash } from './classes.trash.js';
/**
* The bucket class exposes the basc functionality of a bucket.
* The bucket class exposes the basic functionality of a bucket.
* The functions of the bucket alone are enough to
* operate in s3 basic fashion on blobs of data.
* operate in S3 basic fashion on blobs of data.
*/
export class Bucket {
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
const buckets = await smartbucketRef.minioClient.listBuckets();
const foundBucket = buckets.find((bucket) => {
return bucket.name === bucketNameArg;
});
const command = new plugins.s3.ListBucketsCommand({});
const buckets = await smartbucketRef.s3Client.send(command);
const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);
if (foundBucket) {
console.log(`bucket with name ${bucketNameArg} exists.`);
@@ -28,12 +29,14 @@ export class Bucket {
}
public static async createBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.makeBucket(bucketName, 'ams3').catch((e) => console.log(e));
const command = new plugins.s3.CreateBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
return new Bucket(smartbucketRef, bucketName);
}
public static async removeBucketByName(smartbucketRef: SmartBucket, bucketName: string) {
await smartbucketRef.minioClient.removeBucket(bucketName).catch((e) => console.log(e));
const command = new plugins.s3.DeleteBucketCommand({ Bucket: bucketName });
await smartbucketRef.s3Client.send(command).catch((e) => console.log(e));
}
public smartbucketRef: SmartBucket;
@@ -65,7 +68,7 @@ export class Bucket {
if (!pathDescriptorArg.path && !pathDescriptorArg.directory) {
return this.getBaseDirectory();
}
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const baseDirectory = await this.getBaseDirectory();
return await baseDirectory.getSubDirectoryByName(checkPath);
}
@@ -77,13 +80,14 @@ export class Bucket {
/**
* store file
*/
public async fastPut(optionsArg: interfaces.IPathDecriptor & {
public async fastPut(
optionsArg: interfaces.IPathDecriptor & {
contents: string | Buffer;
overwrite?: boolean;
}): Promise<File> {
}
): Promise<File> {
try {
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
// Check if the object already exists
const exists = await this.fastExists({ path: reducedPath });
if (exists && !optionsArg.overwrite) {
@@ -97,16 +101,12 @@ export class Bucket {
console.log(`Creating new object at path '${reducedPath}' in bucket '${this.name}'.`);
}
// Proceed with putting the object
const streamIntake = new plugins.smartstream.StreamIntake();
const putPromise = this.smartbucketRef.minioClient.putObject(
this.name,
reducedPath,
streamIntake
);
streamIntake.pushData(optionsArg.contents);
streamIntake.signalEnd();
await putPromise;
const command = new plugins.s3.PutObjectCommand({
Bucket: this.name,
Key: reducedPath,
Body: optionsArg.contents,
});
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${reducedPath}' has been successfully stored in bucket '${this.name}'.`);
const parsedPath = plugins.path.parse(reducedPath);
@@ -161,10 +161,16 @@ export class Bucket {
public async fastGetReplaySubject(optionsArg: {
path: string;
}): Promise<plugins.smartrx.rxjs.ReplaySubject<Buffer>> {
const fileStream = await this.smartbucketRef.minioClient
.getObject(this.name, optionsArg.path)
.catch((e) => console.log(e));
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const replaySubject = new plugins.smartrx.rxjs.ReplaySubject<Buffer>();
// Convert the stream to a format that supports piping
const stream = response.Body as any; // SdkStreamMixin includes readable stream
if (typeof stream.pipe === 'function') {
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, void>({
writeFunction: async (chunk) => {
replaySubject.next(chunk);
@@ -176,12 +182,9 @@ export class Bucket {
},
});
if (!fileStream) {
return null;
stream.pipe(duplexStream);
}
const smartstream = new plugins.smartstream.StreamWrapper([fileStream, duplexStream]);
smartstream.run();
return replaySubject;
}
@@ -198,18 +201,17 @@ export class Bucket {
typeArg: 'nodestream'
): Promise<plugins.stream.Readable>;
/**
* fastGetStream
* @param optionsArg
* @returns
*/
public async fastGetStream(
optionsArg: { path: string },
typeArg: 'webstream' | 'nodestream' = 'nodestream'
): Promise<ReadableStream | plugins.stream.Readable> {
const fileStream = await this.smartbucketRef.minioClient
.getObject(this.name, optionsArg.path)
.catch((e) => console.log(e));
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
const response = await this.smartbucketRef.s3Client.send(command);
const stream = response.Body as any; // SdkStreamMixin includes readable stream
const duplexStream = new plugins.smartstream.SmartDuplex<Buffer, Buffer>({
writeFunction: async (chunk) => {
return chunk;
@@ -219,12 +221,10 @@ export class Bucket {
},
});
if (!fileStream) {
return null;
if (typeof stream.pipe === 'function') {
stream.pipe(duplexStream);
}
const smartstream = new plugins.smartstream.StreamWrapper([fileStream, duplexStream]);
smartstream.run();
if (typeArg === 'nodestream') {
return duplexStream;
}
@@ -243,7 +243,6 @@ export class Bucket {
overwrite?: boolean;
}): Promise<void> {
try {
// Check if the object already exists
const exists = await this.fastExists({ path: optionsArg.path });
if (exists && !optionsArg.overwrite) {
@@ -259,18 +258,13 @@ export class Bucket {
console.log(`Creating new object at path '${optionsArg.path}' in bucket '${this.name}'.`);
}
const streamIntake = await plugins.smartstream.StreamIntake.fromStream<Uint8Array>(
optionsArg.readableStream
);
// Proceed with putting the object
await this.smartbucketRef.minioClient.putObject(
this.name,
optionsArg.path,
streamIntake,
null,
null // TODO: Add support for custom metadata once proper support is in minio.
);
const command = new plugins.s3.PutObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
Body: optionsArg.readableStream,
Metadata: optionsArg.nativeMetadata,
});
await this.smartbucketRef.s3Client.send(command);
console.log(
`Object '${optionsArg.path}' has been successfully stored in bucket '${this.name}'.`
@@ -295,28 +289,29 @@ export class Bucket {
const targetBucketName = optionsArg.targetBucket ? optionsArg.targetBucket.name : this.name;
// Retrieve current object information to use in copy conditions
const currentObjInfo = await this.smartbucketRef.minioClient.statObject(
targetBucketName,
optionsArg.sourcePath
const currentObjInfo = await this.smartbucketRef.s3Client.send(
new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.sourcePath,
})
);
// Setting up copy conditions
const copyConditions = new plugins.minio.CopyConditions();
// Prepare new metadata
const newNativeMetadata = {
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.metaData),
...(optionsArg.deleteExistingNativeMetadata ? {} : currentObjInfo.Metadata),
...optionsArg.nativeMetadata,
};
// Define the copy operation as a Promise
// TODO: check on issue here: https://github.com/minio/minio-js/issues/1286
await this.smartbucketRef.minioClient.copyObject(
this.name,
optionsArg.sourcePath,
`/${targetBucketName}/${optionsArg.destinationPath || optionsArg.sourcePath}`,
copyConditions
);
// Define the copy operation
const copySource = `${this.name}/${optionsArg.sourcePath}`;
const command = new plugins.s3.CopyObjectCommand({
Bucket: targetBucketName,
CopySource: copySource,
Key: optionsArg.destinationPath || optionsArg.sourcePath,
Metadata: newNativeMetadata,
MetadataDirective: optionsArg.deleteExistingNativeMetadata ? 'REPLACE' : 'COPY',
});
await this.smartbucketRef.s3Client.send(command);
} catch (err) {
console.error('Error updating metadata:', err);
throw err; // rethrow to allow caller to handle
@@ -333,7 +328,6 @@ export class Bucket {
overwrite?: boolean;
}): Promise<void> {
try {
// Check if the destination object already exists
const destinationBucket = optionsArg.targetBucket || this;
const exists = await destinationBucket.fastExists({ path: optionsArg.destinationPath });
@@ -352,10 +346,7 @@ export class Bucket {
);
}
// Proceed with copying the object to the new path
await this.fastCopy(optionsArg);
// Remove the original object after successful copy
await this.fastRemove({ path: optionsArg.sourcePath });
console.log(
@@ -374,21 +365,29 @@ export class Bucket {
* removeObject
*/
public async fastRemove(optionsArg: { path: string }) {
await this.smartbucketRef.minioClient.removeObject(this.name, optionsArg.path);
const command = new plugins.s3.DeleteObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
}
/**
* check wether file exists
* check whether file exists
* @param optionsArg
* @returns
*/
public async fastExists(optionsArg: { path: string }): Promise<boolean> {
try {
await this.smartbucketRef.minioClient.statObject(this.name, optionsArg.path);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
});
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
return true;
} catch (error) {
if (error.code === 'NotFound') {
if (error.name === 'NotFound') {
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
return false;
} else {
@@ -402,59 +401,39 @@ export class Bucket {
* deletes this bucket
*/
public async delete() {
await this.smartbucketRef.minioClient.removeBucket(this.name);
await this.smartbucketRef.s3Client.send(
new plugins.s3.DeleteBucketCommand({ Bucket: this.name })
);
}
public async fastStat(pathDescriptor: interfaces.IPathDecriptor) {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
return this.smartbucketRef.minioClient.statObject(this.name, checkPath);
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.HeadObjectCommand({
Bucket: this.name,
Key: checkPath,
});
return this.smartbucketRef.s3Client.send(command);
}
public async isDirectory(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
// lets check if the checkPath is a directory
const stream = this.smartbucketRef.minioClient.listObjectsV2(this.name, checkPath, true);
const done = plugins.smartpromise.defer<boolean>();
stream.on('data', (dataArg) => {
stream.destroy(); // Stop the stream early if we find at least one object
if (dataArg.prefix.startsWith(checkPath + '/')) {
done.resolve(true);
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});
stream.on('end', () => {
done.resolve(false);
});
stream.on('error', (err) => {
done.reject(err);
});
return done.promise;
const response = await this.smartbucketRef.s3Client.send(command);
return response.CommonPrefixes.length > 0;
}
public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
let checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
// lets check if the checkPath is a directory
const stream = this.smartbucketRef.minioClient.listObjectsV2(this.name, checkPath, true);
const done = plugins.smartpromise.defer<boolean>();
stream.on('data', (dataArg) => {
stream.destroy(); // Stop the stream early if we find at least one object
if (dataArg.prefix === checkPath) {
done.resolve(true);
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.name,
Prefix: checkPath,
Delimiter: '/',
});
stream.on('end', () => {
done.resolve(false);
});
stream.on('error', (err) => {
done.reject(err);
});
return done.promise;
const response = await this.smartbucketRef.s3Client.send(command);
return response.Contents.length > 0;
}
}

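For orientation, the HeadObject-based existence check that replaces minioClient.statObject above can be exercised on its own roughly as follows. This is an illustrative TypeScript sketch, not code from this commit: endpoint, credentials, bucket and key are placeholders.

import { S3Client, HeadObjectCommand } from '@aws-sdk/client-s3';

// Placeholder client; in smartbucket the real values come from the IS3Descriptor config.
const s3Client = new S3Client({
  endpoint: 'https://objectstorage.example.com',
  region: 'us-east-1',
  credentials: { accessKeyId: 'ACCESS_KEY', secretAccessKey: 'SECRET_KEY' },
  forcePathStyle: true,
});

async function objectExists(bucket: string, key: string): Promise<boolean> {
  try {
    // HEAD the object; a successful response means it exists.
    await s3Client.send(new HeadObjectCommand({ Bucket: bucket, Key: key }));
    return true;
  } catch (error: any) {
    // A missing object surfaces as an error named 'NotFound', which is what fastExists checks above.
    if (error?.name === 'NotFound') {
      return false;
    }
    throw error;
  }
}
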
View File

@@ -1,7 +1,8 @@
// classes.directory.ts
import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
import { File } from './classes.file.js';
import * as helpers from './helpers.js';
export class Directory {
@@ -13,9 +14,9 @@ export class Directory {
public files: string[];
public folders: string[];
constructor(bucketRefArg: Bucket, parentDiretory: Directory, name: string) {
constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
this.bucketRef = bucketRefArg;
this.parentDirectoryRef = parentDiretory;
this.parentDirectoryRef = parentDirectory;
this.name = name;
}
@@ -73,15 +74,12 @@ export class Directory {
directory: this,
path: optionsArg.name,
};
// check wether the file exists
const exists = await this.bucketRef.fastExists({
path: await helpers.reducePathDescriptorToPath(pathDescriptor),
});
if (!exists && optionsArg.getFromTrash) {
const trash = await this.bucketRef.getTrash();
const trashedFile = await trash.getTrashedFileByOriginalName(
pathDescriptor
)
const trashedFile = await trash.getTrashedFileByOriginalName(pathDescriptor);
return trashedFile;
}
if (!exists && !optionsArg.createWithContents) {
@@ -104,26 +102,17 @@ export class Directory {
* lists all files
*/
public async listFiles(): Promise<File[]> {
const done = plugins.smartpromise.defer();
const fileNameStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
false
);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const fileArray: File[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true,
writeFunction: async (bucketItem) => {
if (bucketItem.prefix) {
return;
}
if (!bucketItem.name) {
return;
}
let subtractedPath = bucketItem.name.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}
response.Contents.forEach((item) => {
if (item.Key && !item.Key.endsWith('/')) {
const subtractedPath = item.Key.replace(this.getBasePath(), '');
if (!subtractedPath.includes('/')) {
fileArray.push(
new File({
@@ -132,13 +121,9 @@ export class Directory {
})
);
}
},
finalFunction: async (tools) => {
done.resolve();
},
}
});
fileNameStream.pipe(duplexStream);
await done.promise;
return fileArray;
}
@@ -146,54 +131,52 @@ export class Directory {
* lists all folders
*/
public async listDirectories(): Promise<Directory[]> {
const done = plugins.smartpromise.defer();
const basePath = this.getBasePath();
const completeDirStream = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
false
);
try {
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const directoryArray: Directory[] = [];
const duplexStream = new plugins.smartstream.SmartDuplex<plugins.minio.BucketItem, void>({
objectMode: true,
writeFunction: async (bucketItem) => {
if (bucketItem.name) {
return;
}
let subtractedPath = bucketItem.prefix.replace(this.getBasePath(), '');
if (subtractedPath.startsWith('/')) {
subtractedPath = subtractedPath.substr(1);
}
if (subtractedPath.includes('/')) {
const dirName = subtractedPath.split('/')[0];
if (directoryArray.find((directory) => directory.name === dirName)) {
return;
}
if (response.CommonPrefixes) {
response.CommonPrefixes.forEach((item) => {
if (item.Prefix) {
const subtractedPath = item.Prefix.replace(this.getBasePath(), '');
if (subtractedPath.endsWith('/')) {
const dirName = subtractedPath.slice(0, -1);
// Ensure the directory name is not empty (which would indicate the base directory itself)
if (dirName) {
directoryArray.push(new Directory(this.bucketRef, this, dirName));
}
},
finalFunction: async (tools) => {
done.resolve();
},
}
}
});
completeDirStream.pipe(duplexStream);
await done.promise;
}
return directoryArray;
} catch (error) {
console.error('Error listing directories:', error);
throw error;
}
}
/**
* gets an array that has all objects with a certain prefix;
* gets an array that has all objects with a certain prefix
*/
public async getTreeArray() {
const treeArray = await this.bucketRef.smartbucketRef.minioClient.listObjectsV2(
this.bucketRef.name,
this.getBasePath(),
true
);
const command = new plugins.s3.ListObjectsV2Command({
Bucket: this.bucketRef.name,
Prefix: this.getBasePath(),
Delimiter: '/',
});
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
return response.Contents;
}
/**
* gets a sub directory
* gets a sub directory by name
*/
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
const dirNameArray = dirNameArg.split('/');
@@ -204,11 +187,13 @@ export class Directory {
return directory.name === dirNameToSearch;
});
};
let wantedDirectory: Directory;
for (const dirNameToSearch of dirNameArray) {
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
}
return wantedDirectory;
}
@@ -217,19 +202,20 @@ export class Directory {
*/
public async move() {
// TODO
throw new Error('moving a directory is not yet implemented');
throw new Error('Moving a directory is not yet implemented');
}
/**
* creates a file within this directory
* creates an empty file within this directory
* @param relativePathArg
*/
public async createEmptyFile(relativePathArg: string) {
const emtpyFile = await File.create({
const emptyFile = await File.create({
directory: this,
name: relativePathArg,
contents: '',
});
return emptyFile;
}
// file operations
@@ -313,7 +299,7 @@ export class Directory {
const deleteDirectory = async (directoryArg: Directory) => {
const childDirectories = await directoryArg.listDirectories();
if (childDirectories.length === 0) {
console.log('directory empty! Path complete!');
console.log('Directory empty! Path complete!');
} else {
for (const childDir of childDirectories) {
await deleteDirectory(childDir);

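As background for the listing methods rewritten above: with Delimiter set to '/', ListObjectsV2 reports immediate "subfolders" under a prefix via CommonPrefixes and the objects themselves via Contents. A minimal standalone sketch, with bucket and prefix as placeholders rather than values from this commit:

import { S3Client, ListObjectsV2Command } from '@aws-sdk/client-s3';

async function listFolderNames(
  s3Client: S3Client,
  bucket: string,
  prefix: string
): Promise<string[]> {
  const response = await s3Client.send(
    new ListObjectsV2Command({ Bucket: bucket, Prefix: prefix, Delimiter: '/' })
  );
  // Each CommonPrefix looks like '<prefix><folderName>/'; strip the prefix and the trailing slash.
  return (response.CommonPrefixes ?? [])
    .map((item) => item.Prefix ?? '')
    .map((fullPrefix) => fullPrefix.replace(prefix, ''))
    .filter((rest) => rest.endsWith('/'))
    .map((rest) => rest.slice(0, -1))
    .filter((name) => name.length > 0);
}
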
View File

@@ -44,7 +44,7 @@ export class MetaData {
const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
path: this.fileRef.getBasePath(),
});
return stat.size;
return stat.ContentLength;
}
private prefixCustomMetaData = 'custom_';

View File

@@ -1,22 +1,33 @@
// classes.smartbucket.ts
import * as plugins from './plugins.js';
import { Bucket } from './classes.bucket.js';
export class SmartBucket {
public config: plugins.tsclass.storage.IS3Descriptor;
public minioClient: plugins.minio.Client;
public s3Client: plugins.s3.S3Client;
/**
* the constructor of SmartBucket
*/
constructor(configArg: plugins.tsclass.storage.IS3Descriptor) {
this.config = configArg;
this.minioClient = new plugins.minio.Client({
endPoint: this.config.endpoint,
port: configArg.port || 443,
useSSL: configArg.useSsl !== undefined ? configArg.useSsl : true,
accessKey: this.config.accessKey,
secretKey: this.config.accessSecret,
const endpoint = this.config.endpoint.startsWith('http://') || this.config.endpoint.startsWith('https://')
? this.config.endpoint
: `https://${this.config.endpoint}`;
this.s3Client = new plugins.s3.S3Client({
endpoint,
region: this.config.region || 'us-east-1',
credentials: {
accessKeyId: this.config.accessKey,
secretAccessKey: this.config.accessSecret,
},
forcePathStyle: true, // Necessary for S3-compatible storage like MinIO or Wasabi
});
}

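A hypothetical usage sketch for the reworked constructor, assuming the package index re-exports SmartBucket. The field names follow the IS3Descriptor usage shown above; endpoint, region and credentials are placeholders, and further IS3Descriptor fields (such as port or useSsl) may apply depending on the @tsclass/tsclass version.

import { SmartBucket } from '@push.rocks/smartbucket';

const smartbucket = new SmartBucket({
  endpoint: 'objectstorage.example.com', // bare hostnames get an https:// prefix per the constructor above
  region: 'us-east-1',
  accessKey: 'MY_ACCESS_KEY',
  accessSecret: 'MY_ACCESS_SECRET',
});

// Buckets are then reached through the helpers from classes.bucket.ts, e.g.:
// const bucket = await Bucket.getBucketByName(smartbucket, 'my-bucket');
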
View File

@@ -1,3 +1,5 @@
// plugins.ts
// node native
import * as path from 'path';
import * as stream from 'stream';
@@ -23,6 +25,8 @@ export {
}
// third party scope
import * as minio from 'minio';
import * as s3 from '@aws-sdk/client-s3';
export { minio };
export {
s3,
}