Compare commits

18 Commits (SHA1):

aa9a2e9220
154854dc21
8e9041fbbf
16a82ac50a
0b396f19cf
6ab77ece6e
b7a1f2087c
b0d41fa9a0
34082c38a7
8d160cefb0
cec9c07b7c
383a5204f4
c7f0c97341
e7f60465ff
7db4d24817
dc599585b8
a22e32cd32
4647181807
changelog.md (57 changes)
@@ -1,5 +1,62 @@
# Changelog

## 2024-11-24 - 3.3.0 - feat(core)
Enhanced directory handling and file restoration from trash

- Refined getSubDirectoryByName to handle file paths treated as directories.
- Introduced file restoration function from trash to original or specified paths.

## 2024-11-24 - 3.2.2 - fix(core)
Refactor Bucket class for improved error handling

- Ensured safe access using non-null assertions when finding a bucket.
- Enhanced fastPut method by adding fastPutStrict for safer operations.
- Added explicit error handling and type checking in fastExists method.

## 2024-11-24 - 3.2.1 - fix(metadata)
Fix metadata handling for deleted files

- Ensured metadata is correctly stored and managed when files are deleted into the trash.

## 2024-11-24 - 3.2.0 - feat(bucket)
Enhanced SmartBucket with trash management and metadata handling

- Added functionality to move files to a trash directory.
- Introduced methods to handle file metadata more robustly.
- Implemented a method to clean all contents from a bucket.
- Enhanced directory retrieval to handle non-existent directories with options.
- Improved handling of file paths and metadata within the storage system.

## 2024-11-18 - 3.1.0 - feat(file)
Added functionality to retrieve magic bytes from files and detect file types using magic bytes.

- Introduced method `getMagicBytes` in `File` and `Bucket` classes to retrieve a specific number of bytes from a file.
- Enhanced file type detection by utilizing magic bytes in `MetaData` class.
- Updated dependencies for better performance and compatibility.

## 2024-11-18 - 3.0.24 - fix(metadata)
Fix metadata handling to address type assertion and data retrieval.

- Fixed type assertion issues in `MetaData` class properties with type non-null assertions.
- Corrected the handling of JSON data retrieval in `MetaData.storeCustomMetaData` function.

## 2024-10-16 - 3.0.23 - fix(dependencies)
Update package dependencies for improved functionality and security.

- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
- Updated @push.rocks/smartstream to version ^3.2.4.
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.

## 2024-07-28 - 3.0.22 - fix(dependencies)
Update dependencies and improve bucket retrieval logging

- Updated @aws-sdk/client-s3 to ^3.620.0
- Updated @git.zone/tsbuild to ^2.1.84
- Updated @git.zone/tsrun to ^1.2.49
- Updated @push.rocks/smartpromise to ^4.0.4
- Updated @tsclass/tsclass to ^4.1.2
- Added a log for when a bucket is not found by name in getBucketByName method

## 2024-07-04 - 3.0.21 - fix(test)
Update endpoint configuration in tests to use environment variable
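Taken together, the 3.1.0 through 3.3.0 entries describe a trash-and-restore workflow plus strict accessors. A minimal usage sketch, assuming an S3-compatible endpoint, credentials in environment variables, and a hypothetical bucket name 'my-bucket'; only the method names are confirmed by the diffs below:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function trashRoundTrip() {
  const storage = new smartbucket.SmartBucket({
    accessKey: process.env.S3_ACCESSKEY!,       // placeholder credentials
    accessSecret: process.env.S3_ACCESSSECRET!,
    endpoint: process.env.S3_ENDPOINT!,
  });
  // throws if the bucket does not exist (strict variant added in this release)
  const bucket = await storage.getBucketByNameStrict('my-bucket'); // hypothetical bucket name

  // store an object; the strict put throws instead of returning null
  const file = await bucket.fastPutStrict({
    path: 'demo/hello.txt',
    contents: 'hello trash',
  });

  // soft delete: records originalPath/deletedAt in the metadata sidecar
  // and moves the object into the bucket's trash directory
  await file.delete({ mode: 'trash' });

  // look the object up again via its original path, then restore it
  const baseDirectory = await bucket.getBaseDirectory();
  const trashedFile = await baseDirectory.getFileStrict({
    path: 'demo/hello.txt',
    getFromTrash: true,
  });
  await trashedFile.restore(); // moves it back to the recorded original path
}

trashRoundTrip().catch(console.error);
```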
package-lock.json (4 changes, generated)
@@ -1,12 +1,12 @@
{
"name": "@push.rocks/smartbucket",
"version": "3.0.21",
"version": "3.3.0",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"name": "@push.rocks/smartbucket",
"version": "3.0.21",
"version": "3.3.0",
"license": "UNLICENSED",
"dependencies": {
"@push.rocks/smartpath": "^5.0.18",
package.json (20 changes)
@@ -1,6 +1,6 @@
{
"name": "@push.rocks/smartbucket",
"version": "3.0.21",
"version": "3.3.0",
"description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
"main": "dist_ts/index.js",
"typings": "dist_ts/index.d.ts",
@@ -12,22 +12,22 @@
"build": "(tsbuild --web --allowimplicitany)"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.1.80",
"@git.zone/tsrun": "^1.2.46",
"@git.zone/tsbuild": "^2.1.84",
"@git.zone/tsrun": "^1.2.49",
"@git.zone/tstest": "^1.0.90",
"@push.rocks/qenv": "^6.0.5",
"@push.rocks/tapbundle": "^5.0.23"
"@push.rocks/qenv": "^6.1.0",
"@push.rocks/tapbundle": "^5.5.3"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.600.0",
"@push.rocks/smartmime": "^2.0.2",
"@aws-sdk/client-s3": "^3.699.0",
"@push.rocks/smartmime": "^2.0.4",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.3",
"@push.rocks/smartpromise": "^4.0.4",
"@push.rocks/smartrx": "^3.0.7",
"@push.rocks/smartstream": "^3.0.44",
"@push.rocks/smartstream": "^3.2.5",
"@push.rocks/smartstring": "^4.0.15",
"@push.rocks/smartunique": "^3.0.9",
"@tsclass/tsclass": "^4.0.60"
"@tsclass/tsclass": "^4.1.2"
},
"private": false,
"files": [
pnpm-lock.yaml (6193 changes, generated): file diff suppressed because it is too large
test/helpers/prepare.ts (0 changes, new normal file)
test/test.metadata.ts (7 changes, new normal file)
@@ -0,0 +1,7 @@
import { tap, expect } from '@push.rocks/tapbundle';

tap.test('test metadata functionality', async () => {

})

tap.start();
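The new test/test.metadata.ts is only an empty stub so far. One possible body, assuming the same Qenv-driven setup that test/test.ts uses and relying only on the MetaData methods visible further down in this diff (storeCustomMetaData, getCustomMetaData), could look like this:

```typescript
import { tap, expect } from '@push.rocks/tapbundle';
import { Qenv } from '@push.rocks/qenv';
import * as smartbucket from '@push.rocks/smartbucket';

const testQenv = new Qenv('./', './.nogit/'); // assumed to mirror test/test.ts

tap.test('should round-trip custom metadata', async () => {
  const storage = new smartbucket.SmartBucket({
    accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
    accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
    endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
  });
  const bucket = await storage.getBucketByNameStrict(
    await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),
  );

  const file = await bucket.fastPutStrict({
    path: 'metadata-test/file.txt',
    contents: 'some content',
  });

  // store a custom key on the metadata sidecar and read it back
  const metadata = await file.getMetaData();
  await metadata.storeCustomMetaData({ key: 'owner', value: 'tests' });
  expect(await metadata.getCustomMetaData({ key: 'owner' })).toEqual('tests');

  await file.delete({ mode: 'permanent' });
});

tap.start();
```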
@@ -1,4 +1,5 @@
import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
import { jestExpect } from '@push.rocks/tapbundle/node';
import { Qenv } from '@push.rocks/qenv';

import * as smartbucket from '../ts/index.js';
@@ -11,14 +12,67 @@ let baseDirectory: smartbucket.Directory;

tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
});
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByName('testzone');
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
});

export default tap.start();
tap.test('should clean all contents', async () => {
await myBucket.cleanAllContents();
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
});

tap.test('should delete a file into the normally', async () => {
const path = 'trashtest/trashme.txt';
const file = await myBucket.fastPutStrict({
path,
contents: 'I\'m in the trash test content!',
});
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
console.log(fileMetadata.toString());
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
await file.delete({ mode: 'permanent' });
expect((await (await myBucket.getBaseDirectory()).listFiles()).length).toEqual(0);
expect((await (await myBucket.getBaseDirectory()).listDirectories()).length).toEqual(0);
});

tap.test('should put a file into the trash', async () => {
const path = 'trashtest/trashme.txt';
const file = await myBucket.fastPutStrict({
path,
contents: 'I\'m in the trash test content!',
});
const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
console.log(fileMetadata.toString());
expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
await file.delete({ mode: 'trash' });
jestExpect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({
custom_recycle: {
deletedAt: jestExpect.any(Number),
originalPath: "trashtest/trashme.txt",
},
});
});

tap.test('should restore a file from trash', async () => {
const baseDirectory = await myBucket.getBaseDirectory();
const file = await baseDirectory.getFileStrict({
path: 'trashtest/trashme.txt',
getFromTrash: true
});
const trashFileMeta = await file.getMetaData();
const data = await trashFileMeta.getCustomMetaData({
key: 'recycle'
});
expect(file).toBeInstanceOf(smartbucket.File);
await file.restore();
});


export default tap.start();
test/test.ts (28 changes)
@@ -11,14 +11,20 @@ let baseDirectory: smartbucket.Directory;

tap.test('should create a valid smartbucket', async () => {
testSmartbucket = new smartbucket.SmartBucket({
accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
endpoint: await testQenv.getEnvVarOnDemand('S3_ENDPOINT'),
accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
});
expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
myBucket = await testSmartbucket.getBucketByName('testzone');
myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
expect(myBucket.name).toEqual('testzone');
expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
});

tap.test('should clean all contents', async () => {
await myBucket.cleanAllContents();
expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
});

tap.skip.test('should create testbucket', async () => {
@@ -41,9 +47,12 @@ tap.test('should get data in bucket', async () => {
const fileString = await myBucket.fastGet({
path: 'hithere/socool.txt',
});
const fileStringStream = await myBucket.fastGetStream({
path: 'hithere/socool.txt',
}, 'nodestream');
const fileStringStream = await myBucket.fastGetStream(
{
path: 'hithere/socool.txt',
},
'nodestream'
);
console.log(fileString);
});

@@ -97,8 +106,9 @@ tap.test('should get base directory', async () => {
tap.test('should correctly build paths for sub directories', async () => {
const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
expect(dir4).toBeInstanceOf(smartbucket.Directory);
const dir4BasePath = dir4.getBasePath();
const dir4BasePath = dir4?.getBasePath();
console.log(dir4BasePath);
expect(dir4BasePath).toEqual('dir3/dir4/');
});

tap.test('clean up directory style tests', async () => {
@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/smartbucket',
version: '3.0.21',
version: '3.3.0',
description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
}
@@ -17,13 +17,14 @@ export class Bucket {
public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
const command = new plugins.s3.ListBucketsCommand({});
const buckets = await smartbucketRef.s3Client.send(command);
const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);
const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);

if (foundBucket) {
console.log(`bucket with name ${bucketNameArg} exists.`);
console.log(`Taking this as base for new Bucket instance`);
return new this(smartbucketRef, bucketNameArg);
} else {
console.log(`did not find bucket by name: ${bucketNameArg}`);
return null;
}
}
@@ -51,7 +52,7 @@ export class Bucket {
* gets the base directory of the bucket
*/
public async getBaseDirectory(): Promise<Directory> {
return new Directory(this, null, '');
return new Directory(this, null!, '');
}

/**
@@ -70,7 +71,9 @@ export class Bucket {
}
const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
const baseDirectory = await this.getBaseDirectory();
return await baseDirectory.getSubDirectoryByName(checkPath);
return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
getEmptyDirectory: true,
});
}

// ===============
@@ -85,14 +88,15 @@ export class Bucket {
contents: string | Buffer;
overwrite?: boolean;
}
): Promise<File> {
): Promise<File | null> {
try {
const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
const exists = await this.fastExists({ path: reducedPath });

if (exists && !optionsArg.overwrite) {
console.error(`Object already exists at path '${reducedPath}' in bucket '${this.name}'.`);
return;
const errorText = `Object already exists at path '${reducedPath}' in bucket '${this.name}'.`;
console.error(errorText);
return null;
} else if (exists && optionsArg.overwrite) {
console.log(
`Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
@@ -125,6 +129,14 @@ export class Bucket {
}
}

public async fastPutStrict(...args: Parameters<Bucket['fastPut']>) {
const file = await this.fastPut(...args);
if (!file) {
throw new Error(`File not stored at path '${args[0].path}'`);
}
return file;
}

/**
* get file
*/
@@ -149,7 +161,7 @@ export class Bucket {
},
});
await done.promise;
return completeFile;
return completeFile!;
}

/**
@@ -217,7 +229,7 @@ export class Bucket {
return chunk;
},
finalFunction: async (cb) => {
return null;
return null!;
},
});

@@ -231,6 +243,7 @@ export class Bucket {
if (typeArg === 'webstream') {
return (await duplexStream.getWebStreams()).readable;
}
throw new Error('unknown typeArg');
}

/**
@@ -329,7 +342,9 @@ export class Bucket {
}): Promise<void> {
try {
const destinationBucket = optionsArg.targetBucket || this;
const exists = await destinationBucket.fastExists({ path: optionsArg.destinationPath });
const exists = await destinationBucket.fastExists({
path: optionsArg.destinationPath,
});

if (exists && !optionsArg.overwrite) {
console.error(
@@ -386,8 +401,8 @@ export class Bucket {
await this.smartbucketRef.s3Client.send(command);
console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
return true;
} catch (error) {
if (error.name === 'NotFound') {
} catch (error: any) {
if (error?.name === 'NotFound') {
console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
return false;
} else {
@@ -422,8 +437,8 @@ export class Bucket {
Prefix: checkPath,
Delimiter: '/',
});
const response = await this.smartbucketRef.s3Client.send(command);
return response.CommonPrefixes.length > 0;
const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
return !!CommonPrefixes && CommonPrefixes.length > 0;
}

public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
@@ -433,7 +448,79 @@ export class Bucket {
Prefix: checkPath,
Delimiter: '/',
});
const response = await this.smartbucketRef.s3Client.send(command);
return response.Contents.length > 0;
const { Contents } = await this.smartbucketRef.s3Client.send(command);
return !!Contents && Contents.length > 0;
}

public async getMagicBytes(optionsArg: { path: string; length: number }): Promise<Buffer> {
try {
const command = new plugins.s3.GetObjectCommand({
Bucket: this.name,
Key: optionsArg.path,
Range: `bytes=0-${optionsArg.length - 1}`,
});
const response = await this.smartbucketRef.s3Client.send(command);
const chunks = [];
const stream = response.Body as any; // SdkStreamMixin includes readable stream

for await (const chunk of stream) {
chunks.push(chunk);
}
return Buffer.concat(chunks);
} catch (error) {
console.error(
`Error retrieving magic bytes from object at path '${optionsArg.path}' in bucket '${this.name}':`,
error
);
throw error;
}
}

public async cleanAllContents(): Promise<void> {
try {
// Define the command type explicitly
const listCommandInput: plugins.s3.ListObjectsV2CommandInput = {
Bucket: this.name,
};

let isTruncated = true;
let continuationToken: string | undefined = undefined;

while (isTruncated) {
// Add the continuation token to the input if present
const listCommand = new plugins.s3.ListObjectsV2Command({
...listCommandInput,
ContinuationToken: continuationToken,
});

// Explicitly type the response
const response: plugins.s3.ListObjectsV2Output =
await this.smartbucketRef.s3Client.send(listCommand);

console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);

if (response.Contents && response.Contents.length > 0) {
// Delete objects in batches, mapping each item to { Key: string }
const deleteCommand = new plugins.s3.DeleteObjectsCommand({
Bucket: this.name,
Delete: {
Objects: response.Contents.map((item) => ({ Key: item.Key! })),
Quiet: true,
},
});

await this.smartbucketRef.s3Client.send(deleteCommand);
}

// Update continuation token and truncation status
isTruncated = response.IsTruncated || false;
continuationToken = response.NextContinuationToken;
}

console.log(`All contents in bucket '${this.name}' have been deleted.`);
} catch (error) {
console.error(`Error cleaning contents of bucket '${this.name}':`, error);
throw error;
}
}
}
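The Bucket changes above add a strict put, ranged magic-byte reads, and a paginated cleanAllContents. A short usage sketch, assuming `bucket` was already obtained from a SmartBucket instance and 'magic/demo.png' is a hypothetical object path:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function inspectAndClean(bucket: smartbucket.Bucket) {
  // fastPutStrict throws instead of silently returning null
  await bucket.fastPutStrict({
    path: 'magic/demo.png',
    contents: Buffer.from([0x89, 0x50, 0x4e, 0x47]), // PNG signature used as demo content
  });

  // getMagicBytes issues a ranged GetObject (bytes=0-3 here) and returns a Buffer
  const magicBytes = await bucket.getMagicBytes({ path: 'magic/demo.png', length: 4 });
  console.log(magicBytes); // <Buffer 89 50 4e 47>

  // cleanAllContents pages through ListObjectsV2 and batch-deletes every object
  await bucket.cleanAllContents();
  console.log(await bucket.fastExists({ path: 'magic/demo.png' })); // false
}
```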
@@ -10,9 +10,9 @@ export class Directory {
public parentDirectoryRef: Directory;
public name: string;

public tree: string[];
public files: string[];
public folders: string[];
public tree!: string[];
public files!: string[];
public folders!: string[];

constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
this.bucketRef = bucketRefArg;
@@ -69,7 +69,7 @@ export class Directory {
path: string;
createWithContents?: string | Buffer;
getFromTrash?: boolean;
}): Promise<File> {
}): Promise<File | null> {
const pathDescriptor = {
directory: this,
path: optionsArg.path,
@@ -98,6 +98,19 @@ export class Directory {
});
}

/**
* gets a file strictly
* @param args
* @returns
*/
public async getFileStrict(...args: Parameters<Directory['getFile']>) {
const file = await this.getFile(...args);
if (!file) {
throw new Error(`File not found at path '${args[0].path}'`);
}
return file;
}

/**
* lists all files
*/
@@ -110,7 +123,7 @@ export class Directory {
const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
const fileArray: File[] = [];

response.Contents.forEach((item) => {
response.Contents?.forEach((item) => {
if (item.Key && !item.Key.endsWith('/')) {
const subtractedPath = item.Key.replace(this.getBasePath(), '');
if (!subtractedPath.includes('/')) {
@@ -178,23 +191,77 @@ export class Directory {
/**
* gets a sub directory by name
*/
public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
const dirNameArray = dirNameArg.split('/');
public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
/**
* in s3 a directory does not exist if it is empty
* this option returns a directory even if it is empty
*/
getEmptyDirectory?: boolean;
/**
* in s3 a directory does not exist if it is empty
* this option creates a directory even if it is empty using a initializer file
*/
createWithInitializerFile?: boolean;
/**
* if the path is a file path, it will be treated as a file and the parent directory will be returned
*/
couldBeFilePath?: boolean;
} = {}): Promise<Directory | null> {

const getDirectory = async (directoryArg: Directory, dirNameToSearch: string) => {
const directories = await directoryArg.listDirectories();
return directories.find((directory) => {
return directory.name === dirNameToSearch;
});
};
const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");

let wantedDirectory: Directory;
for (const dirNameToSearch of dirNameArray) {
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
optionsArg = {
getEmptyDirectory: false,
createWithInitializerFile: false,
...optionsArg,
}

return wantedDirectory;

const getDirectory = async (directoryArg: Directory, dirNameToSearch: string, isFinalDirectory: boolean) => {
const directories = await directoryArg.listDirectories();
let returnDirectory = directories.find((directory) => {
return directory.name === dirNameToSearch;
});
if (returnDirectory) {
return returnDirectory;
}
if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
}
if (isFinalDirectory && optionsArg.createWithInitializerFile) {
returnDirectory?.createEmptyFile('00init.txt');
}
return returnDirectory || null;
};

if (optionsArg.couldBeFilePath) {
const baseDirectory = await this.bucketRef.getBaseDirectory();
const existingFile = await baseDirectory.getFile({
path: dirNameArg,
});
if (existingFile) {
const adjustedPath = dirNameArg.substring(0, dirNameArg.lastIndexOf('/'));
return this.getSubDirectoryByName(adjustedPath);
}
}

let wantedDirectory: Directory | null = null;
let counter = 0;
for (const dirNameToSearch of dirNameArray) {
counter++;
const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
}

return wantedDirectory || null;
}

public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
const directory = await this.getSubDirectoryByName(...args);
if (!directory) {
throw new Error(`Directory not found at path '${args[0]}'`);
}
return directory;
}

/**
@@ -293,7 +360,7 @@ export class Directory {
*/
mode?: 'permanent' | 'trash';
}) {
const file = await this.getFile({
const file = await this.getFileStrict({
path: optionsArg.path,
});
await file.delete({
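getSubDirectoryByName now takes an options object because S3 prefixes only "exist" while they contain objects. A sketch of the new options, assuming `bucket` is an existing Bucket and 'reports/2024' is a hypothetical prefix:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function directoryLookups(bucket: smartbucket.Bucket) {
  const base = await bucket.getBaseDirectory();

  // default behaviour: resolves to null when the prefix holds no objects
  const maybeDir = await base.getSubDirectoryByName('reports/2024');

  // getEmptyDirectory: hand back a Directory instance even for an empty prefix
  const emptyDir = await base.getSubDirectoryByName('reports/2024', {
    getEmptyDirectory: true,
  });

  // couldBeFilePath: a path that points at a file resolves to that file's parent directory
  const parentDir = await base.getSubDirectoryByName('reports/2024/summary.txt', {
    couldBeFilePath: true,
  });

  // the strict variant throws instead of returning null;
  // createWithInitializerFile writes a 00init.txt placeholder so the prefix exists
  const ensuredDir = await base.getSubDirectoryByNameStrict('reports/2024', {
    createWithInitializerFile: true,
  });

  console.log(maybeDir, emptyDir, parentDir, ensuredDir.getBasePath());
}
```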
@@ -50,6 +50,10 @@ export class File {
public parentDirectoryRef: Directory;
public name: string;

/**
* get the full path to the file
* @returns the full path to the file
*/
public getBasePath(): string {
return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
}
@@ -88,24 +92,23 @@ export class File {
/**
* deletes this file
*/
public async delete(optionsArg?: {
mode: 'trash' | 'permanent';
}) {

public async delete(optionsArg?: { mode: 'trash' | 'permanent' }) {
optionsArg = {
... {
...{
mode: 'permanent',
},
...optionsArg,
}
};

if (optionsArg.mode === 'permanent') {
await this.parentDirectoryRef.bucketRef.fastRemove({
path: this.getBasePath(),
});
if (!this.name.endsWith('.metadata')) {
const metadata = await this.getMetaData();
await metadata.metadataFile.delete(optionsArg);
if (await this.hasMetaData()) {
const metadata = await this.getMetaData();
await metadata.metadataFile.delete(optionsArg);
}
}
} else if (optionsArg.mode === 'trash') {
const metadata = await this.getMetaData();
@@ -117,15 +120,39 @@ export class File {
},
});
const trash = await this.parentDirectoryRef.bucketRef.getTrash();
const trashDir = await trash.getTrashDir();
await this.move({
directory: await trash.getTrashDir(),
directory: trashDir,
path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
});
}


await this.parentDirectoryRef.listFiles();
}

/**
* restores
*/
public async restore(optionsArg: {
useOriginalPath?: boolean;
toPath?: string;
overwrite?: boolean;
} = {}) {
optionsArg = {
useOriginalPath: (() => {
return optionsArg.toPath ? false : true;
})(),
overwrite: false,
...optionsArg,
};
const moveToPath = optionsArg.toPath || (await (await this.getMetaData()).getCustomMetaData({
key: 'recycle'
})).originalPath;
await this.move({
path: moveToPath,
})
}

/**
* allows locking the file
* @param optionsArg
@@ -150,7 +177,7 @@ export class File {
}) {
const metadata = await this.getMetaData();
await metadata.removeLock({
force: optionsArg?.force,
force: optionsArg?.force || false,
});
}

@@ -165,16 +192,19 @@ export class File {
await this.parentDirectoryRef.bucketRef.fastPutStream({
path: this.getBasePath(),
readableStream: optionsArg.contents,
overwrite: true,
});
} else if (Buffer.isBuffer(optionsArg.contents)) {
await this.parentDirectoryRef.bucketRef.fastPut({
path: this.getBasePath(),
contents: optionsArg.contents,
overwrite: true,
});
} else if (typeof optionsArg.contents === 'string') {
await this.parentDirectoryRef.bucketRef.fastPut({
path: this.getBasePath(),
contents: Buffer.from(optionsArg.contents, optionsArg.encoding),
overwrite: true,
});
}
}
@@ -183,23 +213,52 @@ export class File {
* moves the file to another directory
*/
public async move(pathDescriptorArg: interfaces.IPathDecriptor) {
let moveToPath = '';
let moveToPath: string = '';
const isDirectory = await this.parentDirectoryRef.bucketRef.isDirectory(pathDescriptorArg);
if (isDirectory) {
moveToPath = await helpers.reducePathDescriptorToPath({
...pathDescriptorArg,
path: plugins.path.join(pathDescriptorArg.path, this.name),
path: plugins.path.join(pathDescriptorArg.path!, this.name),
});
} else {
moveToPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
}
// lets move the file
await this.parentDirectoryRef.bucketRef.fastMove({
sourcePath: this.getBasePath(),
destinationPath: moveToPath,
overwrite: true,
});

// lets move the metadatafile
const metadata = await this.getMetaData();
await metadata.metadataFile.move(pathDescriptorArg);
if (!this.name.endsWith('.metadata')) {
const metadata = await this.getMetaData();
await this.parentDirectoryRef.bucketRef.fastMove({
sourcePath: metadata.metadataFile.getBasePath(),
destinationPath: moveToPath + '.metadata',
overwrite: true,
});
}

// lets update references of this
const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
await helpers.reducePathDescriptorToPath(pathDescriptorArg),
{
couldBeFilePath: true,
}
);
this.name = pathDescriptorArg.path!;
}

public async hasMetaData(): Promise<boolean> {
if (!this.name.endsWith('.metadata')) {
const hasMetadataBool = MetaData.hasMetaData({
file: this,
});
return hasMetadataBool;
} else {
return false;
}
}

/**
@@ -230,4 +289,11 @@ export class File {
contents: JSON.stringify(dataArg),
});
}

public async getMagicBytes(optionsArg: { length: number }): Promise<Buffer> {
return this.parentDirectoryRef.bucketRef.getMagicBytes({
path: this.getBasePath(),
length: optionsArg.length,
});
}
}
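The File changes keep the `<name>.metadata` sidecar in sync on delete and move, and add restore(). A sketch of the restore options, assuming `file` is a File instance currently sitting in the trash and 'restored/copy.txt' is a hypothetical target path:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function restoreExamples(file: smartbucket.File) {
  // true if a "<name>.metadata" sidecar exists for this file
  console.log(await file.hasMetaData());

  // default restore: moves the object back to the originalPath recorded
  // under the custom "recycle" metadata key when it was trashed
  await file.restore();

  // alternatively, restore to an explicit target instead of the original path:
  // await file.restore({ toPath: 'restored/copy.txt', overwrite: true });
}
```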
@@ -3,13 +3,21 @@ import * as plugins from './plugins.js';
import { File } from './classes.file.js';

export class MetaData {
public static async hasMetaData(optionsArg: { file: File }) {
// lets find the existing metadata file
const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
path: optionsArg.file.name + '.metadata',
});
return !!existingFile;
}

// static
public static async createForFile(optionsArg: { file: File }) {
const metaData = new MetaData();
metaData.fileRef = optionsArg.file;

// lets find the existing metadata file
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
path: metaData.fileRef.name + '.metadata',
createWithContents: '{}',
});
@@ -21,20 +29,34 @@ export class MetaData {
/**
* the file that contains the metadata
*/
metadataFile: File;
metadataFile!: File;

/**
* the file that the metadata is for
*/
fileRef: File;
fileRef!: File;

public async getFileType(optionsArg?: {
useFileExtension?: boolean;
useMagicBytes?: boolean;
}): Promise<string> {
if ((optionsArg && optionsArg.useFileExtension) || optionsArg.useFileExtension === undefined) {
return plugins.path.extname(this.fileRef.name);
}): Promise<plugins.smartmime.IFileTypeResult | undefined> {
if ((optionsArg && optionsArg.useFileExtension) || !optionsArg) {
const fileType = await plugins.smartmime.detectMimeType({
path: this.fileRef.name,
});

return fileType;
}
if (optionsArg && optionsArg.useMagicBytes) {
const fileType = await plugins.smartmime.detectMimeType({
buffer: await this.fileRef.getMagicBytes({
length: 100,
})
});

return fileType;
}
throw new Error('optionsArg.useFileExtension and optionsArg.useMagicBytes cannot both be false');
}

/**
@@ -44,13 +66,13 @@ export class MetaData {
const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
path: this.fileRef.getBasePath(),
});
return stat.ContentLength;
return stat.ContentLength!;
}

private prefixCustomMetaData = 'custom_';

public async storeCustomMetaData<T = any>(optionsArg: { key: string; value: T }) {
const data = await this.metadataFile.getContentsAsString();
const data = await this.metadataFile.getJsonData();
data[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
await this.metadataFile.writeJsonData(data);
}
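getFileType now delegates to @push.rocks/smartmime and can work either from the file name (default) or from the first 100 magic bytes of the stored object. A usage sketch, assuming `file` is an existing File:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function detectType(file: smartbucket.File) {
  const metadata = await file.getMetaData();

  // default: detect the mime type from the file name / extension
  const byExtension = await metadata.getFileType();

  // alternative: sniff the object's first 100 bytes (magic bytes)
  const byMagicBytes = await metadata.getFileType({ useMagicBytes: true });

  console.log(byExtension, byMagicBytes); // smartmime detection results, possibly undefined
}
```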
@@ -41,7 +41,15 @@ export class SmartBucket {
await Bucket.removeBucketByName(this, bucketName);
}

public async getBucketByName(bucketName: string) {
return Bucket.getBucketByName(this, bucketName);
public async getBucketByName(bucketNameArg: string) {
return Bucket.getBucketByName(this, bucketNameArg);
}

public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
const bucket = await this.getBucketByName(...args);
if (!bucket) {
throw new Error(`Bucket ${args[0]} does not exist.`);
}
return bucket;
}
}
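getBucketByNameStrict follows the same "strict wrapper" pattern as fastPutStrict, getFileStrict, and getSubDirectoryByNameStrict: reuse the loose method's parameter list via `Parameters<...>` and turn a null result into a thrown error. A self-contained illustration of the pattern (the Repository/findUser names are made up and not part of smartbucket):

```typescript
class Repository {
  async findUser(name: string): Promise<string | null> {
    // stand-in lookup that only knows one user
    return name === 'alice' ? 'alice@example.com' : null;
  }

  // the wrapper inherits findUser's exact signature and narrows the return type
  async findUserStrict(...args: Parameters<Repository['findUser']>): Promise<string> {
    const result = await this.findUser(...args);
    if (!result) {
      throw new Error(`User ${args[0]} does not exist.`);
    }
    return result;
  }
}

const repo = new Repository();
repo.findUserStrict('alice').then(console.log);                         // alice@example.com
repo.findUserStrict('bob').catch((err) => console.error(err.message));  // User bob does not exist.
```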
@@ -21,7 +21,7 @@ export class Trash {
const trashDir = await this.getTrashDir();
const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
return trashDir.getFile({ path: trashKey });
return trashDir.getFileStrict({ path: trashKey });
}

public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
@@ -6,7 +6,8 @@
"module": "NodeNext",
"moduleResolution": "NodeNext",
"esModuleInterop": true,
"verbatimModuleSyntax": true
"verbatimModuleSyntax": true,
"strict": true
},
"exclude": [
"dist_*/**/*.d.ts"
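Enabling "strict": true is what drives the many small `!` non-null assertions and the `catch (error: any)` typings elsewhere in this diff: optional AWS SDK response fields become `T | undefined` and must be either guarded or asserted. A minimal illustration (not smartbucket code):

```typescript
interface ListResult {
  Contents?: { Key?: string }[]; // shaped like an S3 ListObjectsV2 response field
}

// option 1: guard explicitly and fall back to null
function firstKeyGuarded(result: ListResult): string | null {
  return result.Contents && result.Contents.length > 0 ? result.Contents[0].Key ?? null : null;
}

// option 2: assert non-null when the caller knows the data is present
function firstKeyAsserted(result: ListResult): string {
  return result.Contents![0].Key!;
}

console.log(firstKeyGuarded({ Contents: [{ Key: 'hello.txt' }] }));  // hello.txt
console.log(firstKeyAsserted({ Contents: [{ Key: 'hello.txt' }] })); // hello.txt
```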