Compare commits
20 Commits
| SHA1 |
| --- |
| aa9a2e9220 |
| 154854dc21 |
| 8e9041fbbf |
| 16a82ac50a |
| 0b396f19cf |
| 6ab77ece6e |
| b7a1f2087c |
| b0d41fa9a0 |
| 34082c38a7 |
| 8d160cefb0 |
| cec9c07b7c |
| 383a5204f4 |
| c7f0c97341 |
| e7f60465ff |
| 7db4d24817 |
| dc599585b8 |
| a22e32cd32 |
| 4647181807 |
| 99c3935d0c |
| 05523dc7a1 |
changelog.md (new file, 113 lines)

@@ -0,0 +1,113 @@
# Changelog

## 2024-11-24 - 3.3.0 - feat(core)

Enhanced directory handling and file restoration from trash

- Refined getSubDirectoryByName to handle file paths treated as directories.
- Introduced file restoration function from trash to original or specified paths.
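To make the restore flow described in this entry concrete, here is a minimal usage sketch (not part of the changelog or the diff). It assumes the package's main entry exports the classes used in the tests further down; bucket and object paths are placeholders.

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function trashAndRestoreDemo() {
  const storage = new smartbucket.SmartBucket({
    accessKey: process.env.S3_ACCESSKEY!,
    accessSecret: process.env.S3_ACCESSSECRET!,
    endpoint: process.env.S3_ENDPOINT!,
  });
  const bucket = await storage.getBucketByNameStrict('my-test-bucket'); // placeholder bucket name

  // write an object, then soft-delete it into the trash
  const file = await bucket.fastPutStrict({ path: 'docs/report.txt', contents: 'hello' });
  await file.delete({ mode: 'trash' });

  // fetch the trashed object and move it back to its original path
  const baseDirectory = await bucket.getBaseDirectory();
  const trashed = await baseDirectory.getFileStrict({ path: 'docs/report.txt', getFromTrash: true });
  await trashed.restore(); // uses the recorded original path; pass { toPath: '...' } to restore elsewhere
}
```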
## 2024-11-24 - 3.2.2 - fix(core)

Refactor Bucket class for improved error handling

- Ensured safe access using non-null assertions when finding a bucket.
- Enhanced fastPut method by adding fastPutStrict for safer operations.
- Added explicit error handling and type checking in fastExists method.
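The practical difference between `fastPut` and the new `fastPutStrict` is how a refused write is reported. A short sketch (not from the repository); the bucket is assumed to be obtained as in the earlier example and the path is a placeholder.

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function putDemo(bucket: smartbucket.Bucket) {
  // fastPut resolves to null when the object already exists and overwrite is not set
  const maybeFile = await bucket.fastPut({ path: 'docs/report.txt', contents: 'v2' });
  if (!maybeFile) {
    console.log('not written: object already exists');
  }

  // fastPutStrict throws in that case, so a successful await always yields a File
  const file = await bucket.fastPutStrict({ path: 'docs/report.txt', contents: 'v2', overwrite: true });
  console.log(file.getBasePath());
}
```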
## 2024-11-24 - 3.2.1 - fix(metadata)

Fix metadata handling for deleted files

- Ensured metadata is correctly stored and managed when files are deleted into the trash.

## 2024-11-24 - 3.2.0 - feat(bucket)

Enhanced SmartBucket with trash management and metadata handling

- Added functionality to move files to a trash directory.
- Introduced methods to handle file metadata more robustly.
- Implemented a method to clean all contents from a bucket.
- Enhanced directory retrieval to handle non-existent directories with options.
- Improved handling of file paths and metadata within the storage system.
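When a file is deleted with `mode: 'trash'`, its metadata sidecar records where it came from; this is exactly what the trash tests later in this diff assert. A sketch (not from the repository), assuming a `file` obtained from one of the put calls above:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function inspectTrashMetadata(file: smartbucket.File) {
  await file.delete({ mode: 'trash' });

  // the sidecar JSON now contains a custom_recycle entry, e.g.
  // { custom_recycle: { deletedAt: 1732406400000, originalPath: 'docs/report.txt' } }
  const meta = await file.getMetaData();
  const recycleInfo = await meta.getCustomMetaData({ key: 'recycle' });
  console.log(recycleInfo.originalPath, new Date(recycleInfo.deletedAt));
}
```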
## 2024-11-18 - 3.1.0 - feat(file)

Added functionality to retrieve magic bytes from files and detect file types using magic bytes.

- Introduced method `getMagicBytes` in `File` and `Bucket` classes to retrieve a specific number of bytes from a file.
- Enhanced file type detection by utilizing magic bytes in `MetaData` class.
- Updated dependencies for better performance and compatibility.
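Magic-byte detection reads the first bytes of the stored object instead of trusting the file name. A sketch (not from the repository), assuming a PNG was uploaded at the placeholder path; the PNG signature is `89 50 4E 47`:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function detectTypeDemo(bucket: smartbucket.Bucket) {
  const baseDirectory = await bucket.getBaseDirectory();
  const image = await baseDirectory.getFileStrict({ path: 'images/logo.png' });

  // raw leading bytes, fetched via an S3 Range request
  const magicBytes = await image.getMagicBytes({ length: 4 });
  console.log(magicBytes); // <Buffer 89 50 4e 47> for a PNG

  // MetaData can base detection on those bytes rather than the file extension
  const meta = await image.getMetaData();
  const fileType = await meta.getFileType({ useMagicBytes: true, useFileExtension: false });
  console.log(fileType);
}
```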
## 2024-11-18 - 3.0.24 - fix(metadata)

Fix metadata handling to address type assertion and data retrieval.

- Fixed type assertion issues in `MetaData` class properties with non-null assertions.
- Corrected the handling of JSON data retrieval in `MetaData.storeCustomMetaData` function.

## 2024-10-16 - 3.0.23 - fix(dependencies)

Update package dependencies for improved functionality and security.

- Updated @aws-sdk/client-s3 to version ^3.670.0 for enhanced S3 client capabilities.
- Updated @push.rocks/smartstream to version ^3.2.4.
- Updated the dev dependency @push.rocks/tapbundle to version ^5.3.0.

## 2024-07-28 - 3.0.22 - fix(dependencies)

Update dependencies and improve bucket retrieval logging

- Updated @aws-sdk/client-s3 to ^3.620.0
- Updated @git.zone/tsbuild to ^2.1.84
- Updated @git.zone/tsrun to ^1.2.49
- Updated @push.rocks/smartpromise to ^4.0.4
- Updated @tsclass/tsclass to ^4.1.2
- Added a log for when a bucket is not found by name in getBucketByName method

## 2024-07-04 - 3.0.21 - fix(test)

Update endpoint configuration in tests to use environment variable

- Modified `qenv.yml` to include `S3_ENDPOINT` as a required environment variable.
- Updated test files to fetch `S3_ENDPOINT` from environment instead of hardcoding.

## 2024-06-19 - 3.0.20 - Fix and Stability Updates

Improved overall stability and consistency.

## 2024-06-18 - 3.0.18 - Delete Functions Consistency

Ensured more consistency between delete methods and trash behavior.

## 2024-06-17 - 3.0.17 to 3.0.16 - Fix and Update

Routine updates and fixes performed.

## 2024-06-11 - 3.0.15 to 3.0.14 - Fix and Update

Routine updates and fixes performed.

## 2024-06-10 - 3.0.13 - Trash Feature Completion

Finished work on trash feature.

## 2024-06-09 - 3.0.12 - Fix and Update

Routine updates and fixes performed.

## 2024-06-08 - 3.0.11 to 3.0.10 - Fix and Update

Routine updates and fixes performed.

## 2024-06-03 - 3.0.10 - Fix and Update

Routine updates and fixes performed.

## 2024-05-29 - 3.0.9 - Update Description

Updated project description.

## 2024-05-27 - 3.0.8 to 3.0.6 - Pathing and Core Updates

Routine updates and fixes performed.

- S3 paths' pathing differences now correctly handled with a reducePath method.

## 2024-05-21 - 3.0.5 to 3.0.4 - Fix and Update

Routine updates and fixes performed.

## 2024-05-17 - 3.0.3 to 3.0.2 - Fix and Update

Routine updates and fixes performed.

## 2024-05-17 - 3.0.0 - Major Release

Introduced breaking changes in core and significant improvements.

## 2024-05-05 - 2.0.5 - Breaking Changes

Introduced breaking changes in core functionality.

## 2024-04-14 - 2.0.4 - TSConfig Update

Updated TypeScript configuration.

## 2024-01-01 - 2.0.2 - Organization Scheme Update

Switched to the new organizational scheme.
package-lock.json (generated, 4 lines changed)

@@ -1,12 +1,12 @@
 {
   "name": "@push.rocks/smartbucket",
-  "version": "3.0.20",
+  "version": "3.3.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "@push.rocks/smartbucket",
-      "version": "3.0.20",
+      "version": "3.3.0",
       "license": "UNLICENSED",
       "dependencies": {
         "@push.rocks/smartpath": "^5.0.18",
package.json (20 lines changed)

@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartbucket",
-  "version": "3.0.20",
+  "version": "3.3.0",
   "description": "A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.",
   "main": "dist_ts/index.js",
   "typings": "dist_ts/index.d.ts",
@@ -12,22 +12,22 @@
     "build": "(tsbuild --web --allowimplicitany)"
   },
   "devDependencies": {
-    "@git.zone/tsbuild": "^2.1.80",
-    "@git.zone/tsrun": "^1.2.46",
+    "@git.zone/tsbuild": "^2.1.84",
+    "@git.zone/tsrun": "^1.2.49",
     "@git.zone/tstest": "^1.0.90",
-    "@push.rocks/qenv": "^6.0.5",
-    "@push.rocks/tapbundle": "^5.0.23"
+    "@push.rocks/qenv": "^6.1.0",
+    "@push.rocks/tapbundle": "^5.5.3"
   },
   "dependencies": {
-    "@aws-sdk/client-s3": "^3.600.0",
-    "@push.rocks/smartmime": "^2.0.2",
+    "@aws-sdk/client-s3": "^3.699.0",
+    "@push.rocks/smartmime": "^2.0.4",
     "@push.rocks/smartpath": "^5.0.18",
-    "@push.rocks/smartpromise": "^4.0.3",
+    "@push.rocks/smartpromise": "^4.0.4",
     "@push.rocks/smartrx": "^3.0.7",
-    "@push.rocks/smartstream": "^3.0.44",
+    "@push.rocks/smartstream": "^3.2.5",
     "@push.rocks/smartstring": "^4.0.15",
     "@push.rocks/smartunique": "^3.0.9",
-    "@tsclass/tsclass": "^4.0.60"
+    "@tsclass/tsclass": "^4.1.2"
   },
   "private": false,
   "files": [
pnpm-lock.yaml (generated, 6193 lines changed)

File diff suppressed because it is too large.
qenv.yml (1 line changed)

@@ -1,3 +1,4 @@
 required:
   - S3_KEY
   - S3_SECRET
+  - S3_ENDPOINT
test/helpers/prepare.ts (new file, empty)
test/test.metadata.ts (new file, 7 lines)

@@ -0,0 +1,7 @@
+import { tap, expect } from '@push.rocks/tapbundle';
+
+tap.test('test metadata functionality', async () => {
+
+})
+
+tap.start();
@@ -1,4 +1,5 @@
 import { expect, expectAsync, tap } from '@push.rocks/tapbundle';
+import { jestExpect } from '@push.rocks/tapbundle/node';
 import { Qenv } from '@push.rocks/qenv';
 
 import * as smartbucket from '../ts/index.js';
@@ -11,14 +12,67 @@ let baseDirectory: smartbucket.Directory;
 
 tap.test('should create a valid smartbucket', async () => {
   testSmartbucket = new smartbucket.SmartBucket({
-    accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
-    accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
-    endpoint: 's3.eu-central-1.wasabisys.com',
+    accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
+    accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
+    endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
   });
   expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
-  myBucket = await testSmartbucket.getBucketByName('testzone');
+  myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
   expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
-  expect(myBucket.name).toEqual('testzone');
+  expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
 });
 
+tap.test('should clean all contents', async () => {
+  await myBucket.cleanAllContents();
+  expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
+  expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
+});
+
+tap.test('should delete a file into the normally', async () => {
+  const path = 'trashtest/trashme.txt';
+  const file = await myBucket.fastPutStrict({
+    path,
+    contents: 'I\'m in the trash test content!',
+  });
+  const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
+  console.log(fileMetadata.toString());
+  expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
+  await file.delete({ mode: 'permanent' });
+  expect((await (await myBucket.getBaseDirectory()).listFiles()).length).toEqual(0);
+  expect((await (await myBucket.getBaseDirectory()).listDirectories()).length).toEqual(0);
+});
+
+tap.test('should put a file into the trash', async () => {
+  const path = 'trashtest/trashme.txt';
+  const file = await myBucket.fastPutStrict({
+    path,
+    contents: 'I\'m in the trash test content!',
+  });
+  const fileMetadata = await (await file.getMetaData()).metadataFile.getContents();
+  console.log(fileMetadata.toString());
+  expect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({});
+  await file.delete({ mode: 'trash' });
+  jestExpect(await file.getMetaData().then((meta) => meta.metadataFile.getJsonData())).toEqual({
+    custom_recycle: {
+      deletedAt: jestExpect.any(Number),
+      originalPath: "trashtest/trashme.txt",
+    },
+  });
+});
+
+tap.test('should restore a file from trash', async () => {
+  const baseDirectory = await myBucket.getBaseDirectory();
+  const file = await baseDirectory.getFileStrict({
+    path: 'trashtest/trashme.txt',
+    getFromTrash: true
+  });
+  const trashFileMeta = await file.getMetaData();
+  const data = await trashFileMeta.getCustomMetaData({
+    key: 'recycle'
+  });
+  expect(file).toBeInstanceOf(smartbucket.File);
+  await file.restore();
+});
+
+
 export default tap.start();
test/test.ts (28 lines changed)

@@ -11,14 +11,20 @@ let baseDirectory: smartbucket.Directory;
 
 tap.test('should create a valid smartbucket', async () => {
   testSmartbucket = new smartbucket.SmartBucket({
-    accessKey: await testQenv.getEnvVarOnDemand('S3_KEY'),
-    accessSecret: await testQenv.getEnvVarOnDemand('S3_SECRET'),
-    endpoint: 's3.eu-central-1.wasabisys.com',
+    accessKey: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSKEY'),
+    accessSecret: await testQenv.getEnvVarOnDemandStrict('S3_ACCESSSECRET'),
+    endpoint: await testQenv.getEnvVarOnDemandStrict('S3_ENDPOINT'),
   });
   expect(testSmartbucket).toBeInstanceOf(smartbucket.SmartBucket);
-  myBucket = await testSmartbucket.getBucketByName('testzone');
+  myBucket = await testSmartbucket.getBucketByNameStrict(await testQenv.getEnvVarOnDemandStrict('S3_BUCKET'),);
   expect(myBucket).toBeInstanceOf(smartbucket.Bucket);
-  expect(myBucket.name).toEqual('testzone');
+  expect(myBucket.name).toEqual('test-pushrocks-smartbucket');
+});
+
+tap.test('should clean all contents', async () => {
+  await myBucket.cleanAllContents();
+  expect(await myBucket.fastExists({ path: 'hithere/socool.txt' })).toBeFalse();
+  expect(await myBucket.fastExists({ path: 'trashtest/trashme.txt' })).toBeFalse();
 });
 
 tap.skip.test('should create testbucket', async () => {
@@ -41,9 +47,12 @@ tap.test('should get data in bucket', async () => {
   const fileString = await myBucket.fastGet({
     path: 'hithere/socool.txt',
   });
-  const fileStringStream = await myBucket.fastGetStream({
-    path: 'hithere/socool.txt',
-  }, 'nodestream');
+  const fileStringStream = await myBucket.fastGetStream(
+    {
+      path: 'hithere/socool.txt',
+    },
+    'nodestream'
+  );
   console.log(fileString);
 });
 
@@ -97,8 +106,9 @@ tap.test('should get base directory', async () => {
 tap.test('should correctly build paths for sub directories', async () => {
   const dir4 = await baseDirectory.getSubDirectoryByName('dir3/dir4');
   expect(dir4).toBeInstanceOf(smartbucket.Directory);
-  const dir4BasePath = dir4.getBasePath();
+  const dir4BasePath = dir4?.getBasePath();
   console.log(dir4BasePath);
+  expect(dir4BasePath).toEqual('dir3/dir4/');
 });
 
 tap.test('clean up directory style tests', async () => {
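The reshaped `fastGetStream` call in the test above takes the path descriptor and the stream flavor as separate arguments. A sketch (not from the repository) of consuming the 'nodestream' variant by piping it to disk; paths are placeholders and the stream typing is assumed to be Node-compatible:

```typescript
import { createWriteStream } from 'node:fs';
import { pipeline } from 'node:stream/promises';
import * as smartbucket from '@push.rocks/smartbucket';

async function downloadDemo(bucket: smartbucket.Bucket) {
  // node stream flavor: pipe the object straight to a local file
  const readable = await bucket.fastGetStream({ path: 'hithere/socool.txt' }, 'nodestream');
  await pipeline(readable, createWriteStream('./socool.txt'));

  // 'webstream' returns a WHATWG ReadableStream instead; any other value now throws 'unknown typeArg'
  const webReadable = await bucket.fastGetStream({ path: 'hithere/socool.txt' }, 'webstream');
  console.log(webReadable instanceof ReadableStream);
}
```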
@@ -1,8 +1,8 @@
 /**
- * autocreated commitinfo by @pushrocks/commitinfo
+ * autocreated commitinfo by @push.rocks/commitinfo
  */
 export const commitinfo = {
   name: '@push.rocks/smartbucket',
-  version: '3.0.20',
+  version: '3.3.0',
   description: 'A TypeScript library offering simple and cloud-agnostic object storage with advanced features like bucket creation, file and directory management, and data streaming.'
 }
@@ -17,13 +17,14 @@ export class Bucket {
   public static async getBucketByName(smartbucketRef: SmartBucket, bucketNameArg: string) {
     const command = new plugins.s3.ListBucketsCommand({});
     const buckets = await smartbucketRef.s3Client.send(command);
-    const foundBucket = buckets.Buckets.find((bucket) => bucket.Name === bucketNameArg);
+    const foundBucket = buckets.Buckets!.find((bucket) => bucket.Name === bucketNameArg);
 
     if (foundBucket) {
       console.log(`bucket with name ${bucketNameArg} exists.`);
       console.log(`Taking this as base for new Bucket instance`);
       return new this(smartbucketRef, bucketNameArg);
     } else {
+      console.log(`did not find bucket by name: ${bucketNameArg}`);
       return null;
     }
   }
@@ -51,7 +52,7 @@ export class Bucket {
    * gets the base directory of the bucket
    */
   public async getBaseDirectory(): Promise<Directory> {
-    return new Directory(this, null, '');
+    return new Directory(this, null!, '');
   }
 
   /**
@@ -70,7 +71,9 @@ export class Bucket {
     }
     const checkPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
     const baseDirectory = await this.getBaseDirectory();
-    return await baseDirectory.getSubDirectoryByName(checkPath);
+    return await baseDirectory.getSubDirectoryByNameStrict(checkPath, {
+      getEmptyDirectory: true,
+    });
   }
 
   // ===============
@@ -85,14 +88,15 @@ export class Bucket {
       contents: string | Buffer;
       overwrite?: boolean;
     }
-  ): Promise<File> {
+  ): Promise<File | null> {
     try {
       const reducedPath = await helpers.reducePathDescriptorToPath(optionsArg);
       const exists = await this.fastExists({ path: reducedPath });
 
       if (exists && !optionsArg.overwrite) {
-        console.error(`Object already exists at path '${reducedPath}' in bucket '${this.name}'.`);
-        return;
+        const errorText = `Object already exists at path '${reducedPath}' in bucket '${this.name}'.`;
+        console.error(errorText);
+        return null;
       } else if (exists && optionsArg.overwrite) {
         console.log(
           `Overwriting existing object at path '${reducedPath}' in bucket '${this.name}'.`
@@ -125,6 +129,14 @@ export class Bucket {
     }
   }
 
+  public async fastPutStrict(...args: Parameters<Bucket['fastPut']>) {
+    const file = await this.fastPut(...args);
+    if (!file) {
+      throw new Error(`File not stored at path '${args[0].path}'`);
+    }
+    return file;
+  }
+
   /**
    * get file
    */
@@ -149,7 +161,7 @@ export class Bucket {
       },
     });
     await done.promise;
-    return completeFile;
+    return completeFile!;
   }
 
   /**
@@ -217,7 +229,7 @@ export class Bucket {
         return chunk;
       },
       finalFunction: async (cb) => {
-        return null;
+        return null!;
       },
     });
 
@@ -231,6 +243,7 @@ export class Bucket {
     if (typeArg === 'webstream') {
       return (await duplexStream.getWebStreams()).readable;
     }
+    throw new Error('unknown typeArg');
   }
 
   /**
@@ -329,7 +342,9 @@ export class Bucket {
   }): Promise<void> {
     try {
       const destinationBucket = optionsArg.targetBucket || this;
-      const exists = await destinationBucket.fastExists({ path: optionsArg.destinationPath });
+      const exists = await destinationBucket.fastExists({
+        path: optionsArg.destinationPath,
+      });
 
       if (exists && !optionsArg.overwrite) {
         console.error(
@@ -386,8 +401,8 @@ export class Bucket {
       await this.smartbucketRef.s3Client.send(command);
       console.log(`Object '${optionsArg.path}' exists in bucket '${this.name}'.`);
       return true;
-    } catch (error) {
-      if (error.name === 'NotFound') {
+    } catch (error: any) {
+      if (error?.name === 'NotFound') {
        console.log(`Object '${optionsArg.path}' does not exist in bucket '${this.name}'.`);
        return false;
      } else {
@@ -422,8 +437,8 @@ export class Bucket {
       Prefix: checkPath,
       Delimiter: '/',
     });
-    const response = await this.smartbucketRef.s3Client.send(command);
-    return response.CommonPrefixes.length > 0;
+    const { CommonPrefixes } = await this.smartbucketRef.s3Client.send(command);
+    return !!CommonPrefixes && CommonPrefixes.length > 0;
   }
 
   public async isFile(pathDescriptor: interfaces.IPathDecriptor): Promise<boolean> {
@@ -433,7 +448,79 @@ export class Bucket {
       Prefix: checkPath,
       Delimiter: '/',
     });
-    const response = await this.smartbucketRef.s3Client.send(command);
-    return response.Contents.length > 0;
+    const { Contents } = await this.smartbucketRef.s3Client.send(command);
+    return !!Contents && Contents.length > 0;
+  }
+
+  public async getMagicBytes(optionsArg: { path: string; length: number }): Promise<Buffer> {
+    try {
+      const command = new plugins.s3.GetObjectCommand({
+        Bucket: this.name,
+        Key: optionsArg.path,
+        Range: `bytes=0-${optionsArg.length - 1}`,
+      });
+      const response = await this.smartbucketRef.s3Client.send(command);
+      const chunks = [];
+      const stream = response.Body as any; // SdkStreamMixin includes readable stream
+
+      for await (const chunk of stream) {
+        chunks.push(chunk);
+      }
+      return Buffer.concat(chunks);
+    } catch (error) {
+      console.error(
+        `Error retrieving magic bytes from object at path '${optionsArg.path}' in bucket '${this.name}':`,
+        error
+      );
+      throw error;
+    }
+  }
+
+  public async cleanAllContents(): Promise<void> {
+    try {
+      // Define the command type explicitly
+      const listCommandInput: plugins.s3.ListObjectsV2CommandInput = {
+        Bucket: this.name,
+      };
+
+      let isTruncated = true;
+      let continuationToken: string | undefined = undefined;
+
+      while (isTruncated) {
+        // Add the continuation token to the input if present
+        const listCommand = new plugins.s3.ListObjectsV2Command({
+          ...listCommandInput,
+          ContinuationToken: continuationToken,
+        });
+
+        // Explicitly type the response
+        const response: plugins.s3.ListObjectsV2Output =
+          await this.smartbucketRef.s3Client.send(listCommand);
+
+        console.log(`Cleaning contents of bucket '${this.name}': Now deleting ${response.Contents?.length} items...`);
+
+        if (response.Contents && response.Contents.length > 0) {
+          // Delete objects in batches, mapping each item to { Key: string }
+          const deleteCommand = new plugins.s3.DeleteObjectsCommand({
+            Bucket: this.name,
+            Delete: {
+              Objects: response.Contents.map((item) => ({ Key: item.Key! })),
+              Quiet: true,
+            },
+          });
+
+          await this.smartbucketRef.s3Client.send(deleteCommand);
+        }
+
+        // Update continuation token and truncation status
+        isTruncated = response.IsTruncated || false;
+        continuationToken = response.NextContinuationToken;
+      }
+
+      console.log(`All contents in bucket '${this.name}' have been deleted.`);
+    } catch (error) {
+      console.error(`Error cleaning contents of bucket '${this.name}':`, error);
+      throw error;
+    }
   }
 }
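The `cleanAllContents` method added above pages through the listing with a continuation token, since `ListObjectsV2` returns at most 1000 keys per request, and issues one batched `DeleteObjects` call per page. Using it is a one-liner; a sketch (not from the repository) with placeholder names:

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function wipeBucketDemo(storage: smartbucket.SmartBucket) {
  const bucket = await storage.getBucketByNameStrict('scratch-bucket'); // placeholder bucket name
  await bucket.cleanAllContents();
  console.log(await bucket.fastExists({ path: 'anything/left.txt' })); // false once the bucket is empty
}
```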
@@ -10,9 +10,9 @@ export class Directory {
   public parentDirectoryRef: Directory;
   public name: string;
 
-  public tree: string[];
-  public files: string[];
-  public folders: string[];
+  public tree!: string[];
+  public files!: string[];
+  public folders!: string[];
 
   constructor(bucketRefArg: Bucket, parentDirectory: Directory, name: string) {
     this.bucketRef = bucketRefArg;
@@ -69,7 +69,7 @@ export class Directory {
     path: string;
     createWithContents?: string | Buffer;
     getFromTrash?: boolean;
-  }): Promise<File> {
+  }): Promise<File | null> {
     const pathDescriptor = {
       directory: this,
       path: optionsArg.path,
@@ -98,6 +98,19 @@ export class Directory {
     });
   }
 
+  /**
+   * gets a file strictly
+   * @param args
+   * @returns
+   */
+  public async getFileStrict(...args: Parameters<Directory['getFile']>) {
+    const file = await this.getFile(...args);
+    if (!file) {
+      throw new Error(`File not found at path '${args[0].path}'`);
+    }
+    return file;
+  }
+
   /**
    * lists all files
    */
@@ -110,7 +123,7 @@ export class Directory {
     const response = await this.bucketRef.smartbucketRef.s3Client.send(command);
     const fileArray: File[] = [];
 
-    response.Contents.forEach((item) => {
+    response.Contents?.forEach((item) => {
       if (item.Key && !item.Key.endsWith('/')) {
         const subtractedPath = item.Key.replace(this.getBasePath(), '');
         if (!subtractedPath.includes('/')) {
@@ -178,23 +191,77 @@ export class Directory {
   /**
    * gets a sub directory by name
    */
-  public async getSubDirectoryByName(dirNameArg: string): Promise<Directory> {
-    const dirNameArray = dirNameArg.split('/');
-
-    const getDirectory = async (directoryArg: Directory, dirNameToSearch: string) => {
-      const directories = await directoryArg.listDirectories();
-      return directories.find((directory) => {
-        return directory.name === dirNameToSearch;
-      });
-    };
-    let wantedDirectory: Directory;
-    for (const dirNameToSearch of dirNameArray) {
-      const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
-      wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch);
-    }
-    return wantedDirectory;
+  public async getSubDirectoryByName(dirNameArg: string, optionsArg: {
+    /**
+     * in s3 a directory does not exist if it is empty
+     * this option returns a directory even if it is empty
+     */
+    getEmptyDirectory?: boolean;
+    /**
+     * in s3 a directory does not exist if it is empty
+     * this option creates a directory even if it is empty using a initializer file
+     */
+    createWithInitializerFile?: boolean;
+    /**
+     * if the path is a file path, it will be treated as a file and the parent directory will be returned
+     */
+    couldBeFilePath?: boolean;
+  } = {}): Promise<Directory | null> {
+
+    const dirNameArray = dirNameArg.split('/').filter(str => str.trim() !== "");
+
+    optionsArg = {
+      getEmptyDirectory: false,
+      createWithInitializerFile: false,
+      ...optionsArg,
+    }
+
+    const getDirectory = async (directoryArg: Directory, dirNameToSearch: string, isFinalDirectory: boolean) => {
+      const directories = await directoryArg.listDirectories();
+      let returnDirectory = directories.find((directory) => {
+        return directory.name === dirNameToSearch;
+      });
+      if (returnDirectory) {
+        return returnDirectory;
+      }
+      if (optionsArg.getEmptyDirectory || optionsArg.createWithInitializerFile) {
+        returnDirectory = new Directory(this.bucketRef, this, dirNameToSearch);
+      }
+      if (isFinalDirectory && optionsArg.createWithInitializerFile) {
+        returnDirectory?.createEmptyFile('00init.txt');
+      }
+      return returnDirectory || null;
+    };
+
+    if (optionsArg.couldBeFilePath) {
+      const baseDirectory = await this.bucketRef.getBaseDirectory();
+      const existingFile = await baseDirectory.getFile({
+        path: dirNameArg,
+      });
+      if (existingFile) {
+        const adjustedPath = dirNameArg.substring(0, dirNameArg.lastIndexOf('/'));
+        return this.getSubDirectoryByName(adjustedPath);
+      }
+    }
+
+    let wantedDirectory: Directory | null = null;
+    let counter = 0;
+    for (const dirNameToSearch of dirNameArray) {
+      counter++;
+      const directoryToSearchIn = wantedDirectory ? wantedDirectory : this;
+      wantedDirectory = await getDirectory(directoryToSearchIn, dirNameToSearch, counter === dirNameArray.length);
+    }
+
+    return wantedDirectory || null;
+  }
+
+  public async getSubDirectoryByNameStrict(...args: Parameters<Directory['getSubDirectoryByName']>) {
+    const directory = await this.getSubDirectoryByName(...args);
+    if (!directory) {
+      throw new Error(`Directory not found at path '${args[0]}'`);
+    }
+    return directory;
   }
 
   /**
@@ -293,7 +360,7 @@ export class Directory {
    */
     mode?: 'permanent' | 'trash';
   }) {
-    const file = await this.getFile({
+    const file = await this.getFileStrict({
       path: optionsArg.path,
     });
     await file.delete({
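The reworked `getSubDirectoryByName` above makes the S3 quirk explicit that a "directory" with no objects under it does not exist as far as listings are concerned. A sketch of the new options (not from the repository; paths are placeholders):

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function directoryOptionsDemo(bucket: smartbucket.Bucket) {
  const base = await bucket.getBaseDirectory();

  // resolves to null when no object exists under the prefix
  const maybeDir = await base.getSubDirectoryByName('reports/2024');

  // returns a Directory instance even for an empty prefix
  const emptyDir = await base.getSubDirectoryByName('reports/2024', { getEmptyDirectory: true });

  // additionally writes a 00init.txt marker so the prefix shows up in listings
  const createdDir = await base.getSubDirectoryByName('reports/2024', { createWithInitializerFile: true });

  // a file path resolves to the file's parent directory
  const parentDir = await base.getSubDirectoryByName('reports/2024/summary.txt', { couldBeFilePath: true });

  // the strict variant throws instead of returning null
  const dir = await base.getSubDirectoryByNameStrict('reports/2024', { getEmptyDirectory: true });
  console.log(maybeDir, emptyDir?.getBasePath(), createdDir?.getBasePath(), parentDir?.getBasePath(), dir.getBasePath());
}
```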
@@ -50,6 +50,10 @@ export class File {
   public parentDirectoryRef: Directory;
   public name: string;
 
+  /**
+   * get the full path to the file
+   * @returns the full path to the file
+   */
   public getBasePath(): string {
     return plugins.path.join(this.parentDirectoryRef.getBasePath(), this.name);
   }
@@ -88,24 +92,23 @@ export class File {
   /**
    * deletes this file
    */
-  public async delete(optionsArg?: {
-    mode: 'trash' | 'permanent';
-  }) {
+  public async delete(optionsArg?: { mode: 'trash' | 'permanent' }) {
 
     optionsArg = {
-      ... {
+      ...{
         mode: 'permanent',
       },
       ...optionsArg,
-    }
+    };
 
     if (optionsArg.mode === 'permanent') {
       await this.parentDirectoryRef.bucketRef.fastRemove({
         path: this.getBasePath(),
       });
       if (!this.name.endsWith('.metadata')) {
-        const metadata = await this.getMetaData();
-        await metadata.metadataFile.delete(optionsArg);
+        if (await this.hasMetaData()) {
+          const metadata = await this.getMetaData();
+          await metadata.metadataFile.delete(optionsArg);
+        }
       }
     } else if (optionsArg.mode === 'trash') {
       const metadata = await this.getMetaData();
@@ -117,8 +120,9 @@ export class File {
       },
     });
     const trash = await this.parentDirectoryRef.bucketRef.getTrash();
+    const trashDir = await trash.getTrashDir();
     await this.move({
-      directory: await trash.getTrashDir(),
+      directory: trashDir,
       path: await trash.getTrashKeyByOriginalBasePath(this.getBasePath()),
     });
   }
@@ -126,6 +130,29 @@ export class File {
     await this.parentDirectoryRef.listFiles();
   }
 
+  /**
+   * restores
+   */
+  public async restore(optionsArg: {
+    useOriginalPath?: boolean;
+    toPath?: string;
+    overwrite?: boolean;
+  } = {}) {
+    optionsArg = {
+      useOriginalPath: (() => {
+        return optionsArg.toPath ? false : true;
+      })(),
+      overwrite: false,
+      ...optionsArg,
+    };
+    const moveToPath = optionsArg.toPath || (await (await this.getMetaData()).getCustomMetaData({
+      key: 'recycle'
+    })).originalPath;
+    await this.move({
+      path: moveToPath,
+    })
+  }
+
   /**
    * allows locking the file
    * @param optionsArg
@@ -150,7 +177,7 @@ export class File {
   }) {
     const metadata = await this.getMetaData();
     await metadata.removeLock({
-      force: optionsArg?.force,
+      force: optionsArg?.force || false,
     });
   }
 
@@ -165,16 +192,19 @@ export class File {
       await this.parentDirectoryRef.bucketRef.fastPutStream({
         path: this.getBasePath(),
         readableStream: optionsArg.contents,
+        overwrite: true,
       });
     } else if (Buffer.isBuffer(optionsArg.contents)) {
       await this.parentDirectoryRef.bucketRef.fastPut({
         path: this.getBasePath(),
         contents: optionsArg.contents,
+        overwrite: true,
       });
     } else if (typeof optionsArg.contents === 'string') {
       await this.parentDirectoryRef.bucketRef.fastPut({
         path: this.getBasePath(),
         contents: Buffer.from(optionsArg.contents, optionsArg.encoding),
+        overwrite: true,
       });
     }
   }
@@ -183,23 +213,52 @@ export class File {
   /**
    * moves the file to another directory
    */
   public async move(pathDescriptorArg: interfaces.IPathDecriptor) {
-    let moveToPath = '';
+    let moveToPath: string = '';
     const isDirectory = await this.parentDirectoryRef.bucketRef.isDirectory(pathDescriptorArg);
     if (isDirectory) {
       moveToPath = await helpers.reducePathDescriptorToPath({
         ...pathDescriptorArg,
-        path: plugins.path.join(pathDescriptorArg.path, this.name),
+        path: plugins.path.join(pathDescriptorArg.path!, this.name),
       });
+    } else {
+      moveToPath = await helpers.reducePathDescriptorToPath(pathDescriptorArg);
     }
     // lets move the file
     await this.parentDirectoryRef.bucketRef.fastMove({
       sourcePath: this.getBasePath(),
       destinationPath: moveToPath,
+      overwrite: true,
     });
+
     // lets move the metadatafile
-    const metadata = await this.getMetaData();
-    await metadata.metadataFile.move(pathDescriptorArg);
+    if (!this.name.endsWith('.metadata')) {
+      const metadata = await this.getMetaData();
+      await this.parentDirectoryRef.bucketRef.fastMove({
+        sourcePath: metadata.metadataFile.getBasePath(),
+        destinationPath: moveToPath + '.metadata',
+        overwrite: true,
+      });
+    }
+
+    // lets update references of this
+    const baseDirectory = await this.parentDirectoryRef.bucketRef.getBaseDirectory();
+    this.parentDirectoryRef = await baseDirectory.getSubDirectoryByNameStrict(
+      await helpers.reducePathDescriptorToPath(pathDescriptorArg),
+      {
+        couldBeFilePath: true,
+      }
+    );
+    this.name = pathDescriptorArg.path!;
+  }
+
+  public async hasMetaData(): Promise<boolean> {
+    if (!this.name.endsWith('.metadata')) {
+      const hasMetadataBool = MetaData.hasMetaData({
+        file: this,
+      });
+      return hasMetadataBool;
+    } else {
+      return false;
+    }
   }
 
   /**
@@ -230,4 +289,11 @@ export class File {
       contents: JSON.stringify(dataArg),
     });
   }
+
+  public async getMagicBytes(optionsArg: { length: number }): Promise<Buffer> {
+    return this.parentDirectoryRef.bucketRef.getMagicBytes({
+      path: this.getBasePath(),
+      length: optionsArg.length,
+    });
+  }
 }
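`move` now also relocates the `.metadata` sidecar and updates the in-memory parent directory and name. A sketch (not from the repository; paths are placeholders):

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function moveDemo(bucket: smartbucket.Bucket) {
  const base = await bucket.getBaseDirectory();
  const file = await base.getFileStrict({ path: 'drafts/post.md' });

  // relocate the object; 'drafts/post.md.metadata' moves to 'published/post.md.metadata' as well
  await file.move({ path: 'published/post.md' });

  console.log(await file.hasMetaData()); // the sidecar travelled along with the file
}
```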
@@ -3,13 +3,21 @@ import * as plugins from './plugins.js';
 import { File } from './classes.file.js';
 
 export class MetaData {
+  public static async hasMetaData(optionsArg: { file: File }) {
+    // lets find the existing metadata file
+    const existingFile = await optionsArg.file.parentDirectoryRef.getFile({
+      path: optionsArg.file.name + '.metadata',
+    });
+    return !!existingFile;
+  }
+
   // static
   public static async createForFile(optionsArg: { file: File }) {
     const metaData = new MetaData();
     metaData.fileRef = optionsArg.file;
 
     // lets find the existing metadata file
-    metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFile({
+    metaData.metadataFile = await metaData.fileRef.parentDirectoryRef.getFileStrict({
       path: metaData.fileRef.name + '.metadata',
       createWithContents: '{}',
     });
@@ -21,20 +29,34 @@ export class MetaData {
   /**
    * the file that contains the metadata
    */
-  metadataFile: File;
+  metadataFile!: File;
 
   /**
    * the file that the metadata is for
    */
-  fileRef: File;
+  fileRef!: File;
 
   public async getFileType(optionsArg?: {
     useFileExtension?: boolean;
     useMagicBytes?: boolean;
-  }): Promise<string> {
-    if ((optionsArg && optionsArg.useFileExtension) || optionsArg.useFileExtension === undefined) {
-      return plugins.path.extname(this.fileRef.name);
+  }): Promise<plugins.smartmime.IFileTypeResult | undefined> {
+    if ((optionsArg && optionsArg.useFileExtension) || !optionsArg) {
+      const fileType = await plugins.smartmime.detectMimeType({
+        path: this.fileRef.name,
+      });
+
+      return fileType;
     }
+    if (optionsArg && optionsArg.useMagicBytes) {
+      const fileType = await plugins.smartmime.detectMimeType({
+        buffer: await this.fileRef.getMagicBytes({
+          length: 100,
+        })
+      });
+
+      return fileType;
+    }
+    throw new Error('optionsArg.useFileExtension and optionsArg.useMagicBytes cannot both be false');
   }
 
   /**
@@ -44,13 +66,13 @@ export class MetaData {
     const stat = await this.fileRef.parentDirectoryRef.bucketRef.fastStat({
       path: this.fileRef.getBasePath(),
     });
-    return stat.ContentLength;
+    return stat.ContentLength!;
   }
 
   private prefixCustomMetaData = 'custom_';
 
   public async storeCustomMetaData<T = any>(optionsArg: { key: string; value: T }) {
-    const data = await this.metadataFile.getContentsAsString();
+    const data = await this.metadataFile.getJsonData();
     data[this.prefixCustomMetaData + optionsArg.key] = optionsArg.value;
     await this.metadataFile.writeJsonData(data);
   }
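Custom metadata is stored in the sidecar JSON under a `custom_` key prefix, which is why the trash entry shows up as `custom_recycle` in the tests. A sketch of storing and reading a custom value (not from the repository; key and value are hypothetical):

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function customMetaDemo(file: smartbucket.File) {
  const meta = await file.getMetaData();

  // written into '<file>.metadata' as "custom_reviewState"
  await meta.storeCustomMetaData({ key: 'reviewState', value: { approved: true, reviewer: 'alice' } });

  // read back by key, without the prefix
  const reviewState = await meta.getCustomMetaData({ key: 'reviewState' });
  console.log(reviewState); // { approved: true, reviewer: 'alice' }
}
```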
@@ -41,7 +41,15 @@ export class SmartBucket {
     await Bucket.removeBucketByName(this, bucketName);
   }
 
-  public async getBucketByName(bucketName: string) {
-    return Bucket.getBucketByName(this, bucketName);
+  public async getBucketByName(bucketNameArg: string) {
+    return Bucket.getBucketByName(this, bucketNameArg);
+  }
+
+  public async getBucketByNameStrict(...args: Parameters<SmartBucket['getBucketByName']>) {
+    const bucket = await this.getBucketByName(...args);
+    if (!bucket) {
+      throw new Error(`Bucket ${args[0]} does not exist.`);
+    }
+    return bucket;
   }
 }
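`getBucketByNameStrict` follows the same pattern as `fastPutStrict` and `getFileStrict`: the plain lookup can resolve to null (with a log line noting the miss), the strict one throws. A short sketch (not from the repository; bucket names are placeholders):

```typescript
import * as smartbucket from '@push.rocks/smartbucket';

async function bucketLookupDemo(storage: smartbucket.SmartBucket) {
  const maybeBucket = await storage.getBucketByName('does-not-exist'); // resolves to null
  console.log(maybeBucket);

  try {
    await storage.getBucketByNameStrict('does-not-exist');
  } catch (error) {
    console.log((error as Error).message); // "Bucket does-not-exist does not exist."
  }
}
```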
@@ -21,7 +21,7 @@ export class Trash {
     const trashDir = await this.getTrashDir();
     const originalPath = await helpers.reducePathDescriptorToPath(pathDescriptor);
     const trashKey = await this.getTrashKeyByOriginalBasePath(originalPath);
-    return trashDir.getFile({ path: trashKey });
+    return trashDir.getFileStrict({ path: trashKey });
   }
 
   public async getTrashKeyByOriginalBasePath (originalPath: string): Promise<string> {
@@ -6,7 +6,8 @@
     "module": "NodeNext",
     "moduleResolution": "NodeNext",
     "esModuleInterop": true,
-    "verbatimModuleSyntax": true
+    "verbatimModuleSyntax": true,
+    "strict": true
   },
   "exclude": [
     "dist_*/**/*.d.ts"
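Turning on `strict` is what motivates the definite-assignment assertions (`tree!: string[]`, `metadataFile!: File`) and non-null assertions (`buckets.Buckets!`, `stat.ContentLength!`) added throughout this diff. A minimal illustration of the compiler rules involved, not code from the repository:

```typescript
class Example {
  // under strict mode this would error: "Property 'items' has no initializer
  // and is not definitely assigned in the constructor."
  // items: string[];

  // the definite-assignment assertion promises the compiler it is set elsewhere
  items!: string[];

  init() {
    this.items = [];
  }
}

function firstKey(keys?: string[]): string {
  // keys[0] alone errors under strictNullChecks because keys may be undefined;
  // the non-null assertion opts out for this one expression
  return keys![0];
}
```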