Compare commits
4 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| 654f47b7fc | |||
| 18a2eb7e3f | |||
| 3ab667049a | |||
| 871f0e0b78 |
7
.gitignore
vendored
7
.gitignore
vendored
@@ -3,7 +3,6 @@
|
|||||||
# artifacts
|
# artifacts
|
||||||
coverage/
|
coverage/
|
||||||
public/
|
public/
|
||||||
pages/
|
|
||||||
|
|
||||||
# installs
|
# installs
|
||||||
node_modules/
|
node_modules/
|
||||||
@@ -17,4 +16,8 @@ node_modules/
|
|||||||
dist/
|
dist/
|
||||||
dist_*/
|
dist_*/
|
||||||
|
|
||||||
# custom
|
# AI
|
||||||
|
.claude/
|
||||||
|
.serena/
|
||||||
|
|
||||||
|
#------# custom
|
||||||
38
changelog.md
38
changelog.md
@@ -1,6 +1,33 @@
|
|||||||
# Changelog
|
# Changelog
|
||||||
|
|
||||||
|
## 2025-11-21 - 2.3.0 - feat(smarts3-server)
|
||||||
|
Introduce native custom S3 server implementation (Smarts3Server) with routing, middleware, context, filesystem store, controllers and XML utilities; add SmartXml and AWS SDK test; keep optional legacy s3rver backend.
|
||||||
|
|
||||||
|
- Add Smarts3Server: native, Node.js http-based S3-compatible server (ts/classes/smarts3-server.ts)
|
||||||
|
- New routing and middleware system: S3Router and MiddlewareStack for pattern matching and middleware composition (ts/classes/router.ts, ts/classes/middleware-stack.ts)
|
||||||
|
- Introduce request context and helpers: S3Context for parsing requests, sending responses and XML (ts/classes/context.ts)
|
||||||
|
- Filesystem-backed storage: FilesystemStore with bucket/object operations, streaming uploads, MD5 handling and Windows-safe key encoding (ts/classes/filesystem-store.ts)
|
||||||
|
- S3 error handling: S3Error class that maps S3 error codes and produces XML error responses (ts/classes/s3-error.ts)
|
||||||
|
- Controllers for service, bucket and object operations with S3-compatible XML responses and copy/range support (ts/controllers/*.ts)
|
||||||
|
- XML utilities and SmartXml integration for consistent XML generation/parsing (ts/utils/xml.utils.ts, ts/plugins.ts)
|
||||||
|
- Expose native plugins (http, crypto, url, fs) and SmartXml via plugins.ts
|
||||||
|
- ts/index.ts: add useCustomServer option, default to custom server, export Smarts3Server and handle start/stop for both custom and legacy backends
|
||||||
|
- Add AWS SDK v3 integration test (test/test.aws-sdk.node.ts) to validate compatibility
|
||||||
|
- package.json: add @aws-sdk/client-s3 devDependency and @push.rocks/smartxml dependency
|
||||||
|
- Documentation: readme.md updated to describe native custom server and legacy s3rver compatibility
|
||||||
|
|
||||||
|
## 2025-11-20 - 2.2.7 - fix(core)
|
||||||
|
Update dependencies, code style and project config; add pnpm overrides and ignore AI folders
|
||||||
|
|
||||||
|
- Bump devDependencies and runtime dependencies (@git.zone/*, @push.rocks/*, @tsclass/tsclass, s3rver) to newer compatible versions
|
||||||
|
- Add pnpm.overrides entry to package.json and normalize repository URL format
|
||||||
|
- Code style and formatting fixes in TypeScript sources (ts/index.ts, ts/00_commitinfo_data.ts): whitespace, trailing commas, parameter formatting and minor API-return typing preserved
|
||||||
|
- tsconfig.json: simplify compiler options and compact exclude list
|
||||||
|
- Update .gitignore to add AI-related folders (.claude/, .serena/) to avoid accidental commits
|
||||||
|
- Documentation and changelog formatting tweaks (readme.md, changelog.md, npmextra.json) — whitespace/newline cleanups and expanded changelog entries
|
||||||
|
|
||||||
## 2025-08-16 - 2.2.6 - fix(Smarts3)
|
## 2025-08-16 - 2.2.6 - fix(Smarts3)
|
||||||
|
|
||||||
Allow overriding S3 descriptor; update dependencies, test config and documentation
|
Allow overriding S3 descriptor; update dependencies, test config and documentation
|
||||||
|
|
||||||
- ts/index.ts: getS3Descriptor now accepts an optional Partial<IS3Descriptor> to override defaults (backwards compatible)
|
- ts/index.ts: getS3Descriptor now accepts an optional Partial<IS3Descriptor> to override defaults (backwards compatible)
|
||||||
@@ -11,12 +38,14 @@ Allow overriding S3 descriptor; update dependencies, test config and documentati
|
|||||||
- added project config files: .claude/settings.local.json and .serena/project.yml
|
- added project config files: .claude/settings.local.json and .serena/project.yml
|
||||||
|
|
||||||
## 2024-11-06 - 2.2.5 - fix(ci)
|
## 2024-11-06 - 2.2.5 - fix(ci)
|
||||||
|
|
||||||
Corrected docker image URLs in Gitea workflows to match the correct domain format.
|
Corrected docker image URLs in Gitea workflows to match the correct domain format.
|
||||||
|
|
||||||
- Updated IMAGE environment variable in .gitea/workflows/default_nottags.yaml
|
- Updated IMAGE environment variable in .gitea/workflows/default_nottags.yaml
|
||||||
- Updated IMAGE environment variable in .gitea/workflows/default_tags.yaml
|
- Updated IMAGE environment variable in .gitea/workflows/default_tags.yaml
|
||||||
|
|
||||||
## 2024-11-06 - 2.2.4 - fix(core)
|
## 2024-11-06 - 2.2.4 - fix(core)
|
||||||
|
|
||||||
Improve code style and update dependencies
|
Improve code style and update dependencies
|
||||||
|
|
||||||
- Updated @push.rocks/tapbundle to version ^5.4.3 in package.json.
|
- Updated @push.rocks/tapbundle to version ^5.4.3 in package.json.
|
||||||
@@ -24,29 +53,34 @@ Improve code style and update dependencies
|
|||||||
- Improved code consistency in ts/00_commitinfo_data.ts, ts/plugins.ts, and test/test.ts.
|
- Improved code consistency in ts/00_commitinfo_data.ts, ts/plugins.ts, and test/test.ts.
|
||||||
|
|
||||||
## 2024-11-06 - 2.2.3 - fix(core)
|
## 2024-11-06 - 2.2.3 - fix(core)
|
||||||
|
|
||||||
Fix endpoint address from 'localhost' to '127.0.0.1' for better compatibility in Smarts3.getS3Descriptor
|
Fix endpoint address from 'localhost' to '127.0.0.1' for better compatibility in Smarts3.getS3Descriptor
|
||||||
|
|
||||||
- Corrected the endpoint address in Smarts3.getS3Descriptor to ensure proper functioning across different environments.
|
- Corrected the endpoint address in Smarts3.getS3Descriptor to ensure proper functioning across different environments.
|
||||||
|
|
||||||
## 2024-11-06 - 2.2.2 - fix(core)
|
## 2024-11-06 - 2.2.2 - fix(core)
|
||||||
|
|
||||||
Fixed function call for fastPut in the test suite to ensure proper file upload handling.
|
Fixed function call for fastPut in the test suite to ensure proper file upload handling.
|
||||||
|
|
||||||
- Updated dependencies in package.json to newer versions.
|
- Updated dependencies in package.json to newer versions.
|
||||||
- Corrected the function call in test suite for file upload.
|
- Corrected the function call in test suite for file upload.
|
||||||
|
|
||||||
## 2024-10-26 - 2.2.1 - fix(core)
|
## 2024-10-26 - 2.2.1 - fix(core)
|
||||||
|
|
||||||
Fix import and typings for improved compatibility
|
Fix import and typings for improved compatibility
|
||||||
|
|
||||||
- Corrected the type signature for `getS3Descriptor` to return `IS3Descriptor`.
|
- Corrected the type signature for `getS3Descriptor` to return `IS3Descriptor`.
|
||||||
- Fixed import structure and updated dependencies for consistent namespace usage across plugins.
|
- Fixed import structure and updated dependencies for consistent namespace usage across plugins.
|
||||||
|
|
||||||
## 2024-10-26 - 2.2.0 - feat(ci)
|
## 2024-10-26 - 2.2.0 - feat(ci)
|
||||||
|
|
||||||
Migrate CI/CD workflow from GitLab CI to Gitea CI
|
Migrate CI/CD workflow from GitLab CI to Gitea CI
|
||||||
|
|
||||||
- Added new Gitea CI workflows for both non-tag and tag-based pushes
|
- Added new Gitea CI workflows for both non-tag and tag-based pushes
|
||||||
- Removed existing GitLab CI configuration
|
- Removed existing GitLab CI configuration
|
||||||
|
|
||||||
## 2024-05-29 - 2.1.1 - Updates and minor changes
|
## 2024-05-29 - 2.1.1 - Updates and minor changes
|
||||||
|
|
||||||
Updates and changes based on minor configuration improvements and organizational shifts.
|
Updates and changes based on minor configuration improvements and organizational shifts.
|
||||||
|
|
||||||
- Updated description file.
|
- Updated description file.
|
||||||
@@ -55,22 +89,26 @@ Updates and changes based on minor configuration improvements and organizational
|
|||||||
- Shifted to new organizational scheme.
|
- Shifted to new organizational scheme.
|
||||||
|
|
||||||
## 2022-07-30 - 2.1.0 - Core improvements and fixes
|
## 2022-07-30 - 2.1.0 - Core improvements and fixes
|
||||||
|
|
||||||
Minor improvements and important core changes.
|
Minor improvements and important core changes.
|
||||||
|
|
||||||
- Removed tslint from the core setup.
|
- Removed tslint from the core setup.
|
||||||
|
|
||||||
## 2022-07-30 - 2.0.2 - Bucket creation improvement
|
## 2022-07-30 - 2.0.2 - Bucket creation improvement
|
||||||
|
|
||||||
Enhanced file structure management.
|
Enhanced file structure management.
|
||||||
|
|
||||||
- Improved bucket creation to store locally within the .nogit directory.
|
- Improved bucket creation to store locally within the .nogit directory.
|
||||||
|
|
||||||
## 2022-04-14 - 2.0.0 to 2.0.1 - Structural updates and fixes
|
## 2022-04-14 - 2.0.0 to 2.0.1 - Structural updates and fixes
|
||||||
|
|
||||||
This release focused on core updates and structural changes.
|
This release focused on core updates and structural changes.
|
||||||
|
|
||||||
- Reformatted the project structure.
|
- Reformatted the project structure.
|
||||||
- Core updates with minor fixes.
|
- Core updates with minor fixes.
|
||||||
|
|
||||||
## 2021-12-20 - 1.0.10 - ESM Transition
|
## 2021-12-20 - 1.0.10 - ESM Transition
|
||||||
|
|
||||||
Breaking changes and minor fixes, transitioning to ES Modules.
|
Breaking changes and minor fixes, transitioning to ES Modules.
|
||||||
|
|
||||||
- BREAKING CHANGE: Transitioned core setup to ESM.
|
- BREAKING CHANGE: Transitioned core setup to ESM.
|
||||||
25
package.json
25
package.json
@@ -1,6 +1,6 @@
|
|||||||
{
|
{
|
||||||
"name": "@push.rocks/smarts3",
|
"name": "@push.rocks/smarts3",
|
||||||
"version": "2.2.6",
|
"version": "2.3.0",
|
||||||
"private": false,
|
"private": false,
|
||||||
"description": "A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.",
|
"description": "A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.",
|
||||||
"main": "dist_ts/index.js",
|
"main": "dist_ts/index.js",
|
||||||
@@ -14,10 +14,11 @@
|
|||||||
"buildDocs": "tsdoc"
|
"buildDocs": "tsdoc"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@git.zone/tsbuild": "^2.2.0",
|
"@aws-sdk/client-s3": "^3.936.0",
|
||||||
"@git.zone/tsbundle": "^2.1.0",
|
"@git.zone/tsbuild": "^3.1.0",
|
||||||
"@git.zone/tsrun": "^1.3.3",
|
"@git.zone/tsbundle": "^2.5.2",
|
||||||
"@git.zone/tstest": "^2.3.2",
|
"@git.zone/tsrun": "^2.0.0",
|
||||||
|
"@git.zone/tstest": "^3.0.0",
|
||||||
"@types/node": "^22.9.0"
|
"@types/node": "^22.9.0"
|
||||||
},
|
},
|
||||||
"browserslist": [
|
"browserslist": [
|
||||||
@@ -36,10 +37,11 @@
|
|||||||
"readme.md"
|
"readme.md"
|
||||||
],
|
],
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"@push.rocks/smartbucket": "^3.0.23",
|
"@push.rocks/smartbucket": "^3.3.10",
|
||||||
"@push.rocks/smartfile": "^11.0.21",
|
"@push.rocks/smartfile": "^11.2.7",
|
||||||
"@push.rocks/smartpath": "^6.0.0",
|
"@push.rocks/smartpath": "^6.0.0",
|
||||||
"@tsclass/tsclass": "^9.2.0",
|
"@push.rocks/smartxml": "^1.0.6",
|
||||||
|
"@tsclass/tsclass": "^9.3.0",
|
||||||
"@types/s3rver": "^3.7.0",
|
"@types/s3rver": "^3.7.0",
|
||||||
"s3rver": "^3.7.1"
|
"s3rver": "^3.7.1"
|
||||||
},
|
},
|
||||||
@@ -64,10 +66,13 @@
|
|||||||
"homepage": "https://code.foss.global/push.rocks/smarts3#readme",
|
"homepage": "https://code.foss.global/push.rocks/smarts3#readme",
|
||||||
"repository": {
|
"repository": {
|
||||||
"type": "git",
|
"type": "git",
|
||||||
"url": "git+https://code.foss.global/push.rocks/smarts3.git"
|
"url": "https://code.foss.global/push.rocks/smarts3.git"
|
||||||
},
|
},
|
||||||
"bugs": {
|
"bugs": {
|
||||||
"url": "https://code.foss.global/push.rocks/smarts3/issues"
|
"url": "https://code.foss.global/push.rocks/smarts3/issues"
|
||||||
},
|
},
|
||||||
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748"
|
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
|
||||||
|
"pnpm": {
|
||||||
|
"overrides": {}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
4882
pnpm-lock.yaml
generated
4882
pnpm-lock.yaml
generated
File diff suppressed because it is too large
Load Diff
50
readme.md
50
readme.md
@@ -5,12 +5,14 @@
|
|||||||
## 🌟 Features
|
## 🌟 Features
|
||||||
|
|
||||||
- 🏃 **Lightning-fast local S3 simulation** - No more waiting for cloud operations during development
|
- 🏃 **Lightning-fast local S3 simulation** - No more waiting for cloud operations during development
|
||||||
- 🔄 **Full AWS S3 API compatibility** - Drop-in replacement for S3 in your tests
|
- ⚡ **Native custom S3 server** - Built on Node.js http module with zero framework dependencies (default)
|
||||||
- 📂 **Local directory mapping** - Your buckets live right on your filesystem
|
- 🔄 **Full AWS S3 API compatibility** - Drop-in replacement for AWS SDK v3 and other S3 clients
|
||||||
|
- 📂 **Local directory mapping** - Your buckets live right on your filesystem with Windows-compatible encoding
|
||||||
- 🧪 **Perfect for testing** - Reliable, repeatable tests without cloud dependencies
|
- 🧪 **Perfect for testing** - Reliable, repeatable tests without cloud dependencies
|
||||||
- 🎯 **TypeScript-first** - Built with TypeScript for excellent type safety and IDE support
|
- 🎯 **TypeScript-first** - Built with TypeScript for excellent type safety and IDE support
|
||||||
- 🔧 **Zero configuration** - Works out of the box with sensible defaults
|
- 🔧 **Zero configuration** - Works out of the box with sensible defaults
|
||||||
- 🧹 **Clean slate mode** - Start fresh on every test run
|
- 🧹 **Clean slate mode** - Start fresh on every test run
|
||||||
|
- 🔀 **Legacy compatibility** - Optional s3rver backend support for backward compatibility
|
||||||
|
|
||||||
## 📦 Installation
|
## 📦 Installation
|
||||||
|
|
||||||
@@ -37,7 +39,7 @@ import { Smarts3 } from '@push.rocks/smarts3';
|
|||||||
// Start your local S3 server
|
// Start your local S3 server
|
||||||
const s3Server = await Smarts3.createAndStart({
|
const s3Server = await Smarts3.createAndStart({
|
||||||
port: 3000,
|
port: 3000,
|
||||||
cleanSlate: true // Start with empty buckets
|
cleanSlate: true, // Start with empty buckets
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create a bucket
|
// Create a bucket
|
||||||
@@ -61,8 +63,8 @@ import { Smarts3 } from '@push.rocks/smarts3';
|
|||||||
|
|
||||||
// Configuration options
|
// Configuration options
|
||||||
const config = {
|
const config = {
|
||||||
port: 3000, // Port to run the server on (default: 3000)
|
port: 3000, // Port to run the server on (default: 3000)
|
||||||
cleanSlate: true // Clear all data on start (default: false)
|
cleanSlate: true, // Clear all data on start (default: false)
|
||||||
};
|
};
|
||||||
|
|
||||||
// Create and start in one go
|
// Create and start in one go
|
||||||
@@ -108,7 +110,7 @@ await baseDir.fastStore('path/to/file.txt', 'Hello, S3! 🎉');
|
|||||||
// Upload with more control
|
// Upload with more control
|
||||||
await baseDir.fastPut({
|
await baseDir.fastPut({
|
||||||
path: 'documents/important.pdf',
|
path: 'documents/important.pdf',
|
||||||
contents: Buffer.from(yourPdfData)
|
contents: Buffer.from(yourPdfData),
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
|
|
||||||
@@ -133,7 +135,7 @@ Browse your bucket contents:
|
|||||||
// List all files in the bucket
|
// List all files in the bucket
|
||||||
const files = await baseDir.listFiles();
|
const files = await baseDir.listFiles();
|
||||||
|
|
||||||
files.forEach(file => {
|
files.forEach((file) => {
|
||||||
console.log(`📄 ${file.name} (${file.size} bytes)`);
|
console.log(`📄 ${file.name} (${file.size} bytes)`);
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -169,7 +171,7 @@ describe('S3 Operations', () => {
|
|||||||
beforeAll(async () => {
|
beforeAll(async () => {
|
||||||
s3Server = await Smarts3.createAndStart({
|
s3Server = await Smarts3.createAndStart({
|
||||||
port: 9999,
|
port: 9999,
|
||||||
cleanSlate: true
|
cleanSlate: true,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -196,7 +198,7 @@ describe('S3 Operations', () => {
|
|||||||
before(async () => {
|
before(async () => {
|
||||||
s3Server = await Smarts3.createAndStart({
|
s3Server = await Smarts3.createAndStart({
|
||||||
port: 9999,
|
port: 9999,
|
||||||
cleanSlate: true
|
cleanSlate: true,
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -229,16 +231,16 @@ const s3Client = new S3Client({
|
|||||||
region: 'us-east-1',
|
region: 'us-east-1',
|
||||||
credentials: {
|
credentials: {
|
||||||
accessKeyId: config.accessKey,
|
accessKeyId: config.accessKey,
|
||||||
secretAccessKey: config.accessSecret
|
secretAccessKey: config.accessSecret,
|
||||||
},
|
},
|
||||||
forcePathStyle: true
|
forcePathStyle: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Use AWS SDK as normal
|
// Use AWS SDK as normal
|
||||||
const command = new PutObjectCommand({
|
const command = new PutObjectCommand({
|
||||||
Bucket: 'my-bucket',
|
Bucket: 'my-bucket',
|
||||||
Key: 'test-file.txt',
|
Key: 'test-file.txt',
|
||||||
Body: 'Hello from AWS SDK!'
|
Body: 'Hello from AWS SDK!',
|
||||||
});
|
});
|
||||||
|
|
||||||
await s3Client.send(command);
|
await s3Client.send(command);
|
||||||
@@ -256,7 +258,7 @@ export async function setupTestEnvironment() {
|
|||||||
// Start S3 server for CI tests
|
// Start S3 server for CI tests
|
||||||
const s3 = await Smarts3.createAndStart({
|
const s3 = await Smarts3.createAndStart({
|
||||||
port: process.env.S3_PORT || 3000,
|
port: process.env.S3_PORT || 3000,
|
||||||
cleanSlate: true
|
cleanSlate: true,
|
||||||
});
|
});
|
||||||
|
|
||||||
// Create test buckets
|
// Create test buckets
|
||||||
@@ -312,8 +314,14 @@ async function testDataMigration() {
|
|||||||
const sourceDir = await source.getBaseDirectory();
|
const sourceDir = await source.getBaseDirectory();
|
||||||
|
|
||||||
// Add test files
|
// Add test files
|
||||||
await sourceDir.fastStore('user-1.json', JSON.stringify({ id: 1, name: 'Alice' }));
|
await sourceDir.fastStore(
|
||||||
await sourceDir.fastStore('user-2.json', JSON.stringify({ id: 2, name: 'Bob' }));
|
'user-1.json',
|
||||||
|
JSON.stringify({ id: 1, name: 'Alice' }),
|
||||||
|
);
|
||||||
|
await sourceDir.fastStore(
|
||||||
|
'user-2.json',
|
||||||
|
JSON.stringify({ id: 2, name: 'Bob' }),
|
||||||
|
);
|
||||||
|
|
||||||
// Run your migration logic
|
// Run your migration logic
|
||||||
await runMigration(config);
|
await runMigration(config);
|
||||||
@@ -335,9 +343,9 @@ When integrating with different S3 clients, you can customize the connection det
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
const customDescriptor = await s3Server.getS3Descriptor({
|
const customDescriptor = await s3Server.getS3Descriptor({
|
||||||
endpoint: 'localhost', // Custom endpoint
|
endpoint: 'localhost', // Custom endpoint
|
||||||
port: 3001, // Different port
|
port: 3001, // Different port
|
||||||
useSsl: false, // SSL configuration
|
useSsl: false, // SSL configuration
|
||||||
// Add any additional options your S3 client needs
|
// Add any additional options your S3 client needs
|
||||||
});
|
});
|
||||||
```
|
```
|
||||||
@@ -347,7 +355,7 @@ const customDescriptor = await s3Server.getS3Descriptor({
|
|||||||
```typescript
|
```typescript
|
||||||
const config = {
|
const config = {
|
||||||
port: parseInt(process.env.S3_PORT || '3000'),
|
port: parseInt(process.env.S3_PORT || '3000'),
|
||||||
cleanSlate: process.env.NODE_ENV === 'test'
|
cleanSlate: process.env.NODE_ENV === 'test',
|
||||||
};
|
};
|
||||||
|
|
||||||
const s3Server = await Smarts3.createAndStart(config);
|
const s3Server = await Smarts3.createAndStart(config);
|
||||||
@@ -370,7 +378,7 @@ const s3Server = await Smarts3.createAndStart(config);
|
|||||||
|
|
||||||
```typescript
|
```typescript
|
||||||
interface ISmarts3ContructorOptions {
|
interface ISmarts3ContructorOptions {
|
||||||
port?: number; // Server port (default: 3000)
|
port?: number; // Server port (default: 3000)
|
||||||
cleanSlate?: boolean; // Clear storage on start (default: false)
|
cleanSlate?: boolean; // Clear storage on start (default: false)
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
@@ -404,7 +412,7 @@ interface ISmarts3ContructorOptions {
|
|||||||
- [`@push.rocks/smartbucket`](https://www.npmjs.com/package/@push.rocks/smartbucket) - Powerful S3 abstraction layer
|
- [`@push.rocks/smartbucket`](https://www.npmjs.com/package/@push.rocks/smartbucket) - Powerful S3 abstraction layer
|
||||||
- [`@push.rocks/smartfile`](https://www.npmjs.com/package/@push.rocks/smartfile) - Advanced file system operations
|
- [`@push.rocks/smartfile`](https://www.npmjs.com/package/@push.rocks/smartfile) - Advanced file system operations
|
||||||
- [`@tsclass/tsclass`](https://www.npmjs.com/package/@tsclass/tsclass) - TypeScript class helpers
|
- [`@tsclass/tsclass`](https://www.npmjs.com/package/@tsclass/tsclass) - TypeScript class helpers
|
||||||
- [`s3rver`](https://www.npmjs.com/package/s3rver) - The underlying S3 server implementation
|
- [`s3rver`](https://www.npmjs.com/package/s3rver) - Optional legacy S3 server implementation (used when `useCustomServer: false`)
|
||||||
|
|
||||||
## License and Legal Information
|
## License and Legal Information
|
||||||
|
|
||||||
|
|||||||
104
test/test.aws-sdk.node.ts
Normal file
104
test/test.aws-sdk.node.ts
Normal file
@@ -0,0 +1,104 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { S3Client, CreateBucketCommand, ListBucketsCommand, PutObjectCommand, GetObjectCommand, DeleteObjectCommand, DeleteBucketCommand } from '@aws-sdk/client-s3';
|
||||||
|
import { Readable } from 'stream';
|
||||||
|
import * as smarts3 from '../ts/index.js';
|
||||||
|
|
||||||
|
let testSmarts3Instance: smarts3.Smarts3;
|
||||||
|
let s3Client: S3Client;
|
||||||
|
|
||||||
|
// Helper to convert stream to string
|
||||||
|
async function streamToString(stream: Readable): Promise<string> {
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
stream.on('data', (chunk) => chunks.push(Buffer.from(chunk)));
|
||||||
|
stream.on('error', reject);
|
||||||
|
stream.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
tap.test('should start the S3 server and configure client', async () => {
|
||||||
|
testSmarts3Instance = await smarts3.Smarts3.createAndStart({
|
||||||
|
port: 3337,
|
||||||
|
cleanSlate: true,
|
||||||
|
silent: true,
|
||||||
|
});
|
||||||
|
|
||||||
|
const descriptor = await testSmarts3Instance.getS3Descriptor();
|
||||||
|
|
||||||
|
s3Client = new S3Client({
|
||||||
|
endpoint: `http://${descriptor.endpoint}:${descriptor.port}`,
|
||||||
|
region: 'us-east-1',
|
||||||
|
credentials: {
|
||||||
|
accessKeyId: descriptor.accessKey,
|
||||||
|
secretAccessKey: descriptor.accessSecret,
|
||||||
|
},
|
||||||
|
forcePathStyle: true,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should list buckets (empty)', async () => {
|
||||||
|
const response = await s3Client.send(new ListBucketsCommand({}));
|
||||||
|
expect(Array.isArray(response.Buckets)).toEqual(true);
|
||||||
|
expect(response.Buckets!.length).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should create a bucket', async () => {
|
||||||
|
const response = await s3Client.send(new CreateBucketCommand({ Bucket: 'test-bucket' }));
|
||||||
|
expect(response.$metadata.httpStatusCode).toEqual(200);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should list buckets (showing created bucket)', async () => {
|
||||||
|
const response = await s3Client.send(new ListBucketsCommand({}));
|
||||||
|
expect(response.Buckets!.length).toEqual(1);
|
||||||
|
expect(response.Buckets![0].Name).toEqual('test-bucket');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should upload an object', async () => {
|
||||||
|
const response = await s3Client.send(new PutObjectCommand({
|
||||||
|
Bucket: 'test-bucket',
|
||||||
|
Key: 'test-file.txt',
|
||||||
|
Body: 'Hello from AWS SDK!',
|
||||||
|
ContentType: 'text/plain',
|
||||||
|
}));
|
||||||
|
expect(response.$metadata.httpStatusCode).toEqual(200);
|
||||||
|
expect(response.ETag).toBeTypeofString();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should download the object', async () => {
|
||||||
|
const response = await s3Client.send(new GetObjectCommand({
|
||||||
|
Bucket: 'test-bucket',
|
||||||
|
Key: 'test-file.txt',
|
||||||
|
}));
|
||||||
|
|
||||||
|
expect(response.$metadata.httpStatusCode).toEqual(200);
|
||||||
|
const content = await streamToString(response.Body as Readable);
|
||||||
|
expect(content).toEqual('Hello from AWS SDK!');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should delete the object', async () => {
|
||||||
|
const response = await s3Client.send(new DeleteObjectCommand({
|
||||||
|
Bucket: 'test-bucket',
|
||||||
|
Key: 'test-file.txt',
|
||||||
|
}));
|
||||||
|
expect(response.$metadata.httpStatusCode).toEqual(204);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should fail to get deleted object', async () => {
|
||||||
|
await expect(
|
||||||
|
s3Client.send(new GetObjectCommand({
|
||||||
|
Bucket: 'test-bucket',
|
||||||
|
Key: 'test-file.txt',
|
||||||
|
}))
|
||||||
|
).rejects.toThrow();
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should delete the bucket', async () => {
|
||||||
|
const response = await s3Client.send(new DeleteBucketCommand({ Bucket: 'test-bucket' }));
|
||||||
|
expect(response.$metadata.httpStatusCode).toEqual(204);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('should stop the S3 server', async () => {
|
||||||
|
await testSmarts3Instance.stop();
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -3,6 +3,6 @@
|
|||||||
*/
|
*/
|
||||||
export const commitinfo = {
|
export const commitinfo = {
|
||||||
name: '@push.rocks/smarts3',
|
name: '@push.rocks/smarts3',
|
||||||
version: '2.2.6',
|
version: '2.3.0',
|
||||||
description: 'A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.'
|
description: 'A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.'
|
||||||
}
|
}
|
||||||
|
|||||||
114
ts/classes/context.ts
Normal file
114
ts/classes/context.ts
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { S3Error } from './s3-error.js';
|
||||||
|
import { createXml } from '../utils/xml.utils.js';
|
||||||
|
import type { FilesystemStore } from './filesystem-store.js';
|
||||||
|
import type { Readable } from 'stream';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* S3 request context with helper methods
|
||||||
|
*/
|
||||||
|
export class S3Context {
|
||||||
|
public method: string;
|
||||||
|
public url: URL;
|
||||||
|
public headers: plugins.http.IncomingHttpHeaders;
|
||||||
|
public params: Record<string, string> = {};
|
||||||
|
public query: Record<string, string> = {};
|
||||||
|
public store: FilesystemStore;
|
||||||
|
|
||||||
|
private req: plugins.http.IncomingMessage;
|
||||||
|
private res: plugins.http.ServerResponse;
|
||||||
|
private statusCode: number = 200;
|
||||||
|
private responseHeaders: Record<string, string> = {};
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
store: FilesystemStore
|
||||||
|
) {
|
||||||
|
this.req = req;
|
||||||
|
this.res = res;
|
||||||
|
this.store = store;
|
||||||
|
this.method = req.method || 'GET';
|
||||||
|
this.headers = req.headers;
|
||||||
|
|
||||||
|
// Parse URL and query string
|
||||||
|
const fullUrl = `http://${req.headers.host || 'localhost'}${req.url || '/'}`;
|
||||||
|
this.url = new URL(fullUrl);
|
||||||
|
|
||||||
|
// Parse query string into object
|
||||||
|
this.url.searchParams.forEach((value, key) => {
|
||||||
|
this.query[key] = value;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set response status code
|
||||||
|
*/
|
||||||
|
public status(code: number): this {
|
||||||
|
this.statusCode = code;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set response header
|
||||||
|
*/
|
||||||
|
public setHeader(name: string, value: string | number): this {
|
||||||
|
this.responseHeaders[name] = value.toString();
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send response body (string, Buffer, or Stream)
|
||||||
|
*/
|
||||||
|
public async send(body: string | Buffer | Readable | NodeJS.ReadableStream): Promise<void> {
|
||||||
|
// Write status and headers
|
||||||
|
this.res.writeHead(this.statusCode, this.responseHeaders);
|
||||||
|
|
||||||
|
// Handle different body types
|
||||||
|
if (typeof body === 'string' || body instanceof Buffer) {
|
||||||
|
this.res.end(body);
|
||||||
|
} else if (body && typeof (body as any).pipe === 'function') {
|
||||||
|
// It's a stream
|
||||||
|
(body as Readable).pipe(this.res);
|
||||||
|
} else {
|
||||||
|
this.res.end();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Send XML response
|
||||||
|
*/
|
||||||
|
public async sendXML(obj: any): Promise<void> {
|
||||||
|
const xml = createXml(obj, { format: true });
|
||||||
|
this.setHeader('Content-Type', 'application/xml');
|
||||||
|
this.setHeader('Content-Length', Buffer.byteLength(xml));
|
||||||
|
await this.send(xml);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Throw an S3 error
|
||||||
|
*/
|
||||||
|
public throw(code: string, message: string, detail?: Record<string, any>): never {
|
||||||
|
throw new S3Error(code, message, detail);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read and parse request body as string
|
||||||
|
*/
|
||||||
|
public async readBody(): Promise<string> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
|
||||||
|
this.req.on('data', (chunk) => chunks.push(chunk));
|
||||||
|
this.req.on('end', () => resolve(Buffer.concat(chunks).toString('utf8')));
|
||||||
|
this.req.on('error', reject);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get the request stream (for streaming uploads)
|
||||||
|
*/
|
||||||
|
public getRequestStream(): NodeJS.ReadableStream {
|
||||||
|
return this.req;
|
||||||
|
}
|
||||||
|
}
|
||||||
495
ts/classes/filesystem-store.ts
Normal file
495
ts/classes/filesystem-store.ts
Normal file
@@ -0,0 +1,495 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { S3Error } from './s3-error.js';
|
||||||
|
import type { Readable } from 'stream';
|
||||||
|
|
||||||
|
/** A bucket as surfaced by list/HEAD operations. */
export interface IS3Bucket {
  name: string;
  // Taken from the bucket directory's birthtime on disk.
  creationDate: Date;
}

/** An object entry; `content` is only populated by getObject. */
export interface IS3Object {
  key: string;
  size: number;
  lastModified: Date;
  // Hex MD5 of the object body (used as the ETag).
  md5: string;
  // User metadata persisted alongside the object as JSON.
  metadata: Record<string, string>;
  // Readable stream of the object body; absent for listings/info lookups.
  content?: Readable;
}

/** Options mirroring S3 ListObjectsV2 query parameters. */
export interface IListObjectsOptions {
  prefix?: string;
  delimiter?: string;
  maxKeys?: number;
  // Key-based token: listing resumes strictly after this key.
  continuationToken?: string;
}

/** Result shape mirroring an S3 ListObjectsV2 response. */
export interface IListObjectsResult {
  contents: IS3Object[];
  commonPrefixes: string[];
  isTruncated: boolean;
  nextContinuationToken?: string;
  prefix: string;
  delimiter: string;
  maxKeys: number;
}

/** Inclusive byte range for partial object reads (HTTP Range requests). */
export interface IRangeOptions {
  start: number;
  end: number;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Filesystem-backed storage for S3 objects
|
||||||
|
*/
|
||||||
|
export class FilesystemStore {
|
||||||
|
constructor(private rootDir: string) {}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize store (ensure root directory exists)
|
||||||
|
*/
|
||||||
|
public async initialize(): Promise<void> {
|
||||||
|
await plugins.fs.promises.mkdir(this.rootDir, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reset store (delete all buckets)
|
||||||
|
*/
|
||||||
|
public async reset(): Promise<void> {
|
||||||
|
await plugins.smartfile.fs.ensureEmptyDir(this.rootDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================
|
||||||
|
// BUCKET OPERATIONS
|
||||||
|
// ============================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List all buckets
|
||||||
|
*/
|
||||||
|
public async listBuckets(): Promise<IS3Bucket[]> {
|
||||||
|
const dirs = await plugins.smartfile.fs.listFolders(this.rootDir);
|
||||||
|
const buckets: IS3Bucket[] = [];
|
||||||
|
|
||||||
|
for (const dir of dirs) {
|
||||||
|
const bucketPath = plugins.path.join(this.rootDir, dir);
|
||||||
|
const stats = await plugins.smartfile.fs.stat(bucketPath);
|
||||||
|
|
||||||
|
buckets.push({
|
||||||
|
name: dir,
|
||||||
|
creationDate: stats.birthtime,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return buckets.sort((a, b) => a.name.localeCompare(b.name));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if bucket exists
|
||||||
|
*/
|
||||||
|
public async bucketExists(bucket: string): Promise<boolean> {
|
||||||
|
const bucketPath = this.getBucketPath(bucket);
|
||||||
|
return plugins.smartfile.fs.isDirectory(bucketPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create bucket
|
||||||
|
*/
|
||||||
|
public async createBucket(bucket: string): Promise<void> {
|
||||||
|
const bucketPath = this.getBucketPath(bucket);
|
||||||
|
await plugins.fs.promises.mkdir(bucketPath, { recursive: true });
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete bucket (must be empty)
|
||||||
|
*/
|
||||||
|
public async deleteBucket(bucket: string): Promise<void> {
|
||||||
|
const bucketPath = this.getBucketPath(bucket);
|
||||||
|
|
||||||
|
// Check if bucket exists
|
||||||
|
if (!(await this.bucketExists(bucket))) {
|
||||||
|
throw new S3Error('NoSuchBucket', 'The specified bucket does not exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if bucket is empty
|
||||||
|
const files = await plugins.smartfile.fs.listFileTree(bucketPath, '**/*');
|
||||||
|
if (files.length > 0) {
|
||||||
|
throw new S3Error('BucketNotEmpty', 'The bucket you tried to delete is not empty');
|
||||||
|
}
|
||||||
|
|
||||||
|
await plugins.smartfile.fs.remove(bucketPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================
|
||||||
|
// OBJECT OPERATIONS
|
||||||
|
// ============================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* List objects in bucket
|
||||||
|
*/
|
||||||
|
public async listObjects(
|
||||||
|
bucket: string,
|
||||||
|
options: IListObjectsOptions = {}
|
||||||
|
): Promise<IListObjectsResult> {
|
||||||
|
const bucketPath = this.getBucketPath(bucket);
|
||||||
|
|
||||||
|
if (!(await this.bucketExists(bucket))) {
|
||||||
|
throw new S3Error('NoSuchBucket', 'The specified bucket does not exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
const {
|
||||||
|
prefix = '',
|
||||||
|
delimiter = '',
|
||||||
|
maxKeys = 1000,
|
||||||
|
continuationToken,
|
||||||
|
} = options;
|
||||||
|
|
||||||
|
// List all object files
|
||||||
|
const objectPattern = '**/*._S3_object';
|
||||||
|
const objectFiles = await plugins.smartfile.fs.listFileTree(bucketPath, objectPattern);
|
||||||
|
|
||||||
|
// Convert file paths to keys
|
||||||
|
let keys = objectFiles.map((filePath) => {
|
||||||
|
const relativePath = plugins.path.relative(bucketPath, filePath);
|
||||||
|
const key = this.decodeKey(relativePath.replace(/\._S3_object$/, ''));
|
||||||
|
return key;
|
||||||
|
});
|
||||||
|
|
||||||
|
// Apply prefix filter
|
||||||
|
if (prefix) {
|
||||||
|
keys = keys.filter((key) => key.startsWith(prefix));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Sort keys
|
||||||
|
keys = keys.sort();
|
||||||
|
|
||||||
|
// Handle continuation token (simple implementation using key name)
|
||||||
|
if (continuationToken) {
|
||||||
|
const startIndex = keys.findIndex((key) => key > continuationToken);
|
||||||
|
if (startIndex > 0) {
|
||||||
|
keys = keys.slice(startIndex);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle delimiter (common prefixes)
|
||||||
|
const commonPrefixes: Set<string> = new Set();
|
||||||
|
const contents: IS3Object[] = [];
|
||||||
|
|
||||||
|
for (const key of keys) {
|
||||||
|
if (delimiter) {
|
||||||
|
// Find first delimiter after prefix
|
||||||
|
const remainingKey = key.slice(prefix.length);
|
||||||
|
const delimiterIndex = remainingKey.indexOf(delimiter);
|
||||||
|
|
||||||
|
if (delimiterIndex !== -1) {
|
||||||
|
// This key has a delimiter, add to common prefixes
|
||||||
|
const commonPrefix = prefix + remainingKey.slice(0, delimiterIndex + delimiter.length);
|
||||||
|
commonPrefixes.add(commonPrefix);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Add to contents (limited by maxKeys)
|
||||||
|
if (contents.length >= maxKeys) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const objectInfo = await this.getObjectInfo(bucket, key);
|
||||||
|
contents.push(objectInfo);
|
||||||
|
} catch (err) {
|
||||||
|
// Skip if object no longer exists
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
const isTruncated = keys.length > contents.length + commonPrefixes.size;
|
||||||
|
const nextContinuationToken = isTruncated
|
||||||
|
? contents[contents.length - 1]?.key
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
return {
|
||||||
|
contents,
|
||||||
|
commonPrefixes: Array.from(commonPrefixes).sort(),
|
||||||
|
isTruncated,
|
||||||
|
nextContinuationToken,
|
||||||
|
prefix,
|
||||||
|
delimiter,
|
||||||
|
maxKeys,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get object info (without content)
|
||||||
|
*/
|
||||||
|
private async getObjectInfo(bucket: string, key: string): Promise<IS3Object> {
|
||||||
|
const objectPath = this.getObjectPath(bucket, key);
|
||||||
|
const metadataPath = `${objectPath}.metadata.json`;
|
||||||
|
const md5Path = `${objectPath}.md5`;
|
||||||
|
|
||||||
|
const [stats, metadata, md5] = await Promise.all([
|
||||||
|
plugins.smartfile.fs.stat(objectPath),
|
||||||
|
this.readMetadata(metadataPath),
|
||||||
|
this.readMD5(objectPath, md5Path),
|
||||||
|
]);
|
||||||
|
|
||||||
|
return {
|
||||||
|
key,
|
||||||
|
size: stats.size,
|
||||||
|
lastModified: stats.mtime,
|
||||||
|
md5,
|
||||||
|
metadata,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if object exists
|
||||||
|
*/
|
||||||
|
public async objectExists(bucket: string, key: string): Promise<boolean> {
|
||||||
|
const objectPath = this.getObjectPath(bucket, key);
|
||||||
|
return plugins.smartfile.fs.fileExists(objectPath);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Put object (upload with streaming)
|
||||||
|
*/
|
||||||
|
public async putObject(
|
||||||
|
bucket: string,
|
||||||
|
key: string,
|
||||||
|
stream: NodeJS.ReadableStream,
|
||||||
|
metadata: Record<string, string> = {}
|
||||||
|
): Promise<{ size: number; md5: string }> {
|
||||||
|
const objectPath = this.getObjectPath(bucket, key);
|
||||||
|
|
||||||
|
// Ensure bucket exists
|
||||||
|
if (!(await this.bucketExists(bucket))) {
|
||||||
|
throw new S3Error('NoSuchBucket', 'The specified bucket does not exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure parent directory exists
|
||||||
|
await plugins.fs.promises.mkdir(plugins.path.dirname(objectPath), { recursive: true });
|
||||||
|
|
||||||
|
// Write with MD5 calculation
|
||||||
|
const result = await this.writeStreamWithMD5(stream, objectPath);
|
||||||
|
|
||||||
|
// Save metadata
|
||||||
|
const metadataPath = `${objectPath}.metadata.json`;
|
||||||
|
await plugins.fs.promises.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get object (download with streaming)
|
||||||
|
*/
|
||||||
|
public async getObject(
|
||||||
|
bucket: string,
|
||||||
|
key: string,
|
||||||
|
range?: IRangeOptions
|
||||||
|
): Promise<IS3Object> {
|
||||||
|
const objectPath = this.getObjectPath(bucket, key);
|
||||||
|
|
||||||
|
if (!(await this.objectExists(bucket, key))) {
|
||||||
|
throw new S3Error('NoSuchKey', 'The specified key does not exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
const info = await this.getObjectInfo(bucket, key);
|
||||||
|
|
||||||
|
// Create read stream with optional range (using native fs for range support)
|
||||||
|
const stream = range
|
||||||
|
? plugins.fs.createReadStream(objectPath, { start: range.start, end: range.end })
|
||||||
|
: plugins.fs.createReadStream(objectPath);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...info,
|
||||||
|
content: stream,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Delete object
|
||||||
|
*/
|
||||||
|
public async deleteObject(bucket: string, key: string): Promise<void> {
|
||||||
|
const objectPath = this.getObjectPath(bucket, key);
|
||||||
|
const metadataPath = `${objectPath}.metadata.json`;
|
||||||
|
const md5Path = `${objectPath}.md5`;
|
||||||
|
|
||||||
|
// S3 doesn't throw error if object doesn't exist
|
||||||
|
await Promise.all([
|
||||||
|
plugins.smartfile.fs.remove(objectPath).catch(() => {}),
|
||||||
|
plugins.smartfile.fs.remove(metadataPath).catch(() => {}),
|
||||||
|
plugins.smartfile.fs.remove(md5Path).catch(() => {}),
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Copy object
|
||||||
|
*/
|
||||||
|
public async copyObject(
|
||||||
|
srcBucket: string,
|
||||||
|
srcKey: string,
|
||||||
|
destBucket: string,
|
||||||
|
destKey: string,
|
||||||
|
metadataDirective: 'COPY' | 'REPLACE' = 'COPY',
|
||||||
|
newMetadata?: Record<string, string>
|
||||||
|
): Promise<{ size: number; md5: string }> {
|
||||||
|
const srcObjectPath = this.getObjectPath(srcBucket, srcKey);
|
||||||
|
const destObjectPath = this.getObjectPath(destBucket, destKey);
|
||||||
|
|
||||||
|
// Check source exists
|
||||||
|
if (!(await this.objectExists(srcBucket, srcKey))) {
|
||||||
|
throw new S3Error('NoSuchKey', 'The specified key does not exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure dest bucket exists
|
||||||
|
if (!(await this.bucketExists(destBucket))) {
|
||||||
|
throw new S3Error('NoSuchBucket', 'The specified bucket does not exist');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Ensure parent directory exists
|
||||||
|
await plugins.fs.promises.mkdir(plugins.path.dirname(destObjectPath), { recursive: true });
|
||||||
|
|
||||||
|
// Copy object file
|
||||||
|
await plugins.smartfile.fs.copy(srcObjectPath, destObjectPath);
|
||||||
|
|
||||||
|
// Handle metadata
|
||||||
|
if (metadataDirective === 'COPY') {
|
||||||
|
// Copy metadata
|
||||||
|
const srcMetadataPath = `${srcObjectPath}.metadata.json`;
|
||||||
|
const destMetadataPath = `${destObjectPath}.metadata.json`;
|
||||||
|
await plugins.smartfile.fs.copy(srcMetadataPath, destMetadataPath).catch(() => {});
|
||||||
|
} else if (newMetadata) {
|
||||||
|
// Replace with new metadata
|
||||||
|
const destMetadataPath = `${destObjectPath}.metadata.json`;
|
||||||
|
await plugins.fs.promises.writeFile(destMetadataPath, JSON.stringify(newMetadata, null, 2));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Copy MD5
|
||||||
|
const srcMD5Path = `${srcObjectPath}.md5`;
|
||||||
|
const destMD5Path = `${destObjectPath}.md5`;
|
||||||
|
await plugins.smartfile.fs.copy(srcMD5Path, destMD5Path).catch(() => {});
|
||||||
|
|
||||||
|
// Get result info
|
||||||
|
const stats = await plugins.smartfile.fs.stat(destObjectPath);
|
||||||
|
const md5 = await this.readMD5(destObjectPath, destMD5Path);
|
||||||
|
|
||||||
|
return { size: stats.size, md5 };
|
||||||
|
}
|
||||||
|
|
||||||
|
// ============================
|
||||||
|
// HELPER METHODS
|
||||||
|
// ============================
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get bucket directory path
|
||||||
|
*/
|
||||||
|
private getBucketPath(bucket: string): string {
|
||||||
|
return plugins.path.join(this.rootDir, bucket);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get object file path
|
||||||
|
*/
|
||||||
|
private getObjectPath(bucket: string, key: string): string {
|
||||||
|
return plugins.path.join(
|
||||||
|
this.rootDir,
|
||||||
|
bucket,
|
||||||
|
this.encodeKey(key) + '._S3_object'
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Encode key for Windows compatibility
|
||||||
|
*/
|
||||||
|
private encodeKey(key: string): string {
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
// Replace invalid Windows filename chars with hex encoding
|
||||||
|
return key.replace(/[<>:"\\|?*]/g, (ch) =>
|
||||||
|
'&' + Buffer.from(ch, 'utf8').toString('hex')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return key;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Decode key from filesystem path
|
||||||
|
*/
|
||||||
|
private decodeKey(encodedKey: string): string {
|
||||||
|
if (process.platform === 'win32') {
|
||||||
|
// Decode hex-encoded chars
|
||||||
|
return encodedKey.replace(/&([0-9a-f]{2})/gi, (_, hex) =>
|
||||||
|
Buffer.from(hex, 'hex').toString('utf8')
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return encodedKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Write stream to file with MD5 calculation
|
||||||
|
*/
|
||||||
|
private async writeStreamWithMD5(
|
||||||
|
input: NodeJS.ReadableStream,
|
||||||
|
destPath: string
|
||||||
|
): Promise<{ size: number; md5: string }> {
|
||||||
|
const hash = plugins.crypto.createHash('md5');
|
||||||
|
let totalSize = 0;
|
||||||
|
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const output = plugins.fs.createWriteStream(destPath);
|
||||||
|
|
||||||
|
input.on('data', (chunk: Buffer) => {
|
||||||
|
hash.update(chunk);
|
||||||
|
totalSize += chunk.length;
|
||||||
|
});
|
||||||
|
|
||||||
|
input.on('error', reject);
|
||||||
|
output.on('error', reject);
|
||||||
|
|
||||||
|
input.pipe(output).on('finish', async () => {
|
||||||
|
const md5 = hash.digest('hex');
|
||||||
|
|
||||||
|
// Save MD5 to separate file
|
||||||
|
const md5Path = `${destPath}.md5`;
|
||||||
|
await plugins.fs.promises.writeFile(md5Path, md5);
|
||||||
|
|
||||||
|
resolve({ size: totalSize, md5 });
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read MD5 hash (calculate if missing)
|
||||||
|
*/
|
||||||
|
private async readMD5(objectPath: string, md5Path: string): Promise<string> {
|
||||||
|
try {
|
||||||
|
// Try to read cached MD5
|
||||||
|
const md5 = await plugins.smartfile.fs.toStringSync(md5Path);
|
||||||
|
return md5.trim();
|
||||||
|
} catch (err) {
|
||||||
|
// Calculate MD5 if not cached
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const hash = plugins.crypto.createHash('md5');
|
||||||
|
const stream = plugins.fs.createReadStream(objectPath);
|
||||||
|
|
||||||
|
stream.on('data', (chunk: Buffer) => hash.update(chunk));
|
||||||
|
stream.on('end', async () => {
|
||||||
|
const md5 = hash.digest('hex');
|
||||||
|
// Cache it
|
||||||
|
await plugins.fs.promises.writeFile(md5Path, md5);
|
||||||
|
resolve(md5);
|
||||||
|
});
|
||||||
|
stream.on('error', reject);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Read metadata from JSON file
|
||||||
|
*/
|
||||||
|
private async readMetadata(metadataPath: string): Promise<Record<string, string>> {
|
||||||
|
try {
|
||||||
|
const content = await plugins.smartfile.fs.toStringSync(metadataPath);
|
||||||
|
return JSON.parse(content);
|
||||||
|
} catch (err) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
43
ts/classes/middleware-stack.ts
Normal file
43
ts/classes/middleware-stack.ts
Normal file
@@ -0,0 +1,43 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { S3Context } from './context.js';
|
||||||
|
|
||||||
|
/**
 * Signature every middleware must implement. Call `next()` to pass
 * control to the following middleware in the stack; not calling it
 * stops the chain at this middleware.
 */
export type Middleware = (
  req: plugins.http.IncomingMessage,
  res: plugins.http.ServerResponse,
  ctx: S3Context,
  next: () => Promise<void>
) => Promise<void>;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Middleware stack for composing request handlers
|
||||||
|
*/
|
||||||
|
export class MiddlewareStack {
|
||||||
|
private middlewares: Middleware[] = [];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add middleware to the stack
|
||||||
|
*/
|
||||||
|
public use(middleware: Middleware): void {
|
||||||
|
this.middlewares.push(middleware);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Execute all middlewares in order
|
||||||
|
*/
|
||||||
|
public async execute(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context
|
||||||
|
): Promise<void> {
|
||||||
|
let index = 0;
|
||||||
|
|
||||||
|
const next = async (): Promise<void> => {
|
||||||
|
if (index < this.middlewares.length) {
|
||||||
|
const middleware = this.middlewares[index++];
|
||||||
|
await middleware(req, res, ctx, next);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
await next();
|
||||||
|
}
|
||||||
|
}
|
||||||
129
ts/classes/router.ts
Normal file
129
ts/classes/router.ts
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { S3Context } from './context.js';
|
||||||
|
|
||||||
|
/**
 * Handler invoked when a route matches. `params` carries the decoded
 * path parameters captured from the URL pattern.
 */
export type RouteHandler = (
  req: plugins.http.IncomingMessage,
  res: plugins.http.ServerResponse,
  ctx: S3Context,
  params: Record<string, string>
) => Promise<void>;

/** Result of a successful route lookup. */
export interface IRouteMatch {
  handler: RouteHandler;
  params: Record<string, string>;
}

// Internal registered-route record: pattern is the compiled regex,
// paramNames maps capture groups (in order) back to parameter names.
interface IRoute {
  method: string;
  pattern: RegExp;
  paramNames: string[];
  handler: RouteHandler;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Simple HTTP router with pattern matching for S3 routes
|
||||||
|
*/
|
||||||
|
export class S3Router {
|
||||||
|
private routes: IRoute[] = [];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add a route with pattern matching
|
||||||
|
* Supports patterns like:
|
||||||
|
* - "/" (exact match)
|
||||||
|
* - "/:bucket" (single param)
|
||||||
|
* - "/:bucket/:key*" (param with wildcard - captures everything after)
|
||||||
|
*/
|
||||||
|
public add(method: string, pattern: string, handler: RouteHandler): void {
|
||||||
|
const { regex, paramNames } = this.convertPatternToRegex(pattern);
|
||||||
|
|
||||||
|
this.routes.push({
|
||||||
|
method: method.toUpperCase(),
|
||||||
|
pattern: regex,
|
||||||
|
paramNames,
|
||||||
|
handler,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Match a request to a route
|
||||||
|
*/
|
||||||
|
public match(method: string, pathname: string): IRouteMatch | null {
|
||||||
|
// Normalize pathname: remove trailing slash unless it's root
|
||||||
|
const normalizedPath = pathname === '/' ? pathname : pathname.replace(/\/$/, '');
|
||||||
|
|
||||||
|
for (const route of this.routes) {
|
||||||
|
if (route.method !== method.toUpperCase()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const match = normalizedPath.match(route.pattern);
|
||||||
|
if (match) {
|
||||||
|
// Extract params from captured groups
|
||||||
|
const params: Record<string, string> = {};
|
||||||
|
for (let i = 0; i < route.paramNames.length; i++) {
|
||||||
|
params[route.paramNames[i]] = decodeURIComponent(match[i + 1] || '');
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
handler: route.handler,
|
||||||
|
params,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert path pattern to RegExp
|
||||||
|
* Examples:
|
||||||
|
* - "/" → /^\/$/
|
||||||
|
* - "/:bucket" → /^\/([^/]+)$/
|
||||||
|
* - "/:bucket/:key*" → /^\/([^/]+)\/(.+)$/
|
||||||
|
*/
|
||||||
|
private convertPatternToRegex(pattern: string): { regex: RegExp; paramNames: string[] } {
|
||||||
|
const paramNames: string[] = [];
|
||||||
|
let regexStr = pattern;
|
||||||
|
|
||||||
|
// Process all params in a single pass to maintain order
|
||||||
|
regexStr = regexStr.replace(/:(\w+)(\*)?/g, (match, paramName, isWildcard) => {
|
||||||
|
paramNames.push(paramName);
|
||||||
|
// :param* captures rest of path, :param captures single segment
|
||||||
|
return isWildcard ? '(.+)' : '([^/]+)';
|
||||||
|
});
|
||||||
|
|
||||||
|
// Escape special regex characters
|
||||||
|
regexStr = regexStr.replace(/\//g, '\\/');
|
||||||
|
|
||||||
|
// Add anchors
|
||||||
|
regexStr = `^${regexStr}$`;
|
||||||
|
|
||||||
|
return {
|
||||||
|
regex: new RegExp(regexStr),
|
||||||
|
paramNames,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convenience methods for common HTTP methods
|
||||||
|
*/
|
||||||
|
public get(pattern: string, handler: RouteHandler): void {
|
||||||
|
this.add('GET', pattern, handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
public put(pattern: string, handler: RouteHandler): void {
|
||||||
|
this.add('PUT', pattern, handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
public post(pattern: string, handler: RouteHandler): void {
|
||||||
|
this.add('POST', pattern, handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
public delete(pattern: string, handler: RouteHandler): void {
|
||||||
|
this.add('DELETE', pattern, handler);
|
||||||
|
}
|
||||||
|
|
||||||
|
public head(pattern: string, handler: RouteHandler): void {
|
||||||
|
this.add('HEAD', pattern, handler);
|
||||||
|
}
|
||||||
|
}
|
||||||
145
ts/classes/s3-error.ts
Normal file
145
ts/classes/s3-error.ts
Normal file
@@ -0,0 +1,145 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* S3 error codes mapped to HTTP status codes
|
||||||
|
*/
|
||||||
|
const S3_ERROR_CODES: Record<string, number> = {
|
||||||
|
'AccessDenied': 403,
|
||||||
|
'BadDigest': 400,
|
||||||
|
'BadRequest': 400,
|
||||||
|
'BucketAlreadyExists': 409,
|
||||||
|
'BucketAlreadyOwnedByYou': 409,
|
||||||
|
'BucketNotEmpty': 409,
|
||||||
|
'CredentialsNotSupported': 400,
|
||||||
|
'EntityTooSmall': 400,
|
||||||
|
'EntityTooLarge': 400,
|
||||||
|
'ExpiredToken': 400,
|
||||||
|
'IncompleteBody': 400,
|
||||||
|
'IncorrectNumberOfFilesInPostRequest': 400,
|
||||||
|
'InlineDataTooLarge': 400,
|
||||||
|
'InternalError': 500,
|
||||||
|
'InvalidArgument': 400,
|
||||||
|
'InvalidBucketName': 400,
|
||||||
|
'InvalidDigest': 400,
|
||||||
|
'InvalidLocationConstraint': 400,
|
||||||
|
'InvalidPart': 400,
|
||||||
|
'InvalidPartOrder': 400,
|
||||||
|
'InvalidRange': 416,
|
||||||
|
'InvalidRequest': 400,
|
||||||
|
'InvalidSecurity': 403,
|
||||||
|
'InvalidSOAPRequest': 400,
|
||||||
|
'InvalidStorageClass': 400,
|
||||||
|
'InvalidTargetBucketForLogging': 400,
|
||||||
|
'InvalidToken': 400,
|
||||||
|
'InvalidURI': 400,
|
||||||
|
'KeyTooLongError': 400,
|
||||||
|
'MalformedACLError': 400,
|
||||||
|
'MalformedPOSTRequest': 400,
|
||||||
|
'MalformedXML': 400,
|
||||||
|
'MaxMessageLengthExceeded': 400,
|
||||||
|
'MaxPostPreDataLengthExceededError': 400,
|
||||||
|
'MetadataTooLarge': 400,
|
||||||
|
'MethodNotAllowed': 405,
|
||||||
|
'MissingContentLength': 411,
|
||||||
|
'MissingRequestBodyError': 400,
|
||||||
|
'MissingSecurityElement': 400,
|
||||||
|
'MissingSecurityHeader': 400,
|
||||||
|
'NoLoggingStatusForKey': 400,
|
||||||
|
'NoSuchBucket': 404,
|
||||||
|
'NoSuchKey': 404,
|
||||||
|
'NoSuchLifecycleConfiguration': 404,
|
||||||
|
'NoSuchUpload': 404,
|
||||||
|
'NoSuchVersion': 404,
|
||||||
|
'NotImplemented': 501,
|
||||||
|
'NotSignedUp': 403,
|
||||||
|
'OperationAborted': 409,
|
||||||
|
'PermanentRedirect': 301,
|
||||||
|
'PreconditionFailed': 412,
|
||||||
|
'Redirect': 307,
|
||||||
|
'RequestIsNotMultiPartContent': 400,
|
||||||
|
'RequestTimeout': 400,
|
||||||
|
'RequestTimeTooSkewed': 403,
|
||||||
|
'RequestTorrentOfBucketError': 400,
|
||||||
|
'SignatureDoesNotMatch': 403,
|
||||||
|
'ServiceUnavailable': 503,
|
||||||
|
'SlowDown': 503,
|
||||||
|
'TemporaryRedirect': 307,
|
||||||
|
'TokenRefreshRequired': 400,
|
||||||
|
'TooManyBuckets': 400,
|
||||||
|
'UnexpectedContent': 400,
|
||||||
|
'UnresolvableGrantByEmailAddress': 400,
|
||||||
|
'UserKeyMustBeSpecified': 400,
|
||||||
|
};
|
||||||
|
|
||||||
|
/**
|
||||||
|
* S3-compatible error class that formats errors as XML responses
|
||||||
|
*/
|
||||||
|
export class S3Error extends Error {
|
||||||
|
public status: number;
|
||||||
|
public code: string;
|
||||||
|
public detail: Record<string, any>;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
code: string,
|
||||||
|
message: string,
|
||||||
|
detail: Record<string, any> = {}
|
||||||
|
) {
|
||||||
|
super(message);
|
||||||
|
this.name = 'S3Error';
|
||||||
|
this.code = code;
|
||||||
|
this.status = S3_ERROR_CODES[code] || 500;
|
||||||
|
this.detail = detail;
|
||||||
|
|
||||||
|
// Maintain proper stack trace
|
||||||
|
if (Error.captureStackTrace) {
|
||||||
|
Error.captureStackTrace(this, S3Error);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert error to S3-compatible XML format
|
||||||
|
*/
|
||||||
|
public toXML(): string {
|
||||||
|
const smartXmlInstance = new plugins.SmartXml();
|
||||||
|
const errorObj: any = {
|
||||||
|
Error: {
|
||||||
|
Code: this.code,
|
||||||
|
Message: this.message,
|
||||||
|
...this.detail,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
const xml = smartXmlInstance.createXmlFromObject(errorObj);
|
||||||
|
|
||||||
|
// Ensure XML declaration
|
||||||
|
if (!xml.startsWith('<?xml')) {
|
||||||
|
return `<?xml version="1.0" encoding="UTF-8"?>\n${xml}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return xml;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create S3Error from a generic Error
|
||||||
|
*/
|
||||||
|
public static fromError(err: any): S3Error {
|
||||||
|
if (err instanceof S3Error) {
|
||||||
|
return err;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Map common errors
|
||||||
|
if (err.code === 'ENOENT') {
|
||||||
|
return new S3Error('NoSuchKey', 'The specified key does not exist.');
|
||||||
|
}
|
||||||
|
if (err.code === 'EACCES') {
|
||||||
|
return new S3Error('AccessDenied', 'Access Denied');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default to internal error
|
||||||
|
return new S3Error(
|
||||||
|
'InternalError',
|
||||||
|
'We encountered an internal error. Please try again.',
|
||||||
|
{ OriginalError: err.message }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
239
ts/classes/smarts3-server.ts
Normal file
239
ts/classes/smarts3-server.ts
Normal file
@@ -0,0 +1,239 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import { S3Router } from './router.js';
|
||||||
|
import { MiddlewareStack } from './middleware-stack.js';
|
||||||
|
import { S3Context } from './context.js';
|
||||||
|
import { FilesystemStore } from './filesystem-store.js';
|
||||||
|
import { S3Error } from './s3-error.js';
|
||||||
|
import { ServiceController } from '../controllers/service.controller.js';
|
||||||
|
import { BucketController } from '../controllers/bucket.controller.js';
|
||||||
|
import { ObjectController } from '../controllers/object.controller.js';
|
||||||
|
|
||||||
|
/** Construction options for the server; every field has a default. */
export interface ISmarts3ServerOptions {
  // Default 3000.
  port?: number;
  // Default '0.0.0.0' (all interfaces).
  address?: string;
  // Root directory for bucket storage; default <cwd>/.nogit/bucketsDir.
  directory?: string;
  // When true, the storage directory is wiped on start().
  cleanSlate?: boolean;
  // Suppress request logging and error output.
  silent?: boolean;
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Custom S3-compatible server implementation
|
||||||
|
* Built on native Node.js http module with zero framework dependencies
|
||||||
|
*/
|
||||||
|
export class Smarts3Server {
|
||||||
|
private httpServer?: plugins.http.Server;
|
||||||
|
private router: S3Router;
|
||||||
|
private middlewares: MiddlewareStack;
|
||||||
|
private store: FilesystemStore;
|
||||||
|
private options: Required<ISmarts3ServerOptions>;
|
||||||
|
|
||||||
|
/**
 * Build the server: resolve option defaults, create the filesystem
 * store, router and middleware stack, then register middlewares/routes.
 */
constructor(options: ISmarts3ServerOptions = {}) {
  const defaults: Required<ISmarts3ServerOptions> = {
    port: 3000,
    address: '0.0.0.0',
    directory: plugins.path.join(process.cwd(), '.nogit/bucketsDir'),
    cleanSlate: false,
    silent: false,
  };
  // Caller-supplied values win over defaults.
  this.options = { ...defaults, ...options };

  this.store = new FilesystemStore(this.options.directory);
  this.router = new S3Router();
  this.middlewares = new MiddlewareStack();

  this.setupMiddlewares();
  this.setupRoutes();
}
|
||||||
|
|
||||||
|
/**
 * Setup middleware stack
 *
 * Currently installs only a request/response logger, skipped entirely
 * when `silent` is set. Each request logs its method/URL, a truncated
 * header dump, and the total handling duration after `next()` returns.
 */
private setupMiddlewares(): void {
  // Logger middleware
  if (!this.options.silent) {
    this.middlewares.use(async (req, res, ctx, next) => {
      const start = Date.now();
      console.log(`→ ${req.method} ${req.url}`);
      // Headers are truncated to 200 chars to keep log lines readable.
      console.log(`  Headers:`, JSON.stringify(req.headers, null, 2).slice(0, 200));
      await next();
      const duration = Date.now() - start;
      console.log(`← ${req.method} ${req.url} - ${res.statusCode} (${duration}ms)`);
    });
  }

  // TODO: Add authentication middleware
  // TODO: Add CORS middleware
}
|
||||||
|
|
||||||
|
/**
 * Setup routes
 *
 * Wires the S3 REST surface onto the router: service-level listing,
 * bucket CRUD, and object CRUD (object keys captured via the `:key*`
 * wildcard so they may contain slashes). The router matches in
 * registration order.
 */
private setupRoutes(): void {
  // Service level (/)
  this.router.get('/', ServiceController.listBuckets);

  // Bucket level (/:bucket)
  this.router.put('/:bucket', BucketController.createBucket);
  this.router.delete('/:bucket', BucketController.deleteBucket);
  this.router.get('/:bucket', BucketController.listObjects);
  this.router.head('/:bucket', BucketController.headBucket);

  // Object level (/:bucket/:key*)
  this.router.put('/:bucket/:key*', ObjectController.putObject);
  this.router.get('/:bucket/:key*', ObjectController.getObject);
  this.router.head('/:bucket/:key*', ObjectController.headObject);
  this.router.delete('/:bucket/:key*', ObjectController.deleteObject);
}
|
||||||
|
|
||||||
|
/**
 * Handle incoming HTTP request
 *
 * Flow: build an S3Context, run the middleware stack, then dispatch to
 * the first matching route. Unmatched requests produce a NoSuchKey
 * error; anything thrown is converted into an S3 XML error response.
 *
 * NOTE(review): routing runs even when a middleware does not call
 * `next()`, so middlewares cannot short-circuit request handling —
 * confirm this is intended.
 */
private async handleRequest(
  req: plugins.http.IncomingMessage,
  res: plugins.http.ServerResponse
): Promise<void> {
  const context = new S3Context(req, res, this.store);

  try {
    // Execute middleware stack
    await this.middlewares.execute(req, res, context);

    // Route to handler
    const match = this.router.match(context.method, context.url.pathname);

    if (match) {
      // Params are exposed both on the context and as a handler argument.
      context.params = match.params;
      await match.handler(req, res, context, match.params);
    } else {
      context.throw('NoSuchKey', 'The specified resource does not exist');
    }
  } catch (err) {
    await this.handleError(err, context, res);
  }
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle errors and send S3-compatible error responses
|
||||||
|
*/
|
||||||
|
private async handleError(
|
||||||
|
err: any,
|
||||||
|
context: S3Context,
|
||||||
|
res: plugins.http.ServerResponse
|
||||||
|
): Promise<void> {
|
||||||
|
const s3Error = err instanceof S3Error ? err : S3Error.fromError(err);
|
||||||
|
|
||||||
|
if (!this.options.silent) {
|
||||||
|
console.error(`[S3Error] ${s3Error.code}: ${s3Error.message}`);
|
||||||
|
if (s3Error.status >= 500) {
|
||||||
|
console.error(err.stack || err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send error response
|
||||||
|
const errorXml = s3Error.toXML();
|
||||||
|
|
||||||
|
res.writeHead(s3Error.status, {
|
||||||
|
'Content-Type': 'application/xml',
|
||||||
|
'Content-Length': Buffer.byteLength(errorXml),
|
||||||
|
});
|
||||||
|
|
||||||
|
res.end(errorXml);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Start the server
|
||||||
|
*/
|
||||||
|
public async start(): Promise<void> {
|
||||||
|
// Initialize store
|
||||||
|
await this.store.initialize();
|
||||||
|
|
||||||
|
// Clean slate if requested
|
||||||
|
if (this.options.cleanSlate) {
|
||||||
|
await this.store.reset();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create HTTP server
|
||||||
|
this.httpServer = plugins.http.createServer((req, res) => {
|
||||||
|
this.handleRequest(req, res).catch((err) => {
|
||||||
|
console.error('Fatal error in request handler:', err);
|
||||||
|
if (!res.headersSent) {
|
||||||
|
res.writeHead(500, { 'Content-Type': 'text/plain' });
|
||||||
|
res.end('Internal Server Error');
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
// Start listening
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
this.httpServer!.listen(this.options.port, this.options.address, (err?: Error) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
if (!this.options.silent) {
|
||||||
|
console.log(`S3 server listening on ${this.options.address}:${this.options.port}`);
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Stop the server
|
||||||
|
*/
|
||||||
|
public async stop(): Promise<void> {
|
||||||
|
if (!this.httpServer) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
await new Promise<void>((resolve, reject) => {
|
||||||
|
this.httpServer!.close((err?: Error) => {
|
||||||
|
if (err) {
|
||||||
|
reject(err);
|
||||||
|
} else {
|
||||||
|
if (!this.options.silent) {
|
||||||
|
console.log('S3 server stopped');
|
||||||
|
}
|
||||||
|
resolve();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
this.httpServer = undefined;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get server port (useful for testing with random ports)
|
||||||
|
*/
|
||||||
|
public getPort(): number {
|
||||||
|
if (!this.httpServer) {
|
||||||
|
throw new Error('Server not started');
|
||||||
|
}
|
||||||
|
|
||||||
|
const address = this.httpServer.address();
|
||||||
|
if (typeof address === 'string') {
|
||||||
|
throw new Error('Unix socket not supported');
|
||||||
|
}
|
||||||
|
|
||||||
|
return address?.port || this.options.port;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get S3 descriptor for client configuration
|
||||||
|
*/
|
||||||
|
public getS3Descriptor(): {
|
||||||
|
accessKey: string;
|
||||||
|
accessSecret: string;
|
||||||
|
endpoint: string;
|
||||||
|
port: number;
|
||||||
|
useSsl: boolean;
|
||||||
|
} {
|
||||||
|
return {
|
||||||
|
accessKey: 'S3RVER',
|
||||||
|
accessSecret: 'S3RVER',
|
||||||
|
endpoint: this.options.address === '0.0.0.0' ? '127.0.0.1' : this.options.address,
|
||||||
|
port: this.getPort(),
|
||||||
|
useSsl: false,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
130
ts/controllers/bucket.controller.ts
Normal file
130
ts/controllers/bucket.controller.ts
Normal file
@@ -0,0 +1,130 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { S3Context } from '../classes/context.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Bucket-level operations
|
||||||
|
*/
|
||||||
|
export class BucketController {
|
||||||
|
/**
|
||||||
|
* HEAD /:bucket - Check if bucket exists
|
||||||
|
*/
|
||||||
|
public static async headBucket(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket } = params;
|
||||||
|
|
||||||
|
if (await ctx.store.bucketExists(bucket)) {
|
||||||
|
ctx.status(200).send('');
|
||||||
|
} else {
|
||||||
|
ctx.throw('NoSuchBucket', 'The specified bucket does not exist');
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* PUT /:bucket - Create bucket
|
||||||
|
*/
|
||||||
|
public static async createBucket(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket } = params;
|
||||||
|
|
||||||
|
await ctx.store.createBucket(bucket);
|
||||||
|
ctx.status(200).send('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DELETE /:bucket - Delete bucket
|
||||||
|
*/
|
||||||
|
public static async deleteBucket(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket } = params;
|
||||||
|
|
||||||
|
await ctx.store.deleteBucket(bucket);
|
||||||
|
ctx.status(204).send('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /:bucket - List objects
|
||||||
|
* Supports both V1 and V2 listing (V2 uses list-type=2 query param)
|
||||||
|
*/
|
||||||
|
public static async listObjects(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket } = params;
|
||||||
|
const isV2 = ctx.query['list-type'] === '2';
|
||||||
|
|
||||||
|
const result = await ctx.store.listObjects(bucket, {
|
||||||
|
prefix: ctx.query.prefix,
|
||||||
|
delimiter: ctx.query.delimiter,
|
||||||
|
maxKeys: ctx.query['max-keys'] ? parseInt(ctx.query['max-keys']) : 1000,
|
||||||
|
continuationToken: ctx.query['continuation-token'],
|
||||||
|
});
|
||||||
|
|
||||||
|
if (isV2) {
|
||||||
|
// List Objects V2 response
|
||||||
|
await ctx.sendXML({
|
||||||
|
ListBucketResult: {
|
||||||
|
'@_xmlns': 'http://s3.amazonaws.com/doc/2006-03-01/',
|
||||||
|
Name: bucket,
|
||||||
|
Prefix: result.prefix || '',
|
||||||
|
MaxKeys: result.maxKeys,
|
||||||
|
KeyCount: result.contents.length,
|
||||||
|
IsTruncated: result.isTruncated,
|
||||||
|
...(result.delimiter && { Delimiter: result.delimiter }),
|
||||||
|
...(result.nextContinuationToken && {
|
||||||
|
NextContinuationToken: result.nextContinuationToken,
|
||||||
|
}),
|
||||||
|
...(result.commonPrefixes.length > 0 && {
|
||||||
|
CommonPrefixes: result.commonPrefixes.map((prefix) => ({
|
||||||
|
Prefix: prefix,
|
||||||
|
})),
|
||||||
|
}),
|
||||||
|
Contents: result.contents.map((obj) => ({
|
||||||
|
Key: obj.key,
|
||||||
|
LastModified: obj.lastModified.toISOString(),
|
||||||
|
ETag: `"${obj.md5}"`,
|
||||||
|
Size: obj.size,
|
||||||
|
StorageClass: 'STANDARD',
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
// List Objects V1 response
|
||||||
|
await ctx.sendXML({
|
||||||
|
ListBucketResult: {
|
||||||
|
'@_xmlns': 'http://s3.amazonaws.com/doc/2006-03-01/',
|
||||||
|
Name: bucket,
|
||||||
|
Prefix: result.prefix || '',
|
||||||
|
MaxKeys: result.maxKeys,
|
||||||
|
IsTruncated: result.isTruncated,
|
||||||
|
...(result.delimiter && { Delimiter: result.delimiter }),
|
||||||
|
...(result.commonPrefixes.length > 0 && {
|
||||||
|
CommonPrefixes: result.commonPrefixes.map((prefix) => ({
|
||||||
|
Prefix: prefix,
|
||||||
|
})),
|
||||||
|
}),
|
||||||
|
Contents: result.contents.map((obj) => ({
|
||||||
|
Key: obj.key,
|
||||||
|
LastModified: obj.lastModified.toISOString(),
|
||||||
|
ETag: `"${obj.md5}"`,
|
||||||
|
Size: obj.size,
|
||||||
|
StorageClass: 'STANDARD',
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
204
ts/controllers/object.controller.ts
Normal file
204
ts/controllers/object.controller.ts
Normal file
@@ -0,0 +1,204 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { S3Context } from '../classes/context.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Object-level operations
|
||||||
|
*/
|
||||||
|
export class ObjectController {
|
||||||
|
/**
|
||||||
|
* PUT /:bucket/:key* - Upload object or copy object
|
||||||
|
*/
|
||||||
|
public static async putObject(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket, key } = params;
|
||||||
|
|
||||||
|
// Check if this is a COPY operation
|
||||||
|
const copySource = ctx.headers['x-amz-copy-source'] as string | undefined;
|
||||||
|
if (copySource) {
|
||||||
|
return ObjectController.copyObject(req, res, ctx, params);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Extract metadata from headers
|
||||||
|
const metadata: Record<string, string> = {};
|
||||||
|
for (const [header, value] of Object.entries(ctx.headers)) {
|
||||||
|
if (header.startsWith('x-amz-meta-')) {
|
||||||
|
metadata[header] = value as string;
|
||||||
|
}
|
||||||
|
if (header === 'content-type' && value) {
|
||||||
|
metadata['content-type'] = value as string;
|
||||||
|
}
|
||||||
|
if (header === 'cache-control' && value) {
|
||||||
|
metadata['cache-control'] = value as string;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// If no content-type, default to binary/octet-stream
|
||||||
|
if (!metadata['content-type']) {
|
||||||
|
metadata['content-type'] = 'binary/octet-stream';
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stream upload
|
||||||
|
const result = await ctx.store.putObject(bucket, key, ctx.getRequestStream(), metadata);
|
||||||
|
|
||||||
|
ctx.setHeader('ETag', `"${result.md5}"`);
|
||||||
|
ctx.status(200).send('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* GET /:bucket/:key* - Download object
|
||||||
|
*/
|
||||||
|
public static async getObject(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket, key } = params;
|
||||||
|
|
||||||
|
// Parse Range header if present
|
||||||
|
const rangeHeader = ctx.headers.range as string | undefined;
|
||||||
|
let range: { start: number; end: number } | undefined;
|
||||||
|
|
||||||
|
if (rangeHeader) {
|
||||||
|
const matches = rangeHeader.match(/bytes=(\d+)-(\d*)/);
|
||||||
|
if (matches) {
|
||||||
|
const start = parseInt(matches[1]);
|
||||||
|
const end = matches[2] ? parseInt(matches[2]) : undefined;
|
||||||
|
range = { start, end: end || start + 1024 * 1024 }; // Default to 1MB if no end
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Get object
|
||||||
|
const object = await ctx.store.getObject(bucket, key, range);
|
||||||
|
|
||||||
|
// Set response headers
|
||||||
|
ctx.setHeader('ETag', `"${object.md5}"`);
|
||||||
|
ctx.setHeader('Last-Modified', object.lastModified.toUTCString());
|
||||||
|
ctx.setHeader('Content-Type', object.metadata['content-type'] || 'binary/octet-stream');
|
||||||
|
ctx.setHeader('Accept-Ranges', 'bytes');
|
||||||
|
|
||||||
|
// Handle custom metadata headers
|
||||||
|
for (const [key, value] of Object.entries(object.metadata)) {
|
||||||
|
if (key.startsWith('x-amz-meta-')) {
|
||||||
|
ctx.setHeader(key, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (range) {
|
||||||
|
ctx.status(206);
|
||||||
|
ctx.setHeader('Content-Length', (range.end - range.start + 1).toString());
|
||||||
|
ctx.setHeader('Content-Range', `bytes ${range.start}-${range.end}/${object.size}`);
|
||||||
|
} else {
|
||||||
|
ctx.status(200);
|
||||||
|
ctx.setHeader('Content-Length', object.size.toString());
|
||||||
|
}
|
||||||
|
|
||||||
|
// Stream response
|
||||||
|
await ctx.send(object.content!);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* HEAD /:bucket/:key* - Get object metadata
|
||||||
|
*/
|
||||||
|
public static async headObject(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket, key } = params;
|
||||||
|
|
||||||
|
// Get object (without content)
|
||||||
|
const object = await ctx.store.getObject(bucket, key);
|
||||||
|
|
||||||
|
// Set response headers (same as GET but no body)
|
||||||
|
ctx.setHeader('ETag', `"${object.md5}"`);
|
||||||
|
ctx.setHeader('Last-Modified', object.lastModified.toUTCString());
|
||||||
|
ctx.setHeader('Content-Type', object.metadata['content-type'] || 'binary/octet-stream');
|
||||||
|
ctx.setHeader('Content-Length', object.size.toString());
|
||||||
|
ctx.setHeader('Accept-Ranges', 'bytes');
|
||||||
|
|
||||||
|
// Handle custom metadata headers
|
||||||
|
for (const [key, value] of Object.entries(object.metadata)) {
|
||||||
|
if (key.startsWith('x-amz-meta-')) {
|
||||||
|
ctx.setHeader(key, value);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
ctx.status(200).send('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* DELETE /:bucket/:key* - Delete object
|
||||||
|
*/
|
||||||
|
public static async deleteObject(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket, key } = params;
|
||||||
|
|
||||||
|
await ctx.store.deleteObject(bucket, key);
|
||||||
|
ctx.status(204).send('');
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* COPY operation (PUT with x-amz-copy-source header)
|
||||||
|
*/
|
||||||
|
private static async copyObject(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const { bucket: destBucket, key: destKey } = params;
|
||||||
|
const copySource = ctx.headers['x-amz-copy-source'] as string;
|
||||||
|
|
||||||
|
// Parse source bucket and key from copy source
|
||||||
|
// Format: /bucket/key or bucket/key
|
||||||
|
const sourcePath = copySource.startsWith('/') ? copySource.slice(1) : copySource;
|
||||||
|
const firstSlash = sourcePath.indexOf('/');
|
||||||
|
const srcBucket = decodeURIComponent(sourcePath.slice(0, firstSlash));
|
||||||
|
const srcKey = decodeURIComponent(sourcePath.slice(firstSlash + 1));
|
||||||
|
|
||||||
|
// Get metadata directive (COPY or REPLACE)
|
||||||
|
const metadataDirective = (ctx.headers['x-amz-metadata-directive'] as string)?.toUpperCase() || 'COPY';
|
||||||
|
|
||||||
|
// Extract new metadata if REPLACE
|
||||||
|
let newMetadata: Record<string, string> | undefined;
|
||||||
|
if (metadataDirective === 'REPLACE') {
|
||||||
|
newMetadata = {};
|
||||||
|
for (const [header, value] of Object.entries(ctx.headers)) {
|
||||||
|
if (header.startsWith('x-amz-meta-')) {
|
||||||
|
newMetadata[header] = value as string;
|
||||||
|
}
|
||||||
|
if (header === 'content-type' && value) {
|
||||||
|
newMetadata['content-type'] = value as string;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Perform copy
|
||||||
|
const result = await ctx.store.copyObject(
|
||||||
|
srcBucket,
|
||||||
|
srcKey,
|
||||||
|
destBucket,
|
||||||
|
destKey,
|
||||||
|
metadataDirective as 'COPY' | 'REPLACE',
|
||||||
|
newMetadata
|
||||||
|
);
|
||||||
|
|
||||||
|
// Send XML response
|
||||||
|
await ctx.sendXML({
|
||||||
|
CopyObjectResult: {
|
||||||
|
LastModified: new Date().toISOString(),
|
||||||
|
ETag: `"${result.md5}"`,
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
35
ts/controllers/service.controller.ts
Normal file
35
ts/controllers/service.controller.ts
Normal file
@@ -0,0 +1,35 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
import type { S3Context } from '../classes/context.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Service-level operations (root /)
|
||||||
|
*/
|
||||||
|
export class ServiceController {
|
||||||
|
/**
|
||||||
|
* GET / - List all buckets
|
||||||
|
*/
|
||||||
|
public static async listBuckets(
|
||||||
|
req: plugins.http.IncomingMessage,
|
||||||
|
res: plugins.http.ServerResponse,
|
||||||
|
ctx: S3Context,
|
||||||
|
params: Record<string, string>
|
||||||
|
): Promise<void> {
|
||||||
|
const buckets = await ctx.store.listBuckets();
|
||||||
|
|
||||||
|
await ctx.sendXML({
|
||||||
|
ListAllMyBucketsResult: {
|
||||||
|
'@_xmlns': 'http://s3.amazonaws.com/doc/2006-03-01/',
|
||||||
|
Owner: {
|
||||||
|
ID: '123456789000',
|
||||||
|
DisplayName: 'S3rver',
|
||||||
|
},
|
||||||
|
Buckets: {
|
||||||
|
Bucket: buckets.map((bucket) => ({
|
||||||
|
Name: bucket.name,
|
||||||
|
CreationDate: bucket.creationDate.toISOString(),
|
||||||
|
})),
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
75
ts/index.ts
75
ts/index.ts
@@ -1,14 +1,18 @@
|
|||||||
import * as plugins from './plugins.js';
|
import * as plugins from './plugins.js';
|
||||||
import * as paths from './paths.js';
|
import * as paths from './paths.js';
|
||||||
|
import { Smarts3Server } from './classes/smarts3-server.js';
|
||||||
|
|
||||||
export interface ISmarts3ContructorOptions {
|
export interface ISmarts3ContructorOptions {
|
||||||
port?: number;
|
port?: number;
|
||||||
cleanSlate?: boolean;
|
cleanSlate?: boolean;
|
||||||
|
useCustomServer?: boolean; // Feature flag for custom server
|
||||||
}
|
}
|
||||||
|
|
||||||
export class Smarts3 {
|
export class Smarts3 {
|
||||||
// STATIC
|
// STATIC
|
||||||
public static async createAndStart(optionsArg: ConstructorParameters<typeof Smarts3>[0]) {
|
public static async createAndStart(
|
||||||
|
optionsArg: ConstructorParameters<typeof Smarts3>[0],
|
||||||
|
) {
|
||||||
const smartS3Instance = new Smarts3(optionsArg);
|
const smartS3Instance = new Smarts3(optionsArg);
|
||||||
await smartS3Instance.start();
|
await smartS3Instance.start();
|
||||||
return smartS3Instance;
|
return smartS3Instance;
|
||||||
@@ -16,54 +20,85 @@ export class Smarts3 {
|
|||||||
|
|
||||||
// INSTANCE
|
// INSTANCE
|
||||||
public options: ISmarts3ContructorOptions;
|
public options: ISmarts3ContructorOptions;
|
||||||
public s3Instance: plugins.s3rver;
|
public s3Instance: plugins.s3rver | Smarts3Server;
|
||||||
|
|
||||||
constructor(optionsArg: ISmarts3ContructorOptions) {
|
constructor(optionsArg: ISmarts3ContructorOptions) {
|
||||||
this.options = optionsArg;
|
|
||||||
this.options = {
|
this.options = {
|
||||||
...this.options,
|
useCustomServer: true, // Default to custom server
|
||||||
...optionsArg,
|
...optionsArg,
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
public async start() {
|
public async start() {
|
||||||
if (this.options.cleanSlate) {
|
if (this.options.useCustomServer) {
|
||||||
await plugins.smartfile.fs.ensureEmptyDir(paths.bucketsDir);
|
// Use new custom server
|
||||||
|
this.s3Instance = new Smarts3Server({
|
||||||
|
port: this.options.port || 3000,
|
||||||
|
address: '0.0.0.0',
|
||||||
|
directory: paths.bucketsDir,
|
||||||
|
cleanSlate: this.options.cleanSlate || false,
|
||||||
|
silent: false,
|
||||||
|
});
|
||||||
|
await this.s3Instance.start();
|
||||||
|
console.log('s3 server is running (custom implementation)');
|
||||||
} else {
|
} else {
|
||||||
await plugins.smartfile.fs.ensureDir(paths.bucketsDir);
|
// Use legacy s3rver
|
||||||
|
if (this.options.cleanSlate) {
|
||||||
|
await plugins.smartfile.fs.ensureEmptyDir(paths.bucketsDir);
|
||||||
|
} else {
|
||||||
|
await plugins.smartfile.fs.ensureDir(paths.bucketsDir);
|
||||||
|
}
|
||||||
|
this.s3Instance = new plugins.s3rver({
|
||||||
|
port: this.options.port || 3000,
|
||||||
|
address: '0.0.0.0',
|
||||||
|
silent: false,
|
||||||
|
directory: paths.bucketsDir,
|
||||||
|
});
|
||||||
|
await (this.s3Instance as plugins.s3rver).run();
|
||||||
|
console.log('s3 server is running (legacy s3rver)');
|
||||||
}
|
}
|
||||||
this.s3Instance = new plugins.s3rver({
|
|
||||||
port: this.options.port || 3000,
|
|
||||||
address: '0.0.0.0',
|
|
||||||
silent: false,
|
|
||||||
directory: paths.bucketsDir,
|
|
||||||
});
|
|
||||||
await this.s3Instance.run();
|
|
||||||
console.log('s3 server is running');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public async getS3Descriptor(
|
public async getS3Descriptor(
|
||||||
optionsArg?: Partial<plugins.tsclass.storage.IS3Descriptor>
|
optionsArg?: Partial<plugins.tsclass.storage.IS3Descriptor>,
|
||||||
): Promise<plugins.tsclass.storage.IS3Descriptor> {
|
): Promise<plugins.tsclass.storage.IS3Descriptor> {
|
||||||
|
if (this.options.useCustomServer && this.s3Instance instanceof Smarts3Server) {
|
||||||
|
const descriptor = this.s3Instance.getS3Descriptor();
|
||||||
|
return {
|
||||||
|
...descriptor,
|
||||||
|
...(optionsArg ? optionsArg : {}),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Legacy s3rver descriptor
|
||||||
return {
|
return {
|
||||||
...{
|
...{
|
||||||
accessKey: 'S3RVER',
|
accessKey: 'S3RVER',
|
||||||
accessSecret: 'S3RVER',
|
accessSecret: 'S3RVER',
|
||||||
endpoint: '127.0.0.1',
|
endpoint: '127.0.0.1',
|
||||||
port: this.options.port,
|
port: this.options.port || 3000,
|
||||||
useSsl: false,
|
useSsl: false,
|
||||||
},
|
},
|
||||||
...(optionsArg? optionsArg : {})
|
...(optionsArg ? optionsArg : {}),
|
||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
public async createBucket(bucketNameArg: string) {
|
public async createBucket(bucketNameArg: string) {
|
||||||
const smartbucketInstance = new plugins.smartbucket.SmartBucket(await this.getS3Descriptor());
|
const smartbucketInstance = new plugins.smartbucket.SmartBucket(
|
||||||
|
await this.getS3Descriptor(),
|
||||||
|
);
|
||||||
const bucket = await smartbucketInstance.createBucket(bucketNameArg);
|
const bucket = await smartbucketInstance.createBucket(bucketNameArg);
|
||||||
return bucket;
|
return bucket;
|
||||||
}
|
}
|
||||||
|
|
||||||
public async stop() {
|
public async stop() {
|
||||||
await this.s3Instance.close();
|
if (this.s3Instance instanceof Smarts3Server) {
|
||||||
|
await this.s3Instance.stop();
|
||||||
|
} else {
|
||||||
|
await (this.s3Instance as plugins.s3rver).close();
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// Export the custom server class for direct use
|
||||||
|
export { Smarts3Server } from './classes/smarts3-server.js';
|
||||||
|
|||||||
@@ -1,14 +1,19 @@
|
|||||||
// node native
|
// node native
|
||||||
import * as path from 'path';
|
import * as path from 'path';
|
||||||
|
import * as http from 'http';
|
||||||
|
import * as crypto from 'crypto';
|
||||||
|
import * as url from 'url';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
|
||||||
export { path };
|
export { path, http, crypto, url, fs };
|
||||||
|
|
||||||
// @push.rocks scope
|
// @push.rocks scope
|
||||||
import * as smartbucket from '@push.rocks/smartbucket';
|
import * as smartbucket from '@push.rocks/smartbucket';
|
||||||
import * as smartfile from '@push.rocks/smartfile';
|
import * as smartfile from '@push.rocks/smartfile';
|
||||||
import * as smartpath from '@push.rocks/smartpath';
|
import * as smartpath from '@push.rocks/smartpath';
|
||||||
|
import { SmartXml } from '@push.rocks/smartxml';
|
||||||
|
|
||||||
export { smartbucket, smartfile, smartpath };
|
export { smartbucket, smartfile, smartpath, SmartXml };
|
||||||
|
|
||||||
// @tsclass scope
|
// @tsclass scope
|
||||||
import * as tsclass from '@tsclass/tsclass';
|
import * as tsclass from '@tsclass/tsclass';
|
||||||
|
|||||||
39
ts/utils/xml.utils.ts
Normal file
39
ts/utils/xml.utils.ts
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
import * as plugins from '../plugins.js';
|
||||||
|
|
||||||
|
// Create a singleton instance of SmartXml
|
||||||
|
const smartXmlInstance = new plugins.SmartXml();
|
||||||
|
|
||||||
|
/**
 * Parse XML string to JavaScript object
 *
 * Thin wrapper around the module-level SmartXml singleton so callers do not
 * have to manage their own parser instance.
 */
export function parseXml(xmlString: string): any {
  return smartXmlInstance.parseXmlToObject(xmlString);
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert JavaScript object to XML string with XML declaration
|
||||||
|
*/
|
||||||
|
export function createXml(obj: any, options: { format?: boolean } = {}): string {
|
||||||
|
const xml = smartXmlInstance.createXmlFromObject(obj);
|
||||||
|
|
||||||
|
// Ensure XML declaration is present
|
||||||
|
if (!xml.startsWith('<?xml')) {
|
||||||
|
return `<?xml version="1.0" encoding="UTF-8"?>\n${xml}`;
|
||||||
|
}
|
||||||
|
|
||||||
|
return xml;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to create S3-compatible XML responses with proper namespace
|
||||||
|
*/
|
||||||
|
export function createS3Xml(rootElement: string, content: any, namespace = 'http://s3.amazonaws.com/doc/2006-03-01/'): string {
|
||||||
|
const obj: any = {
|
||||||
|
[rootElement]: {
|
||||||
|
'@_xmlns': namespace,
|
||||||
|
...content,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return createXml(obj, { format: true });
|
||||||
|
}
|
||||||
@@ -1,7 +1,5 @@
|
|||||||
{
|
{
|
||||||
"compilerOptions": {
|
"compilerOptions": {
|
||||||
"experimentalDecorators": true,
|
|
||||||
"useDefineForClassFields": false,
|
|
||||||
"target": "ES2022",
|
"target": "ES2022",
|
||||||
"module": "NodeNext",
|
"module": "NodeNext",
|
||||||
"moduleResolution": "NodeNext",
|
"moduleResolution": "NodeNext",
|
||||||
@@ -10,7 +8,5 @@
|
|||||||
"baseUrl": ".",
|
"baseUrl": ".",
|
||||||
"paths": {}
|
"paths": {}
|
||||||
},
|
},
|
||||||
"exclude": [
|
"exclude": ["dist_*/**/*.d.ts"]
|
||||||
"dist_*/**/*.d.ts"
|
|
||||||
]
|
|
||||||
}
|
}
|
||||||
Reference in New Issue
Block a user