Compare commits

6 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | c074a5d2ed |  |
|  | a9ba9de6be |  |
|  | 263e7a58b9 |  |
|  | 74b81d7ba8 |  |
|  | 0d4837184f |  |
|  | 7f3de92961 |  |
changelog.md (+25)

@@ -1,5 +1,30 @@
 # Changelog
 
+## 2025-11-23 - 4.0.0 - BREAKING CHANGE(Smarts3)
+Migrate Smarts3 configuration to nested server/storage objects and remove legacy flat config support
+
+- Smarts3.createAndStart() and Smarts3 constructor now accept ISmarts3Config with nested `server` and `storage` objects.
+- Removed support for the legacy flat config shape (top-level `port` and `cleanSlate`) / ILegacySmarts3Config.
+- Updated tests to use new config shape (server:{ port, silent } and storage:{ cleanSlate }).
+- mergeConfig and Smarts3Server now rely on the nested config shape; consumers must update their initialization code.
+
+## 2025-11-23 - 3.2.0 - feat(multipart)
+Add multipart upload support with MultipartUploadManager and controller integration
+
+- Introduce MultipartUploadManager (ts/classes/multipart-manager.ts) to manage multipart upload lifecycle and store parts on disk
+- Wire multipart manager into server and request context (S3Context, Smarts3Server) and initialize multipart storage on server start
+- Add multipart-related routes and handlers in ObjectController: initiate (POST ?uploads), upload part (PUT ?partNumber&uploadId), complete (POST ?uploadId), and abort (DELETE ?uploadId)
+- On complete, combine parts into final object and store via existing FilesystemStore workflow
+- Expose multipart manager on Smarts3Server for controller access
+
+## 2025-11-23 - 3.1.0 - feat(logging)
+Add structured Logger and integrate into Smarts3Server; pass full config to server
+
+- Introduce a new Logger class (ts/classes/logger.ts) providing leveled logging (error, warn, info, debug), text/json formats and an enable flag.
+- Integrate Logger into Smarts3Server: use structured logging for server lifecycle events, HTTP request/response logging and S3 errors instead of direct console usage.
+- Smarts3 now passes the full merged configuration into Smarts3Server (config.logging can control logging behavior).
+- Server start/stop messages and internal request/error logs are emitted via the Logger and respect the configured logging level/format and silent option.
+
 ## 2025-11-23 - 3.0.4 - fix(smarts3)
 Use filesystem store for bucket creation and remove smartbucket runtime dependency
 
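For consumers upgrading across the 4.0.0 breaking change above, a minimal before/after sketch of the config migration; it mirrors the nested shape used in the updated tests further down (any ISmarts3Config fields beyond `server` and `storage` are not shown here, and the `getS3Descriptor()` comment only reflects how the tests use it):

```typescript
import * as smarts3 from '@push.rocks/smarts3';

// Before (3.x flat config, removed in 4.0.0):
// const instance = await smarts3.Smarts3.createAndStart({ port: 3333, cleanSlate: true });

// After (4.0.0 nested ISmarts3Config):
const instance = await smarts3.Smarts3.createAndStart({
  server: {
    port: 3333,
    silent: true,
  },
  storage: {
    cleanSlate: true,
  },
});

// Connection details used to configure an S3 client in the tests
const descriptor = await instance.getS3Descriptor();
```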
package.json

@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smarts3",
-  "version": "3.0.4",
+  "version": "4.0.0",
   "private": false,
   "description": "A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.",
   "main": "dist_ts/index.js",
@@ -18,9 +18,13 @@ async function streamToString(stream: Readable): Promise<string> {
 
 tap.test('should start the S3 server and configure client', async () => {
   testSmarts3Instance = await smarts3.Smarts3.createAndStart({
-    port: 3337,
-    cleanSlate: true,
-    silent: true,
+    server: {
+      port: 3337,
+      silent: true,
+    },
+    storage: {
+      cleanSlate: true,
+    },
   });
 
   const descriptor = await testSmarts3Instance.getS3Descriptor();
@@ -7,8 +7,12 @@ let testSmarts3Instance: smarts3.Smarts3;
 
 tap.test('should create a smarts3 instance and run it', async (toolsArg) => {
   testSmarts3Instance = await smarts3.Smarts3.createAndStart({
-    port: 3333,
-    cleanSlate: true,
+    server: {
+      port: 3333,
+    },
+    storage: {
+      cleanSlate: true,
+    },
   });
   console.log(`Let the instance run for 2 seconds`);
   await toolsArg.delayFor(2000);
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smarts3',
-  version: '3.0.4',
+  version: '4.0.0',
   description: 'A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.'
 }
ts/classes/context.ts

@@ -2,6 +2,7 @@ import * as plugins from '../plugins.js';
 import { S3Error } from './s3-error.js';
 import { createXml } from '../utils/xml.utils.js';
 import type { FilesystemStore } from './filesystem-store.js';
+import type { MultipartUploadManager } from './multipart-manager.js';
 import type { Readable } from 'stream';
 
 /**
@@ -14,6 +15,7 @@ export class S3Context {
   public params: Record<string, string> = {};
   public query: Record<string, string> = {};
   public store: FilesystemStore;
+  public multipart: MultipartUploadManager;
 
   private req: plugins.http.IncomingMessage;
   private res: plugins.http.ServerResponse;
@@ -23,11 +25,13 @@ export class S3Context {
   constructor(
     req: plugins.http.IncomingMessage,
     res: plugins.http.ServerResponse,
-    store: FilesystemStore
+    store: FilesystemStore,
+    multipart: MultipartUploadManager
   ) {
     this.req = req;
     this.res = res;
     this.store = store;
+    this.multipart = multipart;
     this.method = req.method || 'GET';
     this.headers = req.headers;
 
ts/classes/logger.ts (new file, +130)

@@ -0,0 +1,130 @@
import type { ILoggingConfig } from '../index.js';

/**
 * Log levels in order of severity
 */
const LOG_LEVELS = {
  error: 0,
  warn: 1,
  info: 2,
  debug: 3,
} as const;

type LogLevel = keyof typeof LOG_LEVELS;

/**
 * Structured logger with configurable levels and formats
 */
export class Logger {
  private config: Required<ILoggingConfig>;
  private minLevel: number;

  constructor(config: ILoggingConfig) {
    // Apply defaults for any missing config
    this.config = {
      level: config.level ?? 'info',
      format: config.format ?? 'text',
      enabled: config.enabled ?? true,
    };
    this.minLevel = LOG_LEVELS[this.config.level];
  }

  /**
   * Check if a log level should be output
   */
  private shouldLog(level: LogLevel): boolean {
    if (!this.config.enabled) {
      return false;
    }
    return LOG_LEVELS[level] <= this.minLevel;
  }

  /**
   * Format a log message
   */
  private format(level: LogLevel, message: string, meta?: Record<string, any>): string {
    const timestamp = new Date().toISOString();

    if (this.config.format === 'json') {
      return JSON.stringify({
        timestamp,
        level,
        message,
        ...(meta || {}),
      });
    }

    // Text format
    const metaStr = meta ? ` ${JSON.stringify(meta)}` : '';
    return `[${timestamp}] ${level.toUpperCase()}: ${message}${metaStr}`;
  }

  /**
   * Log at error level
   */
  public error(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('error')) {
      console.error(this.format('error', message, meta));
    }
  }

  /**
   * Log at warn level
   */
  public warn(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('warn')) {
      console.warn(this.format('warn', message, meta));
    }
  }

  /**
   * Log at info level
   */
  public info(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('info')) {
      console.log(this.format('info', message, meta));
    }
  }

  /**
   * Log at debug level
   */
  public debug(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('debug')) {
      console.log(this.format('debug', message, meta));
    }
  }

  /**
   * Log HTTP request
   */
  public request(method: string, url: string, meta?: Record<string, any>): void {
    this.info(`→ ${method} ${url}`, meta);
  }

  /**
   * Log HTTP response
   */
  public response(method: string, url: string, statusCode: number, duration: number): void {
    const level: LogLevel = statusCode >= 500 ? 'error' : statusCode >= 400 ? 'warn' : 'info';

    if (this.shouldLog(level)) {
      const message = `← ${method} ${url} - ${statusCode} (${duration}ms)`;

      if (level === 'error') {
        this.error(message, { statusCode, duration });
      } else if (level === 'warn') {
        this.warn(message, { statusCode, duration });
      } else {
        this.info(message, { statusCode, duration });
      }
    }
  }

  /**
   * Log S3 error
   */
  public s3Error(code: string, message: string, status: number): void {
    this.error(`[S3Error] ${code}: ${message}`, { code, status });
  }
}
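A short standalone usage sketch of this Logger; the relative import path is illustrative (it matches how the server imports it from within ts/classes), and the sample output lines are abbreviated:

```typescript
import { Logger } from './logger.js'; // illustrative path, as used within ts/classes

// Text output, everything up to debug level
const logger = new Logger({ level: 'debug', format: 'text', enabled: true });

logger.info('server starting', { port: 3333 });
// -> [2025-11-23T...] INFO: server starting {"port":3333}

logger.request('PUT', '/my-bucket/my-key');            // info: "→ PUT /my-bucket/my-key"
logger.response('PUT', '/my-bucket/my-key', 200, 12);  // info: "← PUT /my-bucket/my-key - 200 (12ms)"
logger.response('GET', '/missing-key', 404, 3);        // 4xx responses log at warn, 5xx at error
logger.s3Error('NoSuchKey', 'The specified key does not exist.', 404);

// JSON format emits one JSON object per line instead
const jsonLogger = new Logger({ level: 'info', format: 'json', enabled: true });
jsonLogger.warn('bucket not found', { bucket: 'missing' });
// -> {"timestamp":"...","level":"warn","message":"bucket not found","bucket":"missing"}

// enabled: false makes shouldLog() return false, so nothing is printed
new Logger({ level: 'info', format: 'text', enabled: false }).error('never printed');
```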
ts/classes/multipart-manager.ts (new file, +238)

@@ -0,0 +1,238 @@
import * as plugins from '../plugins.js';
import { Readable } from 'stream';

/**
 * Multipart upload metadata
 */
export interface IMultipartUpload {
  uploadId: string;
  bucket: string;
  key: string;
  initiated: Date;
  parts: Map<number, IPartInfo>;
  metadata: Record<string, string>;
}

/**
 * Part information
 */
export interface IPartInfo {
  partNumber: number;
  etag: string;
  size: number;
  lastModified: Date;
}

/**
 * Manages multipart upload state and storage
 */
export class MultipartUploadManager {
  private uploads: Map<string, IMultipartUpload> = new Map();
  private uploadDir: string;

  constructor(private rootDir: string) {
    this.uploadDir = plugins.path.join(rootDir, '.multipart');
  }

  /**
   * Initialize multipart uploads directory
   */
  public async initialize(): Promise<void> {
    await plugins.smartfs.directory(this.uploadDir).recursive().create();
  }

  /**
   * Generate a unique upload ID
   */
  private generateUploadId(): string {
    return plugins.crypto.randomBytes(16).toString('hex');
  }

  /**
   * Initiate a new multipart upload
   */
  public async initiateUpload(
    bucket: string,
    key: string,
    metadata: Record<string, string>
  ): Promise<string> {
    const uploadId = this.generateUploadId();

    this.uploads.set(uploadId, {
      uploadId,
      bucket,
      key,
      initiated: new Date(),
      parts: new Map(),
      metadata,
    });

    // Create directory for this upload's parts
    const uploadPath = plugins.path.join(this.uploadDir, uploadId);
    await plugins.smartfs.directory(uploadPath).recursive().create();

    return uploadId;
  }

  /**
   * Upload a part
   */
  public async uploadPart(
    uploadId: string,
    partNumber: number,
    stream: Readable
  ): Promise<IPartInfo> {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }

    const partPath = plugins.path.join(this.uploadDir, uploadId, `part-${partNumber}`);

    // Write part to disk
    const webWriteStream = await plugins.smartfs.file(partPath).writeStream();
    const writer = webWriteStream.getWriter();

    let size = 0;
    const hash = plugins.crypto.createHash('md5');

    for await (const chunk of stream) {
      const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
      await writer.write(new Uint8Array(buffer));
      hash.update(buffer);
      size += buffer.length;
    }

    await writer.close();

    const etag = hash.digest('hex');

    const partInfo: IPartInfo = {
      partNumber,
      etag,
      size,
      lastModified: new Date(),
    };

    upload.parts.set(partNumber, partInfo);

    return partInfo;
  }

  /**
   * Complete multipart upload - combine all parts
   */
  public async completeUpload(
    uploadId: string,
    parts: Array<{ PartNumber: number; ETag: string }>
  ): Promise<{ etag: string; size: number }> {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }

    // Verify all parts are uploaded
    for (const part of parts) {
      const uploadedPart = upload.parts.get(part.PartNumber);
      if (!uploadedPart) {
        throw new Error(`Part ${part.PartNumber} not uploaded`);
      }
      // Normalize ETag format (remove quotes if present)
      const normalizedETag = part.ETag.replace(/"/g, '');
      if (uploadedPart.etag !== normalizedETag) {
        throw new Error(`Part ${part.PartNumber} ETag mismatch`);
      }
    }

    // Sort parts by part number
    const sortedParts = parts.sort((a, b) => a.PartNumber - b.PartNumber);

    // Combine parts into final object
    const finalPath = plugins.path.join(this.uploadDir, uploadId, 'final');
    const webWriteStream = await plugins.smartfs.file(finalPath).writeStream();
    const writer = webWriteStream.getWriter();

    const hash = plugins.crypto.createHash('md5');
    let totalSize = 0;

    for (const part of sortedParts) {
      const partPath = plugins.path.join(this.uploadDir, uploadId, `part-${part.PartNumber}`);

      // Read part and write to final file
      const partContent = await plugins.smartfs.file(partPath).read();
      const buffer = Buffer.isBuffer(partContent) ? partContent : Buffer.from(partContent as string);

      await writer.write(new Uint8Array(buffer));
      hash.update(buffer);
      totalSize += buffer.length;
    }

    await writer.close();

    const etag = hash.digest('hex');

    return { etag, size: totalSize };
  }

  /**
   * Get the final combined file path
   */
  public getFinalPath(uploadId: string): string {
    return plugins.path.join(this.uploadDir, uploadId, 'final');
  }

  /**
   * Get upload metadata
   */
  public getUpload(uploadId: string): IMultipartUpload | undefined {
    return this.uploads.get(uploadId);
  }

  /**
   * Abort multipart upload - clean up parts
   */
  public async abortUpload(uploadId: string): Promise<void> {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }

    // Delete upload directory
    const uploadPath = plugins.path.join(this.uploadDir, uploadId);
    await plugins.smartfs.directory(uploadPath).recursive().delete();

    // Remove from memory
    this.uploads.delete(uploadId);
  }

  /**
   * Clean up upload after completion
   */
  public async cleanupUpload(uploadId: string): Promise<void> {
    const uploadPath = plugins.path.join(this.uploadDir, uploadId);
    await plugins.smartfs.directory(uploadPath).recursive().delete();
    this.uploads.delete(uploadId);
  }

  /**
   * List all in-progress uploads for a bucket
   */
  public listUploads(bucket?: string): IMultipartUpload[] {
    const uploads = Array.from(this.uploads.values());
    if (bucket) {
      return uploads.filter((u) => u.bucket === bucket);
    }
    return uploads;
  }

  /**
   * List parts for an upload
   */
  public listParts(uploadId: string): IPartInfo[] {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }
    return Array.from(upload.parts.values()).sort((a, b) => a.partNumber - b.partNumber);
  }
}
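A minimal lifecycle sketch for MultipartUploadManager used in isolation (inside the server it is created with the storage directory and reached through ctx.multipart); the directory, bucket, key, and part contents below are illustrative:

```typescript
import { Readable } from 'stream';
import { MultipartUploadManager } from './multipart-manager.js'; // illustrative path

const manager = new MultipartUploadManager('.nogit/bucketsDir');
await manager.initialize(); // creates <rootDir>/.multipart

// 1. Initiate: returns a random 32-hex-char uploadId and creates .multipart/<uploadId>/
const uploadId = await manager.initiateUpload('my-bucket', 'big-file.bin', {
  'content-type': 'application/octet-stream',
});

// 2. Upload parts: each part is written to .multipart/<uploadId>/part-<n>
//    and MD5-hashed to produce its ETag
const part1 = await manager.uploadPart(uploadId, 1, Readable.from([Buffer.from('hello ')]));
const part2 = await manager.uploadPart(uploadId, 2, Readable.from([Buffer.from('world')]));

// 3. Complete: verifies part numbers/ETags, concatenates parts into .multipart/<uploadId>/final
const { etag, size } = await manager.completeUpload(uploadId, [
  { PartNumber: 1, ETag: part1.etag },
  { PartNumber: 2, ETag: part2.etag },
]);
// etag = MD5 of the concatenated bytes, size = 11

// The controller then streams manager.getFinalPath(uploadId) into the FilesystemStore
// and calls cleanupUpload(uploadId); abortUpload(uploadId) discards everything instead.
```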
@@ -4,9 +4,12 @@ import { MiddlewareStack } from './middleware-stack.js';
 import { S3Context } from './context.js';
 import { FilesystemStore } from './filesystem-store.js';
 import { S3Error } from './s3-error.js';
+import { Logger } from './logger.js';
+import { MultipartUploadManager } from './multipart-manager.js';
 import { ServiceController } from '../controllers/service.controller.js';
 import { BucketController } from '../controllers/bucket.controller.js';
 import { ObjectController } from '../controllers/object.controller.js';
+import type { ISmarts3Config } from '../index.js';
 
 export interface ISmarts3ServerOptions {
   port?: number;
@@ -14,6 +17,7 @@ export interface ISmarts3ServerOptions {
   directory?: string;
   cleanSlate?: boolean;
   silent?: boolean;
+  config?: Required<ISmarts3Config>;
 }
 
 /**
@@ -25,19 +29,60 @@ export class Smarts3Server {
   private router: S3Router;
   private middlewares: MiddlewareStack;
   public store: FilesystemStore; // Made public for direct access from Smarts3 class
-  private options: Required<ISmarts3ServerOptions>;
+  public multipart: MultipartUploadManager; // Made public for controller access
+  private options: Required<Omit<ISmarts3ServerOptions, 'config'>>;
+  private config: Required<ISmarts3Config>;
+  private logger: Logger;
 
   constructor(options: ISmarts3ServerOptions = {}) {
     this.options = {
-      port: 3000,
-      address: '0.0.0.0',
-      directory: plugins.path.join(process.cwd(), '.nogit/bucketsDir'),
-      cleanSlate: false,
-      silent: false,
-      ...options,
+      port: options.port ?? 3000,
+      address: options.address ?? '0.0.0.0',
+      directory: options.directory ?? plugins.path.join(process.cwd(), '.nogit/bucketsDir'),
+      cleanSlate: options.cleanSlate ?? false,
+      silent: options.silent ?? false,
     };
 
+    // Store config for middleware and feature configuration
+    // If no config provided, create minimal default (for backward compatibility)
+    this.config = options.config ?? {
+      server: {
+        port: this.options.port,
+        address: this.options.address,
+        silent: this.options.silent,
+      },
+      storage: {
+        directory: this.options.directory,
+        cleanSlate: this.options.cleanSlate,
+      },
+      auth: {
+        enabled: false,
+        credentials: [{ accessKeyId: 'S3RVER', secretAccessKey: 'S3RVER' }],
+      },
+      cors: {
+        enabled: false,
+        allowedOrigins: ['*'],
+        allowedMethods: ['GET', 'POST', 'PUT', 'DELETE', 'HEAD', 'OPTIONS'],
+        allowedHeaders: ['*'],
+        exposedHeaders: ['ETag', 'x-amz-request-id', 'x-amz-version-id'],
+        maxAge: 86400,
+        allowCredentials: false,
+      },
+      logging: {
+        level: 'info',
+        format: 'text',
+        enabled: true,
+      },
+      limits: {
+        maxObjectSize: 5 * 1024 * 1024 * 1024,
+        maxMetadataSize: 2048,
+        requestTimeout: 300000,
+      },
+    };
+
+    this.logger = new Logger(this.config.logging);
     this.store = new FilesystemStore(this.options.directory);
+    this.multipart = new MultipartUploadManager(this.options.directory);
     this.router = new S3Router();
     this.middlewares = new MiddlewareStack();
 
@@ -49,20 +94,118 @@ export class Smarts3Server {
    * Setup middleware stack
    */
   private setupMiddlewares(): void {
-    // Logger middleware
-    if (!this.options.silent) {
+    // CORS middleware (must be first to handle preflight requests)
+    if (this.config.cors.enabled) {
       this.middlewares.use(async (req, res, ctx, next) => {
-        const start = Date.now();
-        console.log(`→ ${req.method} ${req.url}`);
-        console.log(`  Headers:`, JSON.stringify(req.headers, null, 2).slice(0, 200));
+        const origin = req.headers.origin || req.headers.referer;
+
+        // Check if origin is allowed
+        const allowedOrigins = this.config.cors.allowedOrigins || ['*'];
+        const isOriginAllowed =
+          allowedOrigins.includes('*') ||
+          (origin && allowedOrigins.includes(origin));
+
+        if (isOriginAllowed) {
+          // Set CORS headers
+          res.setHeader(
+            'Access-Control-Allow-Origin',
+            allowedOrigins.includes('*') ? '*' : origin || '*'
+          );
+
+          if (this.config.cors.allowCredentials) {
+            res.setHeader('Access-Control-Allow-Credentials', 'true');
+          }
+
+          // Handle preflight OPTIONS request
+          if (req.method === 'OPTIONS') {
+            res.setHeader(
+              'Access-Control-Allow-Methods',
+              (this.config.cors.allowedMethods || []).join(', ')
+            );
+            res.setHeader(
+              'Access-Control-Allow-Headers',
+              (this.config.cors.allowedHeaders || []).join(', ')
+            );
+            if (this.config.cors.maxAge) {
+              res.setHeader(
+                'Access-Control-Max-Age',
+                String(this.config.cors.maxAge)
+              );
+            }
+            res.writeHead(204);
+            res.end();
+            return; // Don't call next() for OPTIONS
+          }
+
+          // Set exposed headers for actual requests
+          if (this.config.cors.exposedHeaders && this.config.cors.exposedHeaders.length > 0) {
+            res.setHeader(
+              'Access-Control-Expose-Headers',
+              this.config.cors.exposedHeaders.join(', ')
+            );
+          }
+        }
+
         await next();
-        const duration = Date.now() - start;
-        console.log(`← ${req.method} ${req.url} - ${res.statusCode} (${duration}ms)`);
       });
     }
 
-    // TODO: Add authentication middleware
-    // TODO: Add CORS middleware
+    // Authentication middleware (simple static credentials)
+    if (this.config.auth.enabled) {
+      this.middlewares.use(async (req, res, ctx, next) => {
+        const authHeader = req.headers.authorization;
+
+        // Extract access key from Authorization header
+        let accessKeyId: string | undefined;
+
+        if (authHeader) {
+          // Support multiple auth formats:
+          // 1. AWS accessKeyId:signature
+          // 2. AWS4-HMAC-SHA256 Credential=accessKeyId/date/region/service/aws4_request, ...
+          if (authHeader.startsWith('AWS ')) {
+            accessKeyId = authHeader.substring(4).split(':')[0];
+          } else if (authHeader.startsWith('AWS4-HMAC-SHA256')) {
+            const credentialMatch = authHeader.match(/Credential=([^/]+)\//);
+            accessKeyId = credentialMatch ? credentialMatch[1] : undefined;
+          }
+        }
+
+        // Check if access key is valid
+        const isValid = this.config.auth.credentials.some(
+          (cred) => cred.accessKeyId === accessKeyId
+        );
+
+        if (!isValid) {
+          ctx.throw('AccessDenied', 'Access Denied');
+          return;
+        }
+
+        await next();
+      });
+    }
+
+    // Logger middleware
+    if (!this.options.silent && this.config.logging.enabled) {
+      this.middlewares.use(async (req, res, ctx, next) => {
+        const start = Date.now();
+
+        // Log request
+        this.logger.request(req.method || 'UNKNOWN', req.url || '/', {
+          headers: req.headers,
+        });
+
+        await next();
+
+        // Log response
+        const duration = Date.now() - start;
+        this.logger.response(
+          req.method || 'UNKNOWN',
+          req.url || '/',
+          res.statusCode || 500,
+          duration
+        );
+      });
+    }
   }
 
   /**
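The CORS and auth middlewares above only run when their config sections are enabled. A hedged sketch of what enabling them might look like through the nested config passed to Smarts3.createAndStart; the field names are taken from the default config object built in the constructor hunk, but whether ISmarts3Config accepts these optional sections in exactly this form is an assumption:

```typescript
import * as smarts3 from '@push.rocks/smarts3';

const instance = await smarts3.Smarts3.createAndStart({
  server: { port: 3333, silent: false },
  storage: { cleanSlate: true },
  // Assumed optional sections, mirroring the server's default config shape:
  cors: {
    enabled: true,
    allowedOrigins: ['http://localhost:8080'],
    allowedMethods: ['GET', 'PUT', 'DELETE', 'OPTIONS'],
    allowedHeaders: ['*'],
    allowCredentials: true,
    maxAge: 3600,
  },
  auth: {
    enabled: true, // requests must carry an Authorization header with a known accessKeyId
    credentials: [{ accessKeyId: 'S3RVER', secretAccessKey: 'S3RVER' }],
  },
  logging: {
    level: 'debug',
    format: 'json',
    enabled: true,
  },
});
```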
@@ -80,6 +223,7 @@ export class Smarts3Server {
 
     // Object level (/:bucket/:key*)
     this.router.put('/:bucket/:key*', ObjectController.putObject);
+    this.router.post('/:bucket/:key*', ObjectController.postObject); // For multipart operations
     this.router.get('/:bucket/:key*', ObjectController.getObject);
     this.router.head('/:bucket/:key*', ObjectController.headObject);
     this.router.delete('/:bucket/:key*', ObjectController.deleteObject);
@@ -92,7 +236,7 @@ export class Smarts3Server {
     req: plugins.http.IncomingMessage,
     res: plugins.http.ServerResponse
   ): Promise<void> {
-    const context = new S3Context(req, res, this.store);
+    const context = new S3Context(req, res, this.store, this.multipart);
 
     try {
       // Execute middleware stack
@@ -122,11 +266,14 @@ export class Smarts3Server {
   ): Promise<void> {
     const s3Error = err instanceof S3Error ? err : S3Error.fromError(err);
 
-    if (!this.options.silent) {
-      console.error(`[S3Error] ${s3Error.code}: ${s3Error.message}`);
-      if (s3Error.status >= 500) {
-        console.error(err.stack || err);
-      }
+    // Log the error
+    this.logger.s3Error(s3Error.code, s3Error.message, s3Error.status);
+
+    // Log stack trace for server errors
+    if (s3Error.status >= 500) {
+      this.logger.debug('Error stack trace', {
+        stack: err.stack || err.toString(),
+      });
     }
 
     // Send error response
@@ -147,6 +294,9 @@ export class Smarts3Server {
     // Initialize store
     await this.store.initialize();
 
+    // Initialize multipart upload manager
+    await this.multipart.initialize();
+
     // Clean slate if requested
     if (this.options.cleanSlate) {
       await this.store.reset();
@@ -155,7 +305,10 @@ export class Smarts3Server {
     // Create HTTP server
     this.httpServer = plugins.http.createServer((req, res) => {
       this.handleRequest(req, res).catch((err) => {
-        console.error('Fatal error in request handler:', err);
+        this.logger.error('Fatal error in request handler', {
+          error: err.message,
+          stack: err.stack,
+        });
         if (!res.headersSent) {
           res.writeHead(500, { 'Content-Type': 'text/plain' });
           res.end('Internal Server Error');
@@ -169,9 +322,7 @@ export class Smarts3Server {
         if (err) {
           reject(err);
         } else {
-          if (!this.options.silent) {
-            console.log(`S3 server listening on ${this.options.address}:${this.options.port}`);
-          }
+          this.logger.info(`S3 server listening on ${this.options.address}:${this.options.port}`);
           resolve();
         }
       });
@@ -191,9 +342,7 @@ export class Smarts3Server {
         if (err) {
           reject(err);
         } else {
-          if (!this.options.silent) {
-            console.log('S3 server stopped');
-          }
+          this.logger.info('S3 server stopped');
           resolve();
         }
       });
ts/controllers/object.controller.ts

@@ -6,7 +6,7 @@ import type { S3Context } from '../classes/context.js';
  */
 export class ObjectController {
   /**
-   * PUT /:bucket/:key* - Upload object or copy object
+   * PUT /:bucket/:key* - Upload object, copy object, or upload part
    */
   public static async putObject(
     req: plugins.http.IncomingMessage,
@@ -16,6 +16,11 @@ export class ObjectController {
   ): Promise<void> {
     const { bucket, key } = params;
 
+    // Check if this is a multipart upload part
+    if (ctx.query.partNumber && ctx.query.uploadId) {
+      return ObjectController.uploadPart(req, res, ctx, params);
+    }
+
     // Check if this is a COPY operation
     const copySource = ctx.headers['x-amz-copy-source'] as string | undefined;
     if (copySource) {
@@ -133,7 +138,7 @@ export class ObjectController {
   }
 
   /**
-   * DELETE /:bucket/:key* - Delete object
+   * DELETE /:bucket/:key* - Delete object or abort multipart upload
    */
   public static async deleteObject(
     req: plugins.http.IncomingMessage,
@@ -143,6 +148,11 @@ export class ObjectController {
   ): Promise<void> {
     const { bucket, key } = params;
 
+    // Check if this is an abort multipart upload
+    if (ctx.query.uploadId) {
+      return ObjectController.abortMultipartUpload(req, res, ctx, params);
+    }
+
     await ctx.store.deleteObject(bucket, key);
     ctx.status(204).send('');
   }
@@ -201,4 +211,168 @@ export class ObjectController {
       },
     });
   }
+
+  /**
+   * POST /:bucket/:key* - Initiate or complete multipart upload
+   */
+  public static async postObject(
+    req: plugins.http.IncomingMessage,
+    res: plugins.http.ServerResponse,
+    ctx: S3Context,
+    params: Record<string, string>
+  ): Promise<void> {
+    // Check if this is initiate multipart upload
+    if (ctx.query.uploads !== undefined) {
+      return ObjectController.initiateMultipartUpload(req, res, ctx, params);
+    }
+
+    // Check if this is complete multipart upload
+    if (ctx.query.uploadId) {
+      return ObjectController.completeMultipartUpload(req, res, ctx, params);
+    }
+
+    ctx.throw('InvalidRequest', 'Invalid POST request');
+  }
+
+  /**
+   * Initiate Multipart Upload (POST with ?uploads)
+   */
+  private static async initiateMultipartUpload(
+    req: plugins.http.IncomingMessage,
+    res: plugins.http.ServerResponse,
+    ctx: S3Context,
+    params: Record<string, string>
+  ): Promise<void> {
+    const { bucket, key } = params;
+
+    // Extract metadata from headers
+    const metadata: Record<string, string> = {};
+    for (const [header, value] of Object.entries(ctx.headers)) {
+      if (header.startsWith('x-amz-meta-')) {
+        metadata[header] = value as string;
+      }
+      if (header === 'content-type' && value) {
+        metadata['content-type'] = value as string;
+      }
+    }
+
+    // Initiate upload
+    const uploadId = await ctx.multipart.initiateUpload(bucket, key, metadata);
+
+    // Send XML response
+    await ctx.sendXML({
+      InitiateMultipartUploadResult: {
+        Bucket: bucket,
+        Key: key,
+        UploadId: uploadId,
+      },
+    });
+  }
+
+  /**
+   * Upload Part (PUT with ?partNumber&uploadId)
+   */
+  private static async uploadPart(
+    req: plugins.http.IncomingMessage,
+    res: plugins.http.ServerResponse,
+    ctx: S3Context,
+    params: Record<string, string>
+  ): Promise<void> {
+    const uploadId = ctx.query.uploadId!;
+    const partNumber = parseInt(ctx.query.partNumber!);
+
+    if (isNaN(partNumber) || partNumber < 1 || partNumber > 10000) {
+      ctx.throw('InvalidPartNumber', 'Part number must be between 1 and 10000');
+    }
+
+    // Upload the part
+    const partInfo = await ctx.multipart.uploadPart(
+      uploadId,
+      partNumber,
+      ctx.getRequestStream() as any as import('stream').Readable
+    );
+
+    // Set ETag header (part ETag)
+    ctx.setHeader('ETag', `"${partInfo.etag}"`);
+    ctx.status(200).send('');
+  }
+
+  /**
+   * Complete Multipart Upload (POST with ?uploadId)
+   */
+  private static async completeMultipartUpload(
+    req: plugins.http.IncomingMessage,
+    res: plugins.http.ServerResponse,
+    ctx: S3Context,
+    params: Record<string, string>
+  ): Promise<void> {
+    const { bucket, key } = params;
+    const uploadId = ctx.query.uploadId!;
+
+    // Read and parse request body (XML with part list)
+    const body = await ctx.readBody();
+
+    // Parse XML to extract parts
+    // Expected format: <CompleteMultipartUpload><Part><PartNumber>1</PartNumber><ETag>"etag"</ETag></Part>...</CompleteMultipartUpload>
+    const partMatches = body.matchAll(/<Part>.*?<PartNumber>(\d+)<\/PartNumber>.*?<ETag>(.*?)<\/ETag>.*?<\/Part>/gs);
+    const parts: Array<{ PartNumber: number; ETag: string }> = [];
+
+    for (const match of partMatches) {
+      parts.push({
+        PartNumber: parseInt(match[1]),
+        ETag: match[2],
+      });
+    }
+
+    // Complete the upload
+    const result = await ctx.multipart.completeUpload(uploadId, parts);
+
+    // Get upload metadata
+    const upload = ctx.multipart.getUpload(uploadId);
+    if (!upload) {
+      ctx.throw('NoSuchUpload', 'The specified upload does not exist');
+    }
+
+    // Move final file to object store
+    const finalPath = ctx.multipart.getFinalPath(uploadId);
+    const finalContent = await plugins.smartfs.file(finalPath).read();
+    const finalStream = plugins.http.IncomingMessage.prototype;
+
+    // Create a readable stream from the buffer
+    const { Readable } = await import('stream');
+    const finalReadableStream = Readable.from([finalContent]);
+
+    // Store the final object
+    await ctx.store.putObject(bucket, key, finalReadableStream, upload.metadata);
+
+    // Clean up multipart upload data
+    await ctx.multipart.cleanupUpload(uploadId);
+
+    // Send XML response
+    await ctx.sendXML({
+      CompleteMultipartUploadResult: {
+        Location: `/${bucket}/${key}`,
+        Bucket: bucket,
+        Key: key,
+        ETag: `"${result.etag}"`,
+      },
+    });
+  }
+
+  /**
+   * Abort Multipart Upload (DELETE with ?uploadId)
+   */
+  private static async abortMultipartUpload(
+    req: plugins.http.IncomingMessage,
+    res: plugins.http.ServerResponse,
+    ctx: S3Context,
+    params: Record<string, string>
+  ): Promise<void> {
+    const uploadId = ctx.query.uploadId!;
+
+    // Abort and cleanup
+    await ctx.multipart.abortUpload(uploadId);
+
+    ctx.status(204).send('');
+  }
 }
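End to end, these handlers correspond to the standard S3 multipart calls. A sketch using an AWS SDK v3 client pointed at the local endpoint; the client package, port, path-style flag, and the assumption that the bucket already exists are all outside this diff (the S3RVER credentials come from the server's default auth config):

```typescript
import {
  S3Client,
  CreateMultipartUploadCommand,
  UploadPartCommand,
  CompleteMultipartUploadCommand,
} from '@aws-sdk/client-s3';

const s3 = new S3Client({
  endpoint: 'http://localhost:3333',
  region: 'us-east-1',
  forcePathStyle: true,
  credentials: { accessKeyId: 'S3RVER', secretAccessKey: 'S3RVER' },
});

// POST /my-bucket/big-file.bin?uploads -> initiateMultipartUpload
const { UploadId } = await s3.send(
  new CreateMultipartUploadCommand({ Bucket: 'my-bucket', Key: 'big-file.bin' })
);

// PUT /my-bucket/big-file.bin?partNumber=1&uploadId=... -> uploadPart
const part1 = await s3.send(
  new UploadPartCommand({
    Bucket: 'my-bucket',
    Key: 'big-file.bin',
    UploadId,
    PartNumber: 1,
    Body: Buffer.from('hello world'),
  })
);

// POST /my-bucket/big-file.bin?uploadId=... -> completeMultipartUpload
await s3.send(
  new CompleteMultipartUploadCommand({
    Bucket: 'my-bucket',
    Key: 'big-file.bin',
    UploadId,
    MultipartUpload: { Parts: [{ PartNumber: 1, ETag: part1.ETag }] },
  })
);

// DELETE /my-bucket/big-file.bin?uploadId=... (AbortMultipartUploadCommand) -> abortMultipartUpload
```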