import * as plugins from '../plugins.js';
import type { S3Context } from '../classes/context.js';
/**
|
|
|
|
|
* Object-level operations
|
|
|
|
|
*/
|
|
|
|
|
export class ObjectController {
|
|
|
|
|
/**
|
2025-11-23 22:41:46 +00:00
|
|
|
* PUT /:bucket/:key* - Upload object, copy object, or upload part
|
2025-11-21 14:32:19 +00:00
|
|
|
*/
|
|
|
|
|
public static async putObject(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket, key } = params;
|
|
|
|
|
|
2025-11-23 22:41:46 +00:00
|
|
|
// Check if this is a multipart upload part
|
|
|
|
|
if (ctx.query.partNumber && ctx.query.uploadId) {
|
|
|
|
|
return ObjectController.uploadPart(req, res, ctx, params);
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-21 14:32:19 +00:00
|
|
|
// Check if this is a COPY operation
|
|
|
|
|
const copySource = ctx.headers['x-amz-copy-source'] as string | undefined;
|
|
|
|
|
if (copySource) {
|
|
|
|
|
return ObjectController.copyObject(req, res, ctx, params);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Extract metadata from headers
|
|
|
|
|
const metadata: Record<string, string> = {};
|
|
|
|
|
for (const [header, value] of Object.entries(ctx.headers)) {
|
|
|
|
|
if (header.startsWith('x-amz-meta-')) {
|
|
|
|
|
metadata[header] = value as string;
|
|
|
|
|
}
|
|
|
|
|
if (header === 'content-type' && value) {
|
|
|
|
|
metadata['content-type'] = value as string;
|
|
|
|
|
}
|
|
|
|
|
if (header === 'cache-control' && value) {
|
|
|
|
|
metadata['cache-control'] = value as string;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// If no content-type, default to binary/octet-stream
|
|
|
|
|
if (!metadata['content-type']) {
|
|
|
|
|
metadata['content-type'] = 'binary/octet-stream';
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Stream upload
|
|
|
|
|
const result = await ctx.store.putObject(bucket, key, ctx.getRequestStream(), metadata);
|
|
|
|
|
|
|
|
|
|
ctx.setHeader('ETag', `"${result.md5}"`);
|
|
|
|
|
ctx.status(200).send('');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* GET /:bucket/:key* - Download object
|
|
|
|
|
*/
|
|
|
|
|
public static async getObject(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket, key } = params;
|
|
|
|
|
|
|
|
|
|
// Parse Range header if present
|
|
|
|
|
const rangeHeader = ctx.headers.range as string | undefined;
|
|
|
|
|
let range: { start: number; end: number } | undefined;
|
|
|
|
|
|
|
|
|
|
if (rangeHeader) {
|
|
|
|
|
const matches = rangeHeader.match(/bytes=(\d+)-(\d*)/);
|
|
|
|
|
if (matches) {
|
|
|
|
|
const start = parseInt(matches[1]);
|
|
|
|
|
const end = matches[2] ? parseInt(matches[2]) : undefined;
|
|
|
|
|
range = { start, end: end || start + 1024 * 1024 }; // Default to 1MB if no end
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Get object
|
|
|
|
|
const object = await ctx.store.getObject(bucket, key, range);
|
|
|
|
|
|
|
|
|
|
// Set response headers
|
|
|
|
|
ctx.setHeader('ETag', `"${object.md5}"`);
|
|
|
|
|
ctx.setHeader('Last-Modified', object.lastModified.toUTCString());
|
|
|
|
|
ctx.setHeader('Content-Type', object.metadata['content-type'] || 'binary/octet-stream');
|
|
|
|
|
ctx.setHeader('Accept-Ranges', 'bytes');
|
|
|
|
|
|
|
|
|
|
// Handle custom metadata headers
|
|
|
|
|
for (const [key, value] of Object.entries(object.metadata)) {
|
|
|
|
|
if (key.startsWith('x-amz-meta-')) {
|
|
|
|
|
ctx.setHeader(key, value);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (range) {
|
|
|
|
|
ctx.status(206);
|
|
|
|
|
ctx.setHeader('Content-Length', (range.end - range.start + 1).toString());
|
|
|
|
|
ctx.setHeader('Content-Range', `bytes ${range.start}-${range.end}/${object.size}`);
|
|
|
|
|
} else {
|
|
|
|
|
ctx.status(200);
|
|
|
|
|
ctx.setHeader('Content-Length', object.size.toString());
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Stream response
|
|
|
|
|
await ctx.send(object.content!);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* HEAD /:bucket/:key* - Get object metadata
|
|
|
|
|
*/
|
|
|
|
|
public static async headObject(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket, key } = params;
|
|
|
|
|
|
|
|
|
|
// Get object (without content)
|
|
|
|
|
const object = await ctx.store.getObject(bucket, key);
|
|
|
|
|
|
|
|
|
|
// Set response headers (same as GET but no body)
|
|
|
|
|
ctx.setHeader('ETag', `"${object.md5}"`);
|
|
|
|
|
ctx.setHeader('Last-Modified', object.lastModified.toUTCString());
|
|
|
|
|
ctx.setHeader('Content-Type', object.metadata['content-type'] || 'binary/octet-stream');
|
|
|
|
|
ctx.setHeader('Content-Length', object.size.toString());
|
|
|
|
|
ctx.setHeader('Accept-Ranges', 'bytes');
|
|
|
|
|
|
|
|
|
|
// Handle custom metadata headers
|
|
|
|
|
for (const [key, value] of Object.entries(object.metadata)) {
|
|
|
|
|
if (key.startsWith('x-amz-meta-')) {
|
|
|
|
|
ctx.setHeader(key, value);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ctx.status(200).send('');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
2025-11-23 22:41:46 +00:00
|
|
|
* DELETE /:bucket/:key* - Delete object or abort multipart upload
|
2025-11-21 14:32:19 +00:00
|
|
|
*/
|
|
|
|
|
public static async deleteObject(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket, key } = params;
|
|
|
|
|
|
2025-11-23 22:41:46 +00:00
|
|
|
// Check if this is an abort multipart upload
|
|
|
|
|
if (ctx.query.uploadId) {
|
|
|
|
|
return ObjectController.abortMultipartUpload(req, res, ctx, params);
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-21 14:32:19 +00:00
|
|
|
await ctx.store.deleteObject(bucket, key);
|
|
|
|
|
ctx.status(204).send('');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* COPY operation (PUT with x-amz-copy-source header)
|
|
|
|
|
*/
|
|
|
|
|
private static async copyObject(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket: destBucket, key: destKey } = params;
|
|
|
|
|
const copySource = ctx.headers['x-amz-copy-source'] as string;
|
|
|
|
|
|
|
|
|
|
// Parse source bucket and key from copy source
|
|
|
|
|
// Format: /bucket/key or bucket/key
|
|
|
|
|
const sourcePath = copySource.startsWith('/') ? copySource.slice(1) : copySource;
|
|
|
|
|
const firstSlash = sourcePath.indexOf('/');
|
|
|
|
|
const srcBucket = decodeURIComponent(sourcePath.slice(0, firstSlash));
|
|
|
|
|
const srcKey = decodeURIComponent(sourcePath.slice(firstSlash + 1));
|
|
|
|
|
|
|
|
|
|
// Get metadata directive (COPY or REPLACE)
|
|
|
|
|
const metadataDirective = (ctx.headers['x-amz-metadata-directive'] as string)?.toUpperCase() || 'COPY';
|
|
|
|
|
|
|
|
|
|
// Extract new metadata if REPLACE
|
|
|
|
|
let newMetadata: Record<string, string> | undefined;
|
|
|
|
|
if (metadataDirective === 'REPLACE') {
|
|
|
|
|
newMetadata = {};
|
|
|
|
|
for (const [header, value] of Object.entries(ctx.headers)) {
|
|
|
|
|
if (header.startsWith('x-amz-meta-')) {
|
|
|
|
|
newMetadata[header] = value as string;
|
|
|
|
|
}
|
|
|
|
|
if (header === 'content-type' && value) {
|
|
|
|
|
newMetadata['content-type'] = value as string;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Perform copy
|
|
|
|
|
const result = await ctx.store.copyObject(
|
|
|
|
|
srcBucket,
|
|
|
|
|
srcKey,
|
|
|
|
|
destBucket,
|
|
|
|
|
destKey,
|
|
|
|
|
metadataDirective as 'COPY' | 'REPLACE',
|
|
|
|
|
newMetadata
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Send XML response
|
|
|
|
|
await ctx.sendXML({
|
|
|
|
|
CopyObjectResult: {
|
|
|
|
|
LastModified: new Date().toISOString(),
|
|
|
|
|
ETag: `"${result.md5}"`,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
}
|
2025-11-23 22:41:46 +00:00
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* POST /:bucket/:key* - Initiate or complete multipart upload
|
|
|
|
|
*/
|
|
|
|
|
public static async postObject(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
// Check if this is initiate multipart upload
|
|
|
|
|
if (ctx.query.uploads !== undefined) {
|
|
|
|
|
return ObjectController.initiateMultipartUpload(req, res, ctx, params);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Check if this is complete multipart upload
|
|
|
|
|
if (ctx.query.uploadId) {
|
|
|
|
|
return ObjectController.completeMultipartUpload(req, res, ctx, params);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
ctx.throw('InvalidRequest', 'Invalid POST request');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Initiate Multipart Upload (POST with ?uploads)
|
|
|
|
|
*/
|
|
|
|
|
private static async initiateMultipartUpload(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket, key } = params;
|
|
|
|
|
|
|
|
|
|
// Extract metadata from headers
|
|
|
|
|
const metadata: Record<string, string> = {};
|
|
|
|
|
for (const [header, value] of Object.entries(ctx.headers)) {
|
|
|
|
|
if (header.startsWith('x-amz-meta-')) {
|
|
|
|
|
metadata[header] = value as string;
|
|
|
|
|
}
|
|
|
|
|
if (header === 'content-type' && value) {
|
|
|
|
|
metadata['content-type'] = value as string;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Initiate upload
|
|
|
|
|
const uploadId = await ctx.multipart.initiateUpload(bucket, key, metadata);
|
|
|
|
|
|
|
|
|
|
// Send XML response
|
|
|
|
|
await ctx.sendXML({
|
|
|
|
|
InitiateMultipartUploadResult: {
|
|
|
|
|
Bucket: bucket,
|
|
|
|
|
Key: key,
|
|
|
|
|
UploadId: uploadId,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Upload Part (PUT with ?partNumber&uploadId)
|
|
|
|
|
*/
|
|
|
|
|
private static async uploadPart(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const uploadId = ctx.query.uploadId!;
|
|
|
|
|
const partNumber = parseInt(ctx.query.partNumber!);
|
|
|
|
|
|
|
|
|
|
if (isNaN(partNumber) || partNumber < 1 || partNumber > 10000) {
|
|
|
|
|
ctx.throw('InvalidPartNumber', 'Part number must be between 1 and 10000');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Upload the part
|
|
|
|
|
const partInfo = await ctx.multipart.uploadPart(
|
|
|
|
|
uploadId,
|
|
|
|
|
partNumber,
|
|
|
|
|
ctx.getRequestStream() as any as import('stream').Readable
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Set ETag header (part ETag)
|
|
|
|
|
ctx.setHeader('ETag', `"${partInfo.etag}"`);
|
|
|
|
|
ctx.status(200).send('');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Complete Multipart Upload (POST with ?uploadId)
|
|
|
|
|
*/
|
|
|
|
|
private static async completeMultipartUpload(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const { bucket, key } = params;
|
|
|
|
|
const uploadId = ctx.query.uploadId!;
|
|
|
|
|
|
|
|
|
|
// Read and parse request body (XML with part list)
|
|
|
|
|
const body = await ctx.readBody();
|
|
|
|
|
|
|
|
|
|
// Parse XML to extract parts
|
|
|
|
|
// Expected format: <CompleteMultipartUpload><Part><PartNumber>1</PartNumber><ETag>"etag"</ETag></Part>...</CompleteMultipartUpload>
|
|
|
|
|
const partMatches = body.matchAll(/<Part>.*?<PartNumber>(\d+)<\/PartNumber>.*?<ETag>(.*?)<\/ETag>.*?<\/Part>/gs);
|
|
|
|
|
const parts: Array<{ PartNumber: number; ETag: string }> = [];
|
|
|
|
|
|
|
|
|
|
for (const match of partMatches) {
|
|
|
|
|
parts.push({
|
|
|
|
|
PartNumber: parseInt(match[1]),
|
|
|
|
|
ETag: match[2],
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Complete the upload
|
|
|
|
|
const result = await ctx.multipart.completeUpload(uploadId, parts);
|
|
|
|
|
|
|
|
|
|
// Get upload metadata
|
|
|
|
|
const upload = ctx.multipart.getUpload(uploadId);
|
|
|
|
|
if (!upload) {
|
|
|
|
|
ctx.throw('NoSuchUpload', 'The specified upload does not exist');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Move final file to object store
|
|
|
|
|
const finalPath = ctx.multipart.getFinalPath(uploadId);
|
|
|
|
|
const finalContent = await plugins.smartfs.file(finalPath).read();
|
|
|
|
|
const finalStream = plugins.http.IncomingMessage.prototype;
|
|
|
|
|
|
|
|
|
|
// Create a readable stream from the buffer
|
|
|
|
|
const { Readable } = await import('stream');
|
|
|
|
|
const finalReadableStream = Readable.from([finalContent]);
|
|
|
|
|
|
|
|
|
|
// Store the final object
|
|
|
|
|
await ctx.store.putObject(bucket, key, finalReadableStream, upload.metadata);
|
|
|
|
|
|
|
|
|
|
// Clean up multipart upload data
|
|
|
|
|
await ctx.multipart.cleanupUpload(uploadId);
|
|
|
|
|
|
|
|
|
|
// Send XML response
|
|
|
|
|
await ctx.sendXML({
|
|
|
|
|
CompleteMultipartUploadResult: {
|
|
|
|
|
Location: `/${bucket}/${key}`,
|
|
|
|
|
Bucket: bucket,
|
|
|
|
|
Key: key,
|
|
|
|
|
ETag: `"${result.etag}"`,
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Abort Multipart Upload (DELETE with ?uploadId)
|
|
|
|
|
*/
|
|
|
|
|
private static async abortMultipartUpload(
|
|
|
|
|
req: plugins.http.IncomingMessage,
|
|
|
|
|
res: plugins.http.ServerResponse,
|
|
|
|
|
ctx: S3Context,
|
|
|
|
|
params: Record<string, string>
|
|
|
|
|
): Promise<void> {
|
|
|
|
|
const uploadId = ctx.query.uploadId!;
|
|
|
|
|
|
|
|
|
|
// Abort and cleanup
|
|
|
|
|
await ctx.multipart.abortUpload(uploadId);
|
|
|
|
|
|
|
|
|
|
ctx.status(204).send('');
|
|
|
|
|
}
|
2025-11-21 14:32:19 +00:00
|
|
|
}
|