feat(multipart): Implement full multipart upload support with persistent manager, periodic cleanup, and API integration

2025-11-23 23:31:26 +00:00
parent d6f178bde6
commit 648ff98c2d
6 changed files with 287 additions and 4 deletions
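As a rough end-to-end illustration of what this change enables, a standard S3 client can drive the new multipart endpoints. The sketch below uses @aws-sdk/client-s3; the endpoint, port, bucket, key, and credentials are placeholders and are not part of this commit.

import {
  S3Client,
  CreateMultipartUploadCommand,
  UploadPartCommand,
  CompleteMultipartUploadCommand,
} from '@aws-sdk/client-s3';

async function demoMultipartUpload(): Promise<void> {
  // Placeholder endpoint and credentials for a locally running smarts3 instance.
  const s3 = new S3Client({
    endpoint: 'http://localhost:3000',
    region: 'us-east-1',
    credentials: { accessKeyId: 'test', secretAccessKey: 'test' },
    forcePathStyle: true,
  });

  // 1. Initiate the multipart upload.
  const { UploadId } = await s3.send(
    new CreateMultipartUploadCommand({ Bucket: 'demo-bucket', Key: 'big.bin' }),
  );

  // 2. Upload a single part (real uploads would loop over many parts;
  //    S3 requires every part except the last to be at least 5 MiB).
  const part = await s3.send(
    new UploadPartCommand({
      Bucket: 'demo-bucket',
      Key: 'big.bin',
      UploadId,
      PartNumber: 1,
      Body: Buffer.alloc(5 * 1024 * 1024),
    }),
  );

  // 3. Complete the upload with the collected ETags.
  await s3.send(
    new CompleteMultipartUploadCommand({
      Bucket: 'demo-bucket',
      Key: 'big.bin',
      UploadId,
      MultipartUpload: { Parts: [{ PartNumber: 1, ETag: part.ETag }] },
    }),
  );
}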


@@ -23,15 +23,41 @@ export interface IPartInfo {
lastModified: Date;
}
/**
* Serializable version of upload metadata for disk persistence
*/
interface ISerializableUpload {
uploadId: string;
bucket: string;
key: string;
initiated: string; // ISO date string
metadata: Record<string, string>;
parts: Array<{
partNumber: number;
etag: string;
size: number;
lastModified: string; // ISO date string
}>;
}
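For reference, a persisted metadata.json parses into a value of this shape; the concrete values below are hypothetical and only illustrate the format (the real uploadId is generated by the manager and not shown in this hunk).

const exampleUpload: ISerializableUpload = {
  uploadId: 'upload-1732404686000-abc123',            // hypothetical ID
  bucket: 'demo-bucket',
  key: 'videos/big.bin',
  initiated: '2025-11-23T23:31:26.000Z',
  metadata: { 'content-type': 'application/octet-stream' },
  parts: [
    {
      partNumber: 1,
      etag: '"9b2cf535f27731c974343645a3985328"',
      size: 5242880,
      lastModified: '2025-11-23T23:32:00.000Z',
    },
  ],
};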
/**
* Manages multipart upload state and storage
*/
export class MultipartUploadManager {
private uploads: Map<string, IMultipartUpload> = new Map();
private uploadDir: string;
private cleanupInterval: NodeJS.Timeout | null = null;
private expirationDays: number;
private cleanupIntervalMinutes: number;
-constructor(private rootDir: string) {
+constructor(
+  private rootDir: string,
+  expirationDays: number = 7,
+  cleanupIntervalMinutes: number = 60
+) {
this.uploadDir = plugins.path.join(rootDir, '.multipart');
this.expirationDays = expirationDays;
this.cleanupIntervalMinutes = cleanupIntervalMinutes;
}
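A minimal sketch of constructing the manager with non-default retention, assuming it is used directly; the directory path and values are illustrative only.

// Keep incomplete uploads for 3 days and sweep every 30 minutes.
const manager = new MultipartUploadManager('/srv/smarts3-data', 3, 30);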
/**
@@ -39,6 +65,97 @@ export class MultipartUploadManager {
*/
public async initialize(): Promise<void> {
await plugins.smartfs.directory(this.uploadDir).recursive().create();
await this.restoreUploadsFromDisk();
}
/**
* Save upload metadata to disk for persistence
*/
private async saveUploadMetadata(uploadId: string): Promise<void> {
const upload = this.uploads.get(uploadId);
if (!upload) {
return;
}
const metadataPath = plugins.path.join(this.uploadDir, uploadId, 'metadata.json');
const serializable: ISerializableUpload = {
uploadId: upload.uploadId,
bucket: upload.bucket,
key: upload.key,
initiated: upload.initiated.toISOString(),
metadata: upload.metadata,
parts: Array.from(upload.parts.values()).map(part => ({
partNumber: part.partNumber,
etag: part.etag,
size: part.size,
lastModified: part.lastModified.toISOString(),
})),
};
await plugins.smartfs.file(metadataPath).write(JSON.stringify(serializable, null, 2));
}
/**
* Restore uploads from disk on initialization
*/
private async restoreUploadsFromDisk(): Promise<void> {
const uploadDirExists = await plugins.smartfs.directory(this.uploadDir).exists();
if (!uploadDirExists) {
return;
}
const entries = await plugins.smartfs.directory(this.uploadDir).includeStats().list();
for (const entry of entries) {
if (!entry.isDirectory) {
continue;
}
const uploadId = entry.name;
const metadataPath = plugins.path.join(this.uploadDir, uploadId, 'metadata.json');
// Check if metadata.json exists
const metadataExists = await plugins.smartfs.file(metadataPath).exists();
if (!metadataExists) {
// Orphaned upload directory - clean it up
console.warn(`Orphaned multipart upload directory found: ${uploadId}, cleaning up`);
await plugins.smartfs.directory(plugins.path.join(this.uploadDir, uploadId)).recursive().delete();
continue;
}
try {
// Read and parse metadata
const metadataContent = await plugins.smartfs.file(metadataPath).read();
const serialized: ISerializableUpload = JSON.parse(metadataContent as string);
// Restore to memory
const parts = new Map<number, IPartInfo>();
for (const part of serialized.parts) {
parts.set(part.partNumber, {
partNumber: part.partNumber,
etag: part.etag,
size: part.size,
lastModified: new Date(part.lastModified),
});
}
this.uploads.set(uploadId, {
uploadId: serialized.uploadId,
bucket: serialized.bucket,
key: serialized.key,
initiated: new Date(serialized.initiated),
parts,
metadata: serialized.metadata,
});
console.log(`Restored multipart upload: ${uploadId} (${serialized.bucket}/${serialized.key})`);
} catch (error) {
// Corrupted metadata - clean up
console.error(`Failed to restore multipart upload ${uploadId}:`, error);
await plugins.smartfs.directory(plugins.path.join(this.uploadDir, uploadId)).recursive().delete();
}
}
}
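The restore pass above implies an on-disk layout of one directory per upload under .multipart/, keyed by uploadId, with metadata.json as the source of truth. Part file naming is not shown in this hunk, so the layout below is only a sketch:

.multipart/
  <uploadId>/
    metadata.json    // serialized ISerializableUpload, rewritten on every part upload
    ...              // part data written by uploadPart (naming handled elsewhere in this class)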
/**
@@ -71,6 +188,9 @@ export class MultipartUploadManager {
const uploadPath = plugins.path.join(this.uploadDir, uploadId);
await plugins.smartfs.directory(uploadPath).recursive().create();
// Persist metadata to disk
await this.saveUploadMetadata(uploadId);
return uploadId;
}
@@ -116,6 +236,9 @@ export class MultipartUploadManager {
upload.parts.set(partNumber, partInfo);
// Persist updated metadata
await this.saveUploadMetadata(uploadId);
return partInfo;
}
@@ -235,4 +358,73 @@ export class MultipartUploadManager {
}
return Array.from(upload.parts.values()).sort((a, b) => a.partNumber - b.partNumber);
}
/**
* Start automatic cleanup task for expired uploads
*/
public startCleanupTask(): void {
if (this.cleanupInterval) {
console.warn('Cleanup task is already running');
return;
}
// Run cleanup immediately on start
this.performCleanup().catch(err => {
console.error('Failed to perform initial multipart cleanup:', err);
});
// Then schedule periodic cleanup
const intervalMs = this.cleanupIntervalMinutes * 60 * 1000;
this.cleanupInterval = setInterval(() => {
this.performCleanup().catch(err => {
console.error('Failed to perform scheduled multipart cleanup:', err);
});
}, intervalMs);
console.log(`Multipart cleanup task started (interval: ${this.cleanupIntervalMinutes} minutes, expiration: ${this.expirationDays} days)`);
}
/**
* Stop automatic cleanup task
*/
public stopCleanupTask(): void {
if (this.cleanupInterval) {
clearInterval(this.cleanupInterval);
this.cleanupInterval = null;
console.log('Multipart cleanup task stopped');
}
}
/**
* Perform cleanup of expired uploads
*/
private async performCleanup(): Promise<void> {
const now = Date.now();
const expirationMs = this.expirationDays * 24 * 60 * 60 * 1000;
const expiredUploads: string[] = [];
// Find expired uploads
for (const [uploadId, upload] of this.uploads.entries()) {
const age = now - upload.initiated.getTime();
if (age > expirationMs) {
expiredUploads.push(uploadId);
}
}
if (expiredUploads.length === 0) {
return;
}
console.log(`Cleaning up ${expiredUploads.length} expired multipart upload(s)`);
// Delete expired uploads
for (const uploadId of expiredUploads) {
try {
await this.abortUpload(uploadId);
console.log(`Deleted expired multipart upload: ${uploadId}`);
} catch (err) {
console.error(`Failed to delete expired upload ${uploadId}:`, err);
}
}
}
}
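Taken together, the intended lifecycle when embedding the manager looks roughly like this, mirroring the server integration further down; the directory path is a placeholder.

const uploads = new MultipartUploadManager('/srv/smarts3-data');
await uploads.initialize();    // recreates .multipart/ and restores persisted uploads
uploads.startCleanupTask();    // immediate sweep, then every 60 minutes by default
// ... handle S3 requests ...
uploads.stopCleanupTask();     // clear the interval during shutdown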


@@ -78,11 +78,19 @@ export class Smarts3Server {
maxMetadataSize: 2048,
requestTimeout: 300000,
},
multipart: {
expirationDays: 7,
cleanupIntervalMinutes: 60,
},
};
this.logger = new Logger(this.config.logging);
this.store = new FilesystemStore(this.options.directory);
-this.multipart = new MultipartUploadManager(this.options.directory);
+this.multipart = new MultipartUploadManager(
+  this.options.directory,
+  this.config.multipart.expirationDays,
+  this.config.multipart.cleanupIntervalMinutes
+);
this.router = new S3Router();
this.middlewares = new MiddlewareStack();
@@ -297,6 +305,9 @@ export class Smarts3Server {
// Initialize multipart upload manager
await this.multipart.initialize();
// Start multipart cleanup task
this.multipart.startCleanupTask();
// Clean slate if requested
if (this.options.cleanSlate) {
await this.store.reset();
@@ -337,6 +348,9 @@ export class Smarts3Server {
return;
}
// Stop multipart cleanup task
this.multipart.stopCleanupTask();
await new Promise<void>((resolve, reject) => {
this.httpServer!.close((err?: Error) => {
if (err) {