Compare commits
4 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | 0d4837184f |  |
|  | 7f3de92961 |  |
|  | a7bc902dd0 |  |
|  | 95d78d0d08 |  |
changelog.md (16 changed lines)
@@ -1,5 +1,21 @@
# Changelog

## 2025-11-23 - 3.1.0 - feat(logging)
Add structured Logger and integrate into Smarts3Server; pass full config to server

- Introduce a new Logger class (ts/classes/logger.ts) providing leveled logging (error, warn, info, debug), text/json formats and an enable flag.
- Integrate Logger into Smarts3Server: use structured logging for server lifecycle events, HTTP request/response logging and S3 errors instead of direct console usage.
- Smarts3 now passes the full merged configuration into Smarts3Server (config.logging can control logging behavior).
- Server start/stop messages and internal request/error logs are emitted via the Logger and respect the configured logging level/format and silent option.

## 2025-11-23 - 3.0.4 - fix(smarts3)
Use filesystem store for bucket creation and remove smartbucket runtime dependency

- Switched createBucket to call the internal FilesystemStore.createBucket instead of using @push.rocks/smartbucket
- Made Smarts3Server.store public so Smarts3 can access the filesystem store directly
- Removed runtime import/export of @push.rocks/smartbucket from plugins and moved @push.rocks/smartbucket to devDependencies in package.json
- Updated createBucket to return a simple { name } object after creating the bucket via the filesystem store

## 2025-11-23 - 3.0.3 - fix(filesystem)
Migrate filesystem implementation to @push.rocks/smartfs and add Web Streams handling
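As a quick orientation for the 3.1.0 and 3.0.4 entries above, a minimal usage sketch; the bucket name is made up, while `createAndStart`, the `logging` section and the `{ name }` return value are taken from the ts/index.ts changes further down in this diff:

```typescript
import { Smarts3 } from '@push.rocks/smarts3';

// Hedged sketch: exercise the new config-driven logging and the filesystem-backed createBucket.
const s3 = await Smarts3.createAndStart({
  server: { port: 3000, silent: false },
  logging: { level: 'debug', format: 'json' }, // forwarded into the server's Logger
});

const bucket = await s3.createBucket('demo-bucket'); // since 3.0.4 this resolves to { name: 'demo-bucket' }
await s3.stop();
```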
@@ -1,6 +1,6 @@
{
  "name": "@push.rocks/smarts3",
  "version": "3.0.3",
  "version": "3.1.0",
  "private": false,
  "description": "A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.",
  "main": "dist_ts/index.js",
@@ -19,6 +19,7 @@
    "@git.zone/tsbundle": "^2.5.2",
    "@git.zone/tsrun": "^2.0.0",
    "@git.zone/tstest": "^3.1.0",
    "@push.rocks/smartbucket": "^4.3.0",
    "@types/node": "^22.9.0"
  },
  "browserslist": [
@@ -37,7 +38,6 @@
    "readme.md"
  ],
  "dependencies": {
    "@push.rocks/smartbucket": "^4.3.0",
    "@push.rocks/smartfs": "^1.1.0",
    "@push.rocks/smartpath": "^6.0.0",
    "@push.rocks/smartxml": "^2.0.0",
pnpm-lock.yaml (generated, 6 changed lines)
@@ -8,9 +8,6 @@ importers:

  .:
    dependencies:
      '@push.rocks/smartbucket':
        specifier: ^4.3.0
        version: 4.3.0
      '@push.rocks/smartfs':
        specifier: ^1.1.0
        version: 1.1.0
@@ -39,6 +36,9 @@ importers:
      '@git.zone/tstest':
        specifier: ^3.1.0
        version: 3.1.0(socks@2.8.7)(typescript@5.9.3)
      '@push.rocks/smartbucket':
        specifier: ^4.3.0
        version: 4.3.0
      '@types/node':
        specifier: ^22.9.0
        version: 22.19.1
@@ -3,6 +3,6 @@
 */
export const commitinfo = {
  name: '@push.rocks/smarts3',
  version: '3.0.3',
  version: '3.1.0',
  description: 'A Node.js TypeScript package to create a local S3 endpoint for simulating AWS S3 operations using mapped local directories for development and testing purposes.'
}
ts/classes/logger.ts (new file, 130 lines)
@@ -0,0 +1,130 @@
import type { ILoggingConfig } from '../index.js';

/**
 * Log levels in order of severity
 */
const LOG_LEVELS = {
  error: 0,
  warn: 1,
  info: 2,
  debug: 3,
} as const;

type LogLevel = keyof typeof LOG_LEVELS;

/**
 * Structured logger with configurable levels and formats
 */
export class Logger {
  private config: Required<ILoggingConfig>;
  private minLevel: number;

  constructor(config: ILoggingConfig) {
    // Apply defaults for any missing config
    this.config = {
      level: config.level ?? 'info',
      format: config.format ?? 'text',
      enabled: config.enabled ?? true,
    };
    this.minLevel = LOG_LEVELS[this.config.level];
  }

  /**
   * Check if a log level should be output
   */
  private shouldLog(level: LogLevel): boolean {
    if (!this.config.enabled) {
      return false;
    }
    return LOG_LEVELS[level] <= this.minLevel;
  }

  /**
   * Format a log message
   */
  private format(level: LogLevel, message: string, meta?: Record<string, any>): string {
    const timestamp = new Date().toISOString();

    if (this.config.format === 'json') {
      return JSON.stringify({
        timestamp,
        level,
        message,
        ...(meta || {}),
      });
    }

    // Text format
    const metaStr = meta ? ` ${JSON.stringify(meta)}` : '';
    return `[${timestamp}] ${level.toUpperCase()}: ${message}${metaStr}`;
  }

  /**
   * Log at error level
   */
  public error(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('error')) {
      console.error(this.format('error', message, meta));
    }
  }

  /**
   * Log at warn level
   */
  public warn(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('warn')) {
      console.warn(this.format('warn', message, meta));
    }
  }

  /**
   * Log at info level
   */
  public info(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('info')) {
      console.log(this.format('info', message, meta));
    }
  }

  /**
   * Log at debug level
   */
  public debug(message: string, meta?: Record<string, any>): void {
    if (this.shouldLog('debug')) {
      console.log(this.format('debug', message, meta));
    }
  }

  /**
   * Log HTTP request
   */
  public request(method: string, url: string, meta?: Record<string, any>): void {
    this.info(`→ ${method} ${url}`, meta);
  }

  /**
   * Log HTTP response
   */
  public response(method: string, url: string, statusCode: number, duration: number): void {
    const level: LogLevel = statusCode >= 500 ? 'error' : statusCode >= 400 ? 'warn' : 'info';

    if (this.shouldLog(level)) {
      const message = `← ${method} ${url} - ${statusCode} (${duration}ms)`;

      if (level === 'error') {
        this.error(message, { statusCode, duration });
      } else if (level === 'warn') {
        this.warn(message, { statusCode, duration });
      } else {
        this.info(message, { statusCode, duration });
      }
    }
  }

  /**
   * Log S3 error
   */
  public s3Error(code: string, message: string, status: number): void {
    this.error(`[S3Error] ${code}: ${message}`, { code, status });
  }
}
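A short standalone sketch of the Logger above; the request URLs, metadata and timings are made up, while the constructor and method signatures are exactly those defined in the class (the import path assumes a sibling module as in ts/classes/):

```typescript
import { Logger } from './logger.js'; // assumed relative import within ts/classes/

const logger = new Logger({ level: 'info', format: 'text' }); // enabled defaults to true

logger.request('GET', '/demo-bucket/notes.txt', { headers: { host: 'localhost:3000' } });
logger.response('GET', '/demo-bucket/notes.txt', 200, 12); // logged at info level
logger.response('PUT', '/demo-bucket/huge.bin', 503, 45);  // status >= 500 is logged at error level
logger.debug('not printed');                               // below the configured 'info' threshold
logger.s3Error('NoSuchKey', 'The specified key does not exist.', 404);
```

With format: 'json' each line becomes a single JSON object carrying timestamp, level, message and any metadata fields, which is convenient for log shipping during tests.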
ts/classes/multipart-manager.ts (new file, 238 lines)
@@ -0,0 +1,238 @@
import * as plugins from '../plugins.js';
import { Readable } from 'stream';

/**
 * Multipart upload metadata
 */
export interface IMultipartUpload {
  uploadId: string;
  bucket: string;
  key: string;
  initiated: Date;
  parts: Map<number, IPartInfo>;
  metadata: Record<string, string>;
}

/**
 * Part information
 */
export interface IPartInfo {
  partNumber: number;
  etag: string;
  size: number;
  lastModified: Date;
}

/**
 * Manages multipart upload state and storage
 */
export class MultipartUploadManager {
  private uploads: Map<string, IMultipartUpload> = new Map();
  private uploadDir: string;

  constructor(private rootDir: string) {
    this.uploadDir = plugins.path.join(rootDir, '.multipart');
  }

  /**
   * Initialize multipart uploads directory
   */
  public async initialize(): Promise<void> {
    await plugins.smartfs.directory(this.uploadDir).recursive().create();
  }

  /**
   * Generate a unique upload ID
   */
  private generateUploadId(): string {
    return plugins.crypto.randomBytes(16).toString('hex');
  }

  /**
   * Initiate a new multipart upload
   */
  public async initiateUpload(
    bucket: string,
    key: string,
    metadata: Record<string, string>
  ): Promise<string> {
    const uploadId = this.generateUploadId();

    this.uploads.set(uploadId, {
      uploadId,
      bucket,
      key,
      initiated: new Date(),
      parts: new Map(),
      metadata,
    });

    // Create directory for this upload's parts
    const uploadPath = plugins.path.join(this.uploadDir, uploadId);
    await plugins.smartfs.directory(uploadPath).recursive().create();

    return uploadId;
  }

  /**
   * Upload a part
   */
  public async uploadPart(
    uploadId: string,
    partNumber: number,
    stream: Readable
  ): Promise<IPartInfo> {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }

    const partPath = plugins.path.join(this.uploadDir, uploadId, `part-${partNumber}`);

    // Write part to disk
    const webWriteStream = await plugins.smartfs.file(partPath).writeStream();
    const writer = webWriteStream.getWriter();

    let size = 0;
    const hash = plugins.crypto.createHash('md5');

    for await (const chunk of stream) {
      const buffer = Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk);
      await writer.write(new Uint8Array(buffer));
      hash.update(buffer);
      size += buffer.length;
    }

    await writer.close();

    const etag = hash.digest('hex');

    const partInfo: IPartInfo = {
      partNumber,
      etag,
      size,
      lastModified: new Date(),
    };

    upload.parts.set(partNumber, partInfo);

    return partInfo;
  }

  /**
   * Complete multipart upload - combine all parts
   */
  public async completeUpload(
    uploadId: string,
    parts: Array<{ PartNumber: number; ETag: string }>
  ): Promise<{ etag: string; size: number }> {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }

    // Verify all parts are uploaded
    for (const part of parts) {
      const uploadedPart = upload.parts.get(part.PartNumber);
      if (!uploadedPart) {
        throw new Error(`Part ${part.PartNumber} not uploaded`);
      }
      // Normalize ETag format (remove quotes if present)
      const normalizedETag = part.ETag.replace(/"/g, '');
      if (uploadedPart.etag !== normalizedETag) {
        throw new Error(`Part ${part.PartNumber} ETag mismatch`);
      }
    }

    // Sort parts by part number
    const sortedParts = parts.sort((a, b) => a.PartNumber - b.PartNumber);

    // Combine parts into final object
    const finalPath = plugins.path.join(this.uploadDir, uploadId, 'final');
    const webWriteStream = await plugins.smartfs.file(finalPath).writeStream();
    const writer = webWriteStream.getWriter();

    const hash = plugins.crypto.createHash('md5');
    let totalSize = 0;

    for (const part of sortedParts) {
      const partPath = plugins.path.join(this.uploadDir, uploadId, `part-${part.PartNumber}`);

      // Read part and write to final file
      const partContent = await plugins.smartfs.file(partPath).read();
      const buffer = Buffer.isBuffer(partContent) ? partContent : Buffer.from(partContent as string);

      await writer.write(new Uint8Array(buffer));
      hash.update(buffer);
      totalSize += buffer.length;
    }

    await writer.close();

    const etag = hash.digest('hex');

    return { etag, size: totalSize };
  }

  /**
   * Get the final combined file path
   */
  public getFinalPath(uploadId: string): string {
    return plugins.path.join(this.uploadDir, uploadId, 'final');
  }

  /**
   * Get upload metadata
   */
  public getUpload(uploadId: string): IMultipartUpload | undefined {
    return this.uploads.get(uploadId);
  }

  /**
   * Abort multipart upload - clean up parts
   */
  public async abortUpload(uploadId: string): Promise<void> {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }

    // Delete upload directory
    const uploadPath = plugins.path.join(this.uploadDir, uploadId);
    await plugins.smartfs.directory(uploadPath).recursive().delete();

    // Remove from memory
    this.uploads.delete(uploadId);
  }

  /**
   * Clean up upload after completion
   */
  public async cleanupUpload(uploadId: string): Promise<void> {
    const uploadPath = plugins.path.join(this.uploadDir, uploadId);
    await plugins.smartfs.directory(uploadPath).recursive().delete();
    this.uploads.delete(uploadId);
  }

  /**
   * List all in-progress uploads for a bucket
   */
  public listUploads(bucket?: string): IMultipartUpload[] {
    const uploads = Array.from(this.uploads.values());
    if (bucket) {
      return uploads.filter((u) => u.bucket === bucket);
    }
    return uploads;
  }

  /**
   * List parts for an upload
   */
  public listParts(uploadId: string): IPartInfo[] {
    const upload = this.uploads.get(uploadId);
    if (!upload) {
      throw new Error('No such upload');
    }
    return Array.from(upload.parts.values()).sort((a, b) => a.partNumber - b.partNumber);
  }
}
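A hedged end-to-end sketch of the manager above; the root directory, bucket, key and tiny payloads are placeholders (real S3 parts are normally at least 5 MB except the last), while the method names and return shapes come straight from the class:

```typescript
import { Readable } from 'stream';
import { MultipartUploadManager } from './multipart-manager.js'; // assumed relative import

const manager = new MultipartUploadManager('.nogit/bucketsDir'); // parts are staged under <root>/.multipart
await manager.initialize();

const uploadId = await manager.initiateUpload('demo-bucket', 'big/object.bin', {});

const part1 = await manager.uploadPart(uploadId, 1, Readable.from([Buffer.from('hello ')]));
const part2 = await manager.uploadPart(uploadId, 2, Readable.from([Buffer.from('world')]));

// Parts are verified against their ETags, sorted by part number and concatenated.
const { etag, size } = await manager.completeUpload(uploadId, [
  { PartNumber: 1, ETag: part1.etag },
  { PartNumber: 2, ETag: part2.etag },
]);

const combinedPath = manager.getFinalPath(uploadId); // move this into the bucket, then:
await manager.cleanupUpload(uploadId);
```

Note that the upload registry lives in an in-memory Map, so in-progress multipart uploads do not survive a server restart; for a local test server that is an acceptable trade-off.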
@@ -4,9 +4,11 @@ import { MiddlewareStack } from './middleware-stack.js';
import { S3Context } from './context.js';
import { FilesystemStore } from './filesystem-store.js';
import { S3Error } from './s3-error.js';
import { Logger } from './logger.js';
import { ServiceController } from '../controllers/service.controller.js';
import { BucketController } from '../controllers/bucket.controller.js';
import { ObjectController } from '../controllers/object.controller.js';
import type { ISmarts3Config } from '../index.js';

export interface ISmarts3ServerOptions {
  port?: number;
@@ -14,6 +16,7 @@ export interface ISmarts3ServerOptions {
  directory?: string;
  cleanSlate?: boolean;
  silent?: boolean;
  config?: Required<ISmarts3Config>;
}

/**
@@ -24,19 +27,58 @@ export class Smarts3Server {
  private httpServer?: plugins.http.Server;
  private router: S3Router;
  private middlewares: MiddlewareStack;
  private store: FilesystemStore;
  private options: Required<ISmarts3ServerOptions>;
  public store: FilesystemStore; // Made public for direct access from Smarts3 class
  private options: Required<Omit<ISmarts3ServerOptions, 'config'>>;
  private config: Required<ISmarts3Config>;
  private logger: Logger;

  constructor(options: ISmarts3ServerOptions = {}) {
    this.options = {
      port: 3000,
      address: '0.0.0.0',
      directory: plugins.path.join(process.cwd(), '.nogit/bucketsDir'),
      cleanSlate: false,
      silent: false,
      ...options,
      port: options.port ?? 3000,
      address: options.address ?? '0.0.0.0',
      directory: options.directory ?? plugins.path.join(process.cwd(), '.nogit/bucketsDir'),
      cleanSlate: options.cleanSlate ?? false,
      silent: options.silent ?? false,
    };

    // Store config for middleware and feature configuration
    // If no config provided, create minimal default (for backward compatibility)
    this.config = options.config ?? {
      server: {
        port: this.options.port,
        address: this.options.address,
        silent: this.options.silent,
      },
      storage: {
        directory: this.options.directory,
        cleanSlate: this.options.cleanSlate,
      },
      auth: {
        enabled: false,
        credentials: [{ accessKeyId: 'S3RVER', secretAccessKey: 'S3RVER' }],
      },
      cors: {
        enabled: false,
        allowedOrigins: ['*'],
        allowedMethods: ['GET', 'POST', 'PUT', 'DELETE', 'HEAD', 'OPTIONS'],
        allowedHeaders: ['*'],
        exposedHeaders: ['ETag', 'x-amz-request-id', 'x-amz-version-id'],
        maxAge: 86400,
        allowCredentials: false,
      },
      logging: {
        level: 'info',
        format: 'text',
        enabled: true,
      },
      limits: {
        maxObjectSize: 5 * 1024 * 1024 * 1024,
        maxMetadataSize: 2048,
        requestTimeout: 300000,
      },
    };

    this.logger = new Logger(this.config.logging);
    this.store = new FilesystemStore(this.options.directory);
    this.router = new S3Router();
    this.middlewares = new MiddlewareStack();
@@ -49,20 +91,118 @@ export class Smarts3Server {
   * Setup middleware stack
   */
  private setupMiddlewares(): void {
    // Logger middleware
    if (!this.options.silent) {
    // CORS middleware (must be first to handle preflight requests)
    if (this.config.cors.enabled) {
      this.middlewares.use(async (req, res, ctx, next) => {
        const start = Date.now();
        console.log(`→ ${req.method} ${req.url}`);
        console.log(`  Headers:`, JSON.stringify(req.headers, null, 2).slice(0, 200));
        const origin = req.headers.origin || req.headers.referer;

        // Check if origin is allowed
        const allowedOrigins = this.config.cors.allowedOrigins || ['*'];
        const isOriginAllowed =
          allowedOrigins.includes('*') ||
          (origin && allowedOrigins.includes(origin));

        if (isOriginAllowed) {
          // Set CORS headers
          res.setHeader(
            'Access-Control-Allow-Origin',
            allowedOrigins.includes('*') ? '*' : origin || '*'
          );

          if (this.config.cors.allowCredentials) {
            res.setHeader('Access-Control-Allow-Credentials', 'true');
          }

          // Handle preflight OPTIONS request
          if (req.method === 'OPTIONS') {
            res.setHeader(
              'Access-Control-Allow-Methods',
              (this.config.cors.allowedMethods || []).join(', ')
            );
            res.setHeader(
              'Access-Control-Allow-Headers',
              (this.config.cors.allowedHeaders || []).join(', ')
            );
            if (this.config.cors.maxAge) {
              res.setHeader(
                'Access-Control-Max-Age',
                String(this.config.cors.maxAge)
              );
            }
            res.writeHead(204);
            res.end();
            return; // Don't call next() for OPTIONS
          }

          // Set exposed headers for actual requests
          if (this.config.cors.exposedHeaders && this.config.cors.exposedHeaders.length > 0) {
            res.setHeader(
              'Access-Control-Expose-Headers',
              this.config.cors.exposedHeaders.join(', ')
            );
          }
        }

        await next();
        const duration = Date.now() - start;
        console.log(`← ${req.method} ${req.url} - ${res.statusCode} (${duration}ms)`);
      });
    }

    // TODO: Add authentication middleware
    // TODO: Add CORS middleware
    // Authentication middleware (simple static credentials)
    if (this.config.auth.enabled) {
      this.middlewares.use(async (req, res, ctx, next) => {
        const authHeader = req.headers.authorization;

        // Extract access key from Authorization header
        let accessKeyId: string | undefined;

        if (authHeader) {
          // Support multiple auth formats:
          // 1. AWS accessKeyId:signature
          // 2. AWS4-HMAC-SHA256 Credential=accessKeyId/date/region/service/aws4_request, ...
          if (authHeader.startsWith('AWS ')) {
            accessKeyId = authHeader.substring(4).split(':')[0];
          } else if (authHeader.startsWith('AWS4-HMAC-SHA256')) {
            const credentialMatch = authHeader.match(/Credential=([^/]+)\//);
            accessKeyId = credentialMatch ? credentialMatch[1] : undefined;
          }
        }

        // Check if access key is valid
        const isValid = this.config.auth.credentials.some(
          (cred) => cred.accessKeyId === accessKeyId
        );

        if (!isValid) {
          ctx.throw('AccessDenied', 'Access Denied');
          return;
        }

        await next();
      });
    }

    // Logger middleware
    if (!this.options.silent && this.config.logging.enabled) {
      this.middlewares.use(async (req, res, ctx, next) => {
        const start = Date.now();

        // Log request
        this.logger.request(req.method || 'UNKNOWN', req.url || '/', {
          headers: req.headers,
        });

        await next();

        // Log response
        const duration = Date.now() - start;
        this.logger.response(
          req.method || 'UNKNOWN',
          req.url || '/',
          res.statusCode || 500,
          duration
        );
      });
    }
  }

  /**
@@ -122,11 +262,14 @@ export class Smarts3Server {
  ): Promise<void> {
    const s3Error = err instanceof S3Error ? err : S3Error.fromError(err);

    if (!this.options.silent) {
      console.error(`[S3Error] ${s3Error.code}: ${s3Error.message}`);
      if (s3Error.status >= 500) {
        console.error(err.stack || err);
      }
    // Log the error
    this.logger.s3Error(s3Error.code, s3Error.message, s3Error.status);

    // Log stack trace for server errors
    if (s3Error.status >= 500) {
      this.logger.debug('Error stack trace', {
        stack: err.stack || err.toString(),
      });
    }

    // Send error response
@@ -155,7 +298,10 @@ export class Smarts3Server {
    // Create HTTP server
    this.httpServer = plugins.http.createServer((req, res) => {
      this.handleRequest(req, res).catch((err) => {
        console.error('Fatal error in request handler:', err);
        this.logger.error('Fatal error in request handler', {
          error: err.message,
          stack: err.stack,
        });
        if (!res.headersSent) {
          res.writeHead(500, { 'Content-Type': 'text/plain' });
          res.end('Internal Server Error');
@@ -169,9 +315,7 @@ export class Smarts3Server {
        if (err) {
          reject(err);
        } else {
          if (!this.options.silent) {
            console.log(`S3 server listening on ${this.options.address}:${this.options.port}`);
          }
          this.logger.info(`S3 server listening on ${this.options.address}:${this.options.port}`);
          resolve();
        }
      });
@@ -191,9 +335,7 @@ export class Smarts3Server {
        if (err) {
          reject(err);
        } else {
          if (!this.options.silent) {
            console.log('S3 server stopped');
          }
          this.logger.info('S3 server stopped');
          resolve();
        }
      });
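For readability, the access-key extraction the new auth middleware performs can be read as the standalone helper below; `extractAccessKeyId` is an illustration only (not a function in this diff), and the sample header values are made up:

```typescript
// Mirrors the parsing logic of the auth middleware above.
function extractAccessKeyId(authHeader: string): string | undefined {
  if (authHeader.startsWith('AWS ')) {
    // v2-style header: "AWS <accessKeyId>:<signature>"
    return authHeader.substring(4).split(':')[0];
  }
  if (authHeader.startsWith('AWS4-HMAC-SHA256')) {
    // SigV4 header: "AWS4-HMAC-SHA256 Credential=<accessKeyId>/<date>/<region>/<service>/aws4_request, ..."
    const credentialMatch = authHeader.match(/Credential=([^/]+)\//);
    return credentialMatch ? credentialMatch[1] : undefined;
  }
  return undefined;
}

console.log(extractAccessKeyId('AWS S3RVER:dGVzdHNpZ25hdHVyZQ=='));
// -> 'S3RVER'
console.log(extractAccessKeyId(
  'AWS4-HMAC-SHA256 Credential=S3RVER/20251123/us-east-1/s3/aws4_request, SignedHeaders=host, Signature=abc123'
));
// -> 'S3RVER'
```

Note that the middleware only compares the extracted access key against the configured credentials; the request signature itself is not verified, which is a deliberate simplification for a local development server.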
ts/index.ts (182 changed lines)
@@ -2,39 +2,185 @@ import * as plugins from './plugins.js';
import * as paths from './paths.js';
import { Smarts3Server } from './classes/smarts3-server.js';

export interface ISmarts3ContructorOptions {
/**
 * Authentication configuration
 */
export interface IAuthConfig {
  enabled: boolean;
  credentials: Array<{
    accessKeyId: string;
    secretAccessKey: string;
  }>;
}

/**
 * CORS configuration
 */
export interface ICorsConfig {
  enabled: boolean;
  allowedOrigins?: string[];
  allowedMethods?: string[];
  allowedHeaders?: string[];
  exposedHeaders?: string[];
  maxAge?: number;
  allowCredentials?: boolean;
}

/**
 * Logging configuration
 */
export interface ILoggingConfig {
  level?: 'error' | 'warn' | 'info' | 'debug';
  format?: 'text' | 'json';
  enabled?: boolean;
}

/**
 * Request limits configuration
 */
export interface ILimitsConfig {
  maxObjectSize?: number;
  maxMetadataSize?: number;
  requestTimeout?: number;
}

/**
 * Server configuration
 */
export interface IServerConfig {
  port?: number;
  address?: string;
  silent?: boolean;
}

/**
 * Storage configuration
 */
export interface IStorageConfig {
  directory?: string;
  cleanSlate?: boolean;
}

/**
 * Complete smarts3 configuration
 */
export interface ISmarts3Config {
  server?: IServerConfig;
  storage?: IStorageConfig;
  auth?: IAuthConfig;
  cors?: ICorsConfig;
  logging?: ILoggingConfig;
  limits?: ILimitsConfig;
}

/**
 * Default configuration values
 */
const DEFAULT_CONFIG: ISmarts3Config = {
  server: {
    port: 3000,
    address: '0.0.0.0',
    silent: false,
  },
  storage: {
    directory: paths.bucketsDir,
    cleanSlate: false,
  },
  auth: {
    enabled: false,
    credentials: [
      {
        accessKeyId: 'S3RVER',
        secretAccessKey: 'S3RVER',
      },
    ],
  },
  cors: {
    enabled: false,
    allowedOrigins: ['*'],
    allowedMethods: ['GET', 'POST', 'PUT', 'DELETE', 'HEAD', 'OPTIONS'],
    allowedHeaders: ['*'],
    exposedHeaders: ['ETag', 'x-amz-request-id', 'x-amz-version-id'],
    maxAge: 86400,
    allowCredentials: false,
  },
  logging: {
    level: 'info',
    format: 'text',
    enabled: true,
  },
  limits: {
    maxObjectSize: 5 * 1024 * 1024 * 1024, // 5GB
    maxMetadataSize: 2048,
    requestTimeout: 300000, // 5 minutes
  },
};

/**
 * Merge user config with defaults (deep merge)
 */
function mergeConfig(userConfig: ISmarts3Config): Required<ISmarts3Config> {
  return {
    server: {
      ...DEFAULT_CONFIG.server!,
      ...(userConfig.server || {}),
    },
    storage: {
      ...DEFAULT_CONFIG.storage!,
      ...(userConfig.storage || {}),
    },
    auth: {
      ...DEFAULT_CONFIG.auth!,
      ...(userConfig.auth || {}),
    },
    cors: {
      ...DEFAULT_CONFIG.cors!,
      ...(userConfig.cors || {}),
    },
    logging: {
      ...DEFAULT_CONFIG.logging!,
      ...(userConfig.logging || {}),
    },
    limits: {
      ...DEFAULT_CONFIG.limits!,
      ...(userConfig.limits || {}),
    },
  };
}

/**
 * Main Smarts3 class - production-ready S3-compatible server
 */
export class Smarts3 {
  // STATIC
  public static async createAndStart(
    optionsArg: ConstructorParameters<typeof Smarts3>[0],
  ) {
    const smartS3Instance = new Smarts3(optionsArg);
  public static async createAndStart(configArg: ISmarts3Config = {}) {
    const smartS3Instance = new Smarts3(configArg);
    await smartS3Instance.start();
    return smartS3Instance;
  }

  // INSTANCE
  public options: ISmarts3ContructorOptions;
  public config: Required<ISmarts3Config>;
  public s3Instance: Smarts3Server;

  constructor(optionsArg: ISmarts3ContructorOptions) {
    this.options = optionsArg;
  constructor(configArg: ISmarts3Config = {}) {
    this.config = mergeConfig(configArg);
  }

  public async start() {
    this.s3Instance = new Smarts3Server({
      port: this.options.port || 3000,
      address: '0.0.0.0',
      directory: paths.bucketsDir,
      cleanSlate: this.options.cleanSlate || false,
      silent: false,
      port: this.config.server.port,
      address: this.config.server.address,
      directory: this.config.storage.directory,
      cleanSlate: this.config.storage.cleanSlate,
      silent: this.config.server.silent,
      config: this.config, // Pass full config to server
    });
    await this.s3Instance.start();
    console.log('s3 server is running');

    if (!this.config.server.silent) {
      console.log('s3 server is running');
    }
  }

  public async getS3Descriptor(
@@ -48,11 +194,9 @@ export class Smarts3 {
  }

  public async createBucket(bucketNameArg: string) {
    const smartbucketInstance = new plugins.smartbucket.SmartBucket(
      await this.getS3Descriptor(),
    );
    const bucket = await smartbucketInstance.createBucket(bucketNameArg);
    return bucket;
    // Call the filesystem store directly instead of using the client library
    await this.s3Instance.store.createBucket(bucketNameArg);
    return { name: bucketNameArg };
  }

  public async stop() {
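A small sketch of how the per-section merge in mergeConfig above behaves; the partial config is illustrative, the defaults are the DEFAULT_CONFIG values shown in this diff (mergeConfig is module-internal, so this is conceptual rather than a public API call):

```typescript
const merged = mergeConfig({
  server: { port: 9000 },       // address and silent fall back to DEFAULT_CONFIG.server
  logging: { format: 'json' },  // level and enabled fall back to the defaults
});

console.log(merged.server);   // { port: 9000, address: '0.0.0.0', silent: false }
console.log(merged.logging);  // { level: 'info', format: 'json', enabled: true }
console.log(merged.auth.credentials[0].accessKeyId); // 'S3RVER' (default dev credential)
```

Because each section is merged with a shallow spread, passing e.g. auth: { enabled: true } without credentials keeps the default S3RVER credential pair.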
@@ -7,7 +7,6 @@ import * as url from 'url';
export { path, http, crypto, url };

// @push.rocks scope
import * as smartbucket from '@push.rocks/smartbucket';
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
import * as smartpath from '@push.rocks/smartpath';
import { SmartXml } from '@push.rocks/smartxml';
@@ -15,7 +14,7 @@ import { SmartXml } from '@push.rocks/smartxml';
// Create SmartFs instance with Node.js provider
export const smartfs = new SmartFs(new SmartFsProviderNode());

export { smartbucket, smartpath, SmartXml };
export { smartpath, SmartXml };

// @tsclass scope
import * as tsclass from '@tsclass/tsclass';