2025-11-19 15:32:00 +00:00
|
|
|
import * as qenv from '@push.rocks/qenv';
|
2025-11-19 20:45:37 +00:00
|
|
|
import * as crypto from 'crypto';
|
2025-11-25 14:28:19 +00:00
|
|
|
import * as smartarchive from '@push.rocks/smartarchive';
|
2025-11-27 12:41:38 +00:00
|
|
|
import * as smartbucket from '@push.rocks/smartbucket';
|
2025-11-19 20:45:37 +00:00
|
|
|
import { SmartRegistry } from '../../ts/classes.smartregistry.js';
|
2025-11-27 22:12:52 +00:00
|
|
|
import type { IRegistryConfig, IAuthToken, TRegistryProtocol } from '../../ts/core/interfaces.core.js';
|
|
|
|
|
import type { IAuthProvider, ITokenOptions } from '../../ts/core/interfaces.auth.js';
|
|
|
|
|
import type { IStorageHooks, IStorageHookContext, IBeforePutResult, IBeforeDeleteResult } from '../../ts/core/interfaces.storage.js';
|
2025-12-03 22:16:40 +00:00
|
|
|
import { StaticUpstreamProvider } from '../../ts/upstream/interfaces.upstream.js';
|
|
|
|
|
import type { IUpstreamProvider, IUpstreamResolutionContext, IProtocolUpstreamConfig } from '../../ts/upstream/interfaces.upstream.js';
|
2025-11-19 15:32:00 +00:00
|
|
|
|
|
|
|
|
// Shared environment reader for all helpers below; resolves S3_* variables
// from './' with secrets presumably kept under './.nogit' — TODO confirm
// against the qenv configuration convention used by this project.
const testQenv = new qenv.Qenv('./', './.nogit');
|
|
|
|
|
|
2025-11-27 12:41:38 +00:00
|
|
|
/**
 * Clean up S3 bucket contents for a fresh test run.
 * (Documents `cleanupS3Bucket`, which is declared further below, after
 * `generateTestRunId`.)
 * @param prefix Optional prefix to delete (e.g., 'cargo/', 'npm/', 'composer/')
 */
|
|
|
|
|
/**
|
|
|
|
|
* Generate a unique test run ID for avoiding conflicts between test runs
|
|
|
|
|
* Uses timestamp + random suffix for uniqueness
|
|
|
|
|
*/
|
|
|
|
|
export function generateTestRunId(): string {
|
|
|
|
|
const timestamp = Date.now().toString(36);
|
|
|
|
|
const random = Math.random().toString(36).substring(2, 6);
|
|
|
|
|
return `${timestamp}${random}`;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
export async function cleanupS3Bucket(prefix?: string): Promise<void> {
|
|
|
|
|
const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
|
|
|
|
|
const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
|
|
|
|
|
const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
|
|
|
|
|
const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
|
|
|
|
|
|
|
|
|
|
const s3 = new smartbucket.SmartBucket({
|
|
|
|
|
accessKey: s3AccessKey || 'minioadmin',
|
|
|
|
|
accessSecret: s3SecretKey || 'minioadmin',
|
|
|
|
|
endpoint: s3Endpoint || 'localhost',
|
|
|
|
|
port: parseInt(s3Port || '9000', 10),
|
|
|
|
|
useSsl: false,
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
try {
|
|
|
|
|
const bucket = await s3.getBucket('test-registry');
|
|
|
|
|
if (bucket) {
|
|
|
|
|
if (prefix) {
|
|
|
|
|
// Delete only objects with the given prefix
|
|
|
|
|
const files = await bucket.fastList({ prefix });
|
|
|
|
|
for (const file of files) {
|
|
|
|
|
await bucket.fastRemove({ path: file.name });
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
// Delete all objects in the bucket
|
|
|
|
|
const files = await bucket.fastList({});
|
|
|
|
|
for (const file of files) {
|
|
|
|
|
await bucket.fastRemove({ path: file.name });
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} catch (error) {
|
|
|
|
|
// Bucket might not exist yet, that's fine
|
|
|
|
|
console.log('Cleanup: No bucket to clean or error:', error);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-19 15:32:00 +00:00
|
|
|
/**
|
2025-11-21 17:13:06 +00:00
|
|
|
* Create a test SmartRegistry instance with all protocols enabled
|
2025-11-19 15:32:00 +00:00
|
|
|
*/
|
|
|
|
|
export async function createTestRegistry(): Promise<SmartRegistry> {
|
|
|
|
|
// Read S3 config from env.json
|
2025-11-19 20:45:37 +00:00
|
|
|
const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
|
|
|
|
|
const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
|
2025-11-19 15:32:00 +00:00
|
|
|
const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
|
|
|
|
|
const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
|
|
|
|
|
|
|
|
|
|
const config: IRegistryConfig = {
|
|
|
|
|
storage: {
|
|
|
|
|
accessKey: s3AccessKey || 'minioadmin',
|
|
|
|
|
accessSecret: s3SecretKey || 'minioadmin',
|
|
|
|
|
endpoint: s3Endpoint || 'localhost',
|
|
|
|
|
port: parseInt(s3Port || '9000', 10),
|
|
|
|
|
useSsl: false,
|
|
|
|
|
region: 'us-east-1',
|
|
|
|
|
bucketName: 'test-registry',
|
|
|
|
|
},
|
|
|
|
|
auth: {
|
|
|
|
|
jwtSecret: 'test-secret-key',
|
|
|
|
|
tokenStore: 'memory',
|
|
|
|
|
npmTokens: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
},
|
|
|
|
|
ociTokens: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
realm: 'https://auth.example.com/token',
|
|
|
|
|
service: 'test-registry',
|
|
|
|
|
},
|
2025-11-21 17:13:06 +00:00
|
|
|
pypiTokens: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
},
|
|
|
|
|
rubygemsTokens: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
},
|
2025-11-19 15:32:00 +00:00
|
|
|
},
|
|
|
|
|
oci: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/oci',
|
|
|
|
|
},
|
|
|
|
|
npm: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/npm',
|
|
|
|
|
},
|
2025-11-21 08:58:29 +00:00
|
|
|
maven: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/maven',
|
|
|
|
|
},
|
2025-11-21 09:13:02 +00:00
|
|
|
composer: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/composer',
|
|
|
|
|
},
|
2025-11-21 14:23:18 +00:00
|
|
|
cargo: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/cargo',
|
|
|
|
|
},
|
2025-11-21 17:13:06 +00:00
|
|
|
pypi: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/pypi',
|
|
|
|
|
},
|
|
|
|
|
rubygems: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
basePath: '/rubygems',
|
|
|
|
|
},
|
2025-11-19 15:32:00 +00:00
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const registry = new SmartRegistry(config);
|
|
|
|
|
await registry.init();
|
|
|
|
|
|
|
|
|
|
return registry;
|
|
|
|
|
}
|
|
|
|
|
|
2025-12-03 22:16:40 +00:00
|
|
|
/**
|
|
|
|
|
* Create a test SmartRegistry instance with upstream provider configured
|
|
|
|
|
*/
|
|
|
|
|
export async function createTestRegistryWithUpstream(
|
|
|
|
|
upstreamProvider?: IUpstreamProvider
|
|
|
|
|
): Promise<SmartRegistry> {
|
|
|
|
|
// Read S3 config from env.json
|
|
|
|
|
const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
|
|
|
|
|
const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
|
|
|
|
|
const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
|
|
|
|
|
const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
|
|
|
|
|
|
|
|
|
|
// Default to StaticUpstreamProvider with npm.js configured
|
|
|
|
|
const defaultProvider = new StaticUpstreamProvider({
|
|
|
|
|
npm: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
upstreams: [{ id: 'npmjs', url: 'https://registry.npmjs.org', priority: 1, enabled: true }],
|
|
|
|
|
},
|
|
|
|
|
oci: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
upstreams: [{ id: 'dockerhub', url: 'https://registry-1.docker.io', priority: 1, enabled: true }],
|
|
|
|
|
},
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
const config: IRegistryConfig = {
|
|
|
|
|
storage: {
|
|
|
|
|
accessKey: s3AccessKey || 'minioadmin',
|
|
|
|
|
accessSecret: s3SecretKey || 'minioadmin',
|
|
|
|
|
endpoint: s3Endpoint || 'localhost',
|
|
|
|
|
port: parseInt(s3Port || '9000', 10),
|
|
|
|
|
useSsl: false,
|
|
|
|
|
region: 'us-east-1',
|
|
|
|
|
bucketName: 'test-registry',
|
|
|
|
|
},
|
|
|
|
|
auth: {
|
|
|
|
|
jwtSecret: 'test-secret-key',
|
|
|
|
|
tokenStore: 'memory',
|
|
|
|
|
npmTokens: { enabled: true },
|
|
|
|
|
ociTokens: {
|
|
|
|
|
enabled: true,
|
|
|
|
|
realm: 'https://auth.example.com/token',
|
|
|
|
|
service: 'test-registry',
|
|
|
|
|
},
|
|
|
|
|
pypiTokens: { enabled: true },
|
|
|
|
|
rubygemsTokens: { enabled: true },
|
|
|
|
|
},
|
|
|
|
|
upstreamProvider: upstreamProvider || defaultProvider,
|
|
|
|
|
oci: { enabled: true, basePath: '/oci' },
|
|
|
|
|
npm: { enabled: true, basePath: '/npm' },
|
|
|
|
|
maven: { enabled: true, basePath: '/maven' },
|
|
|
|
|
composer: { enabled: true, basePath: '/composer' },
|
|
|
|
|
cargo: { enabled: true, basePath: '/cargo' },
|
|
|
|
|
pypi: { enabled: true, basePath: '/pypi' },
|
|
|
|
|
rubygems: { enabled: true, basePath: '/rubygems' },
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const registry = new SmartRegistry(config);
|
|
|
|
|
await registry.init();
|
|
|
|
|
|
|
|
|
|
return registry;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Create a mock upstream provider that tracks all calls for testing
|
|
|
|
|
*/
|
|
|
|
|
export function createTrackingUpstreamProvider(
|
|
|
|
|
baseConfig?: Partial<Record<TRegistryProtocol, IProtocolUpstreamConfig>>
|
|
|
|
|
): {
|
|
|
|
|
provider: IUpstreamProvider;
|
|
|
|
|
calls: IUpstreamResolutionContext[];
|
|
|
|
|
} {
|
|
|
|
|
const calls: IUpstreamResolutionContext[] = [];
|
|
|
|
|
|
|
|
|
|
const provider: IUpstreamProvider = {
|
|
|
|
|
async resolveUpstreamConfig(context: IUpstreamResolutionContext) {
|
|
|
|
|
calls.push({ ...context });
|
|
|
|
|
return baseConfig?.[context.protocol] ?? null;
|
|
|
|
|
},
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
return { provider, calls };
|
|
|
|
|
}
|
|
|
|
|
|
2025-11-19 15:32:00 +00:00
|
|
|
/**
|
|
|
|
|
* Helper to create test authentication tokens
|
|
|
|
|
*/
|
|
|
|
|
export async function createTestTokens(registry: SmartRegistry) {
|
|
|
|
|
const authManager = registry.getAuthManager();
|
|
|
|
|
|
|
|
|
|
// Authenticate and create tokens
|
|
|
|
|
const userId = await authManager.authenticate({
|
|
|
|
|
username: 'testuser',
|
|
|
|
|
password: 'testpass',
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (!userId) {
|
|
|
|
|
throw new Error('Failed to authenticate test user');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Create NPM token
|
|
|
|
|
const npmToken = await authManager.createNpmToken(userId, false);
|
|
|
|
|
|
|
|
|
|
// Create OCI token with full access
|
|
|
|
|
const ociToken = await authManager.createOciToken(
|
|
|
|
|
userId,
|
|
|
|
|
['oci:repository:*:*'],
|
|
|
|
|
3600
|
|
|
|
|
);
|
|
|
|
|
|
2025-11-21 08:58:29 +00:00
|
|
|
// Create Maven token with full access
|
|
|
|
|
const mavenToken = await authManager.createMavenToken(userId, false);
|
|
|
|
|
|
2025-11-21 09:13:02 +00:00
|
|
|
// Create Composer token with full access
|
|
|
|
|
const composerToken = await authManager.createComposerToken(userId, false);
|
|
|
|
|
|
2025-11-21 14:23:18 +00:00
|
|
|
// Create Cargo token with full access
|
|
|
|
|
const cargoToken = await authManager.createCargoToken(userId, false);
|
|
|
|
|
|
2025-11-21 17:13:06 +00:00
|
|
|
// Create PyPI token with full access
|
|
|
|
|
const pypiToken = await authManager.createPypiToken(userId, false);
|
|
|
|
|
|
|
|
|
|
// Create RubyGems token with full access
|
|
|
|
|
const rubygemsToken = await authManager.createRubyGemsToken(userId, false);
|
|
|
|
|
|
|
|
|
|
return { npmToken, ociToken, mavenToken, composerToken, cargoToken, pypiToken, rubygemsToken, userId };
|
2025-11-19 15:32:00 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to calculate SHA-256 digest in OCI format
|
|
|
|
|
*/
|
|
|
|
|
export function calculateDigest(data: Buffer): string {
|
|
|
|
|
const hash = crypto.createHash('sha256').update(data).digest('hex');
|
|
|
|
|
return `sha256:${hash}`;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to create a minimal valid OCI manifest
|
|
|
|
|
*/
|
|
|
|
|
export function createTestManifest(configDigest: string, layerDigest: string) {
|
|
|
|
|
return {
|
|
|
|
|
schemaVersion: 2,
|
|
|
|
|
mediaType: 'application/vnd.oci.image.manifest.v1+json',
|
|
|
|
|
config: {
|
|
|
|
|
mediaType: 'application/vnd.oci.image.config.v1+json',
|
|
|
|
|
size: 123,
|
|
|
|
|
digest: configDigest,
|
|
|
|
|
},
|
|
|
|
|
layers: [
|
|
|
|
|
{
|
|
|
|
|
mediaType: 'application/vnd.oci.image.layer.v1.tar+gzip',
|
|
|
|
|
size: 456,
|
|
|
|
|
digest: layerDigest,
|
|
|
|
|
},
|
|
|
|
|
],
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to create a minimal valid NPM packument
|
|
|
|
|
*/
|
|
|
|
|
export function createTestPackument(packageName: string, version: string, tarballData: Buffer) {
|
|
|
|
|
const shasum = crypto.createHash('sha1').update(tarballData).digest('hex');
|
|
|
|
|
const integrity = `sha512-${crypto.createHash('sha512').update(tarballData).digest('base64')}`;
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
name: packageName,
|
|
|
|
|
versions: {
|
|
|
|
|
[version]: {
|
|
|
|
|
name: packageName,
|
|
|
|
|
version: version,
|
|
|
|
|
description: 'Test package',
|
|
|
|
|
main: 'index.js',
|
|
|
|
|
scripts: {},
|
|
|
|
|
dist: {
|
|
|
|
|
shasum: shasum,
|
|
|
|
|
integrity: integrity,
|
|
|
|
|
tarball: `http://localhost:5000/npm/${packageName}/-/${packageName}-${version}.tgz`,
|
|
|
|
|
},
|
|
|
|
|
},
|
|
|
|
|
},
|
|
|
|
|
'dist-tags': {
|
|
|
|
|
latest: version,
|
|
|
|
|
},
|
|
|
|
|
_attachments: {
|
|
|
|
|
[`${packageName}-${version}.tgz`]: {
|
|
|
|
|
content_type: 'application/octet-stream',
|
|
|
|
|
data: tarballData.toString('base64'),
|
|
|
|
|
length: tarballData.length,
|
|
|
|
|
},
|
|
|
|
|
},
|
|
|
|
|
};
|
|
|
|
|
}
|
2025-11-21 08:58:29 +00:00
|
|
|
|
|
|
|
|
/**
 * Helper to create a minimal valid Maven POM file.
 *
 * @param groupId Maven groupId written into the POM.
 * @param artifactId Maven artifactId (also reused as the display name).
 * @param version Artifact version.
 * @param packaging Packaging type; defaults to 'jar'.
 * @returns The POM XML as a string.
 */
export function createTestPom(
  groupId: string,
  artifactId: string,
  version: string,
  packaging: string = 'jar'
): string {
  return `<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
         http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>${groupId}</groupId>
  <artifactId>${artifactId}</artifactId>
  <version>${version}</version>
  <packaging>${packaging}</packaging>
  <name>${artifactId}</name>
  <description>Test Maven artifact</description>
</project>`;
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to create a test JAR file (minimal ZIP with manifest)
|
|
|
|
|
*/
|
|
|
|
|
export function createTestJar(): Buffer {
|
|
|
|
|
// Create a simple JAR structure (just a manifest)
|
|
|
|
|
// In practice, this is a ZIP file with at least META-INF/MANIFEST.MF
|
|
|
|
|
const manifestContent = `Manifest-Version: 1.0
|
|
|
|
|
Created-By: SmartRegistry Test
|
|
|
|
|
`;
|
|
|
|
|
|
|
|
|
|
// For testing, we'll just create a buffer with dummy content
|
|
|
|
|
// Real JAR would be a proper ZIP archive
|
|
|
|
|
return Buffer.from(manifestContent, 'utf-8');
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to calculate Maven checksums
|
|
|
|
|
*/
|
|
|
|
|
export function calculateMavenChecksums(data: Buffer) {
|
|
|
|
|
return {
|
|
|
|
|
md5: crypto.createHash('md5').update(data).digest('hex'),
|
|
|
|
|
sha1: crypto.createHash('sha1').update(data).digest('hex'),
|
|
|
|
|
sha256: crypto.createHash('sha256').update(data).digest('hex'),
|
|
|
|
|
sha512: crypto.createHash('sha512').update(data).digest('hex'),
|
|
|
|
|
};
|
|
|
|
|
}
|
2025-11-21 09:13:02 +00:00
|
|
|
|
|
|
|
|
/**
 * Helper to create a Composer package ZIP using smartarchive.
 *
 * The archive contains a composer.json, a PSR-4 style PHP class under src/
 * and a README — the minimal layout Composer-based tests expect.
 *
 * @param vendorPackage Package name in 'vendor/package' form.
 * @param version Version string written into composer.json.
 * @param options Optional overrides for description, license and authors.
 * @returns ZIP archive bytes.
 */
export async function createComposerZip(
  vendorPackage: string,
  version: string,
  options?: {
    description?: string;
    license?: string[];
    authors?: Array<{ name: string; email?: string }>;
  }
): Promise<Buffer> {
  const zipTools = new smartarchive.ZipTools();

  // composer.json manifest. NOTE(review): the psr-4 key below is the fixed
  // placeholder 'Vendor\TestPackage\' and does not match the namespace
  // derived for the generated class file — confirm this is intentional.
  const composerJson = {
    name: vendorPackage,
    version: version,
    type: 'library',
    description: options?.description || 'Test Composer package',
    license: options?.license || ['MIT'],
    authors: options?.authors || [{ name: 'Test Author', email: 'test@example.com' }],
    require: {
      php: '>=7.4',
    },
    autoload: {
      'psr-4': {
        'Vendor\\TestPackage\\': 'src/',
      },
    },
  };

  // Add a test PHP file
  // Derive a PascalCase PHP namespace from 'vendor/package' (dashes dropped
  // from the package part).
  const [vendor, pkg] = vendorPackage.split('/');
  const namespace = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}\\${pkg.charAt(0).toUpperCase() + pkg.slice(1).replace(/-/g, '')}`;
  const testPhpContent = `<?php

namespace ${namespace};

class TestClass
{
    public function greet(): string
    {
        return "Hello from ${vendorPackage}!";
    }
}
`;

  // Archive entries: manifest, class file, README.
  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: 'composer.json',
      content: Buffer.from(JSON.stringify(composerJson, null, 2), 'utf-8'),
    },
    {
      archivePath: 'src/TestClass.php',
      content: Buffer.from(testPhpContent, 'utf-8'),
    },
    {
      archivePath: 'README.md',
      content: Buffer.from(`# ${vendorPackage}\n\nTest package`, 'utf-8'),
    },
  ];

  return zipTools.createZip(entries);
}
|
2025-11-21 17:13:06 +00:00
|
|
|
|
|
|
|
|
/**
 * Helper to create a test Python wheel file (minimal ZIP structure) using smartarchive.
 *
 * Produces a purelib wheel containing the .dist-info metadata files
 * (METADATA, WHEEL, RECORD, top_level.txt) plus a one-function module.
 *
 * @param packageName Distribution name; dashes are normalized to underscores
 *        for the module and dist-info directory names.
 * @param version Package version.
 * @param pyVersion Python tag used in the WHEEL Tag line; defaults to 'py3'.
 * @returns Wheel (ZIP) bytes.
 */
export async function createPythonWheel(
  packageName: string,
  version: string,
  pyVersion: string = 'py3'
): Promise<Buffer> {
  const zipTools = new smartarchive.ZipTools();

  const normalizedName = packageName.replace(/-/g, '_');
  const distInfoDir = `${normalizedName}-${version}.dist-info`;

  // Create METADATA file
  const metadata = `Metadata-Version: 2.1
Name: ${packageName}
Version: ${version}
Summary: Test Python package
Home-page: https://example.com
Author: Test Author
Author-email: test@example.com
License: MIT
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 3
Requires-Python: >=3.7
Description-Content-Type: text/markdown

# ${packageName}

Test package for SmartRegistry
`;

  // Create WHEEL file
  const wheelContent = `Wheel-Version: 1.0
Generator: test 1.0.0
Root-Is-Purelib: true
Tag: ${pyVersion}-none-any
`;

  // Create a simple Python module
  const moduleContent = `"""${packageName} module"""

__version__ = "${version}"

def hello():
    return "Hello from ${packageName}!"
`;

  // RECORD is intentionally left empty — fine for registry tests that never
  // actually install the wheel.
  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `${distInfoDir}/METADATA`,
      content: Buffer.from(metadata, 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/WHEEL`,
      content: Buffer.from(wheelContent, 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/RECORD`,
      content: Buffer.from('', 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/top_level.txt`,
      content: Buffer.from(normalizedName, 'utf-8'),
    },
    {
      archivePath: `${normalizedName}/__init__.py`,
      content: Buffer.from(moduleContent, 'utf-8'),
    },
  ];

  return zipTools.createZip(entries);
}
|
|
|
|
|
|
|
|
|
|
/**
 * Helper to create a test Python source distribution (sdist) using smartarchive.
 *
 * Produces a .tar.gz with the conventional `<name>-<version>/` top-level
 * prefix containing PKG-INFO, a setup.py and a one-function package module.
 *
 * @param packageName Distribution name; dashes become underscores for the
 *        package directory.
 * @param version Package version.
 * @returns Gzipped tarball bytes.
 */
export async function createPythonSdist(
  packageName: string,
  version: string
): Promise<Buffer> {
  const tarTools = new smartarchive.TarTools();

  const normalizedName = packageName.replace(/-/g, '_');
  const dirPrefix = `${packageName}-${version}`;

  // PKG-INFO
  const pkgInfo = `Metadata-Version: 2.1
Name: ${packageName}
Version: ${version}
Summary: Test Python package
Home-page: https://example.com
Author: Test Author
Author-email: test@example.com
License: MIT
`;

  // setup.py
  const setupPy = `from setuptools import setup, find_packages

setup(
    name="${packageName}",
    version="${version}",
    packages=find_packages(),
    python_requires=">=3.7",
)
`;

  // Module file
  const moduleContent = `"""${packageName} module"""

__version__ = "${version}"

def hello():
    return "Hello from ${packageName}!"
`;

  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `${dirPrefix}/PKG-INFO`,
      content: Buffer.from(pkgInfo, 'utf-8'),
    },
    {
      archivePath: `${dirPrefix}/setup.py`,
      content: Buffer.from(setupPy, 'utf-8'),
    },
    {
      archivePath: `${dirPrefix}/${normalizedName}/__init__.py`,
      content: Buffer.from(moduleContent, 'utf-8'),
    },
  ];

  return tarTools.packFilesToTarGz(entries);
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to calculate PyPI file hashes
|
|
|
|
|
*/
|
|
|
|
|
export function calculatePypiHashes(data: Buffer) {
|
|
|
|
|
return {
|
|
|
|
|
md5: crypto.createHash('md5').update(data).digest('hex'),
|
|
|
|
|
sha256: crypto.createHash('sha256').update(data).digest('hex'),
|
|
|
|
|
blake2b: crypto.createHash('blake2b512').update(data).digest('hex'),
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
 * Helper to create a test RubyGem file (minimal structure) using smartarchive.
 *
 * Mirrors the real .gem layout: a plain (uncompressed) tar archive holding
 * `metadata.gz` (gzipped gemspec YAML) and `data.tar.gz` (gzipped tar with
 * the gem's files).
 *
 * @param gemName Gem name (also used for lib/<gemName>.rb and the module name).
 * @param version Gem version.
 * @param platform Gem platform; defaults to 'ruby'.
 * @returns .gem bytes (plain tar).
 */
export async function createRubyGem(
  gemName: string,
  version: string,
  platform: string = 'ruby'
): Promise<Buffer> {
  const tarTools = new smartarchive.TarTools();
  const gzipTools = new smartarchive.GzipTools();

  // Create metadata.gz (simplified)
  const metadataYaml = `--- !ruby/object:Gem::Specification
name: ${gemName}
version: !ruby/object:Gem::Version
  version: ${version}
platform: ${platform}
authors:
- Test Author
autorequire:
bindir: bin
cert_chain: []
date: ${new Date().toISOString().split('T')[0]}
dependencies: []
description: Test RubyGem
email: test@example.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- lib/${gemName}.rb
homepage: https://example.com
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '2.7'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.0.0
signing_key:
specification_version: 4
summary: Test gem for SmartRegistry
test_files: []
`;

  const metadataGz = await gzipTools.compress(Buffer.from(metadataYaml, 'utf-8'));

  // Create data.tar.gz content
  // NOTE(review): `#{gemName}` below is Ruby-style interpolation inside a JS
  // template literal, so the literal characters `#{gemName}` end up in the
  // generated Ruby source (the Ruby local would not exist at runtime).
  // Confirm this is intended — sibling helpers embed the name via `${...}`.
  const libContent = `# ${gemName}

module ${gemName.charAt(0).toUpperCase() + gemName.slice(1).replace(/-/g, '')}
  VERSION = "${version}"

  def self.hello
    "Hello from #{gemName}!"
  end
end
`;

  const dataEntries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `lib/${gemName}.rb`,
      content: Buffer.from(libContent, 'utf-8'),
    },
  ];

  const dataTarGz = await tarTools.packFilesToTarGz(dataEntries);

  // Assemble the outer gem from metadata.gz and data.tar.gz.
  const gemEntries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: 'metadata.gz',
      content: metadataGz,
    },
    {
      archivePath: 'data.tar.gz',
      content: dataTarGz,
    },
  ];

  // RubyGems .gem files are plain tar archives (NOT gzipped), containing metadata.gz and data.tar.gz
  return tarTools.packFiles(gemEntries);
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Helper to calculate RubyGems checksums
|
|
|
|
|
*/
|
|
|
|
|
export function calculateRubyGemsChecksums(data: Buffer) {
|
|
|
|
|
return {
|
|
|
|
|
md5: crypto.createHash('md5').update(data).digest('hex'),
|
|
|
|
|
sha256: crypto.createHash('sha256').update(data).digest('hex'),
|
|
|
|
|
};
|
|
|
|
|
}
|
2025-11-27 22:12:52 +00:00
|
|
|
|
|
|
|
|
// ============================================================================
|
|
|
|
|
// Enterprise Extensibility Test Helpers
|
|
|
|
|
// ============================================================================
|
|
|
|
|
|
|
|
|
|
/**
 * Create a mock auth provider for testing pluggable authentication.
 * Allows customizing behavior for different test scenarios.
 *
 * Backed by an in-memory Map of tokenId -> IAuthToken shared by all the
 * closures. Any member can be replaced via `overrides` (spread last, so a
 * supplied override wins over the default implementation).
 */
export function createMockAuthProvider(overrides?: Partial<IAuthProvider>): IAuthProvider {
  // In-memory token store: tokenId -> token record.
  const tokens = new Map<string, IAuthToken>();

  return {
    init: async () => {},
    authenticate: async (credentials) => {
      // Default: always authenticate successfully
      return credentials.username;
    },
    validateToken: async (token, protocol) => {
      const stored = tokens.get(token);
      // A stored token matches when no protocol filter is given, or the
      // token was issued for that protocol.
      if (stored && (!protocol || stored.type === protocol)) {
        return stored;
      }
      // Well-known sentinel accepted without prior createToken — lets tests
      // exercise the happy path without setup.
      if (token === 'valid-mock-token') {
        return {
          type: 'npm' as TRegistryProtocol,
          userId: 'mock-user',
          scopes: ['npm:*:*:*'],
        };
      }
      return null;
    },
    createToken: async (userId, protocol, options) => {
      // Date.now() keeps ids unique enough for sequential test calls.
      const tokenId = `mock-${protocol}-${Date.now()}`;
      const authToken: IAuthToken = {
        type: protocol,
        userId,
        scopes: options?.scopes || [`${protocol}:*:*:*`],
        readonly: options?.readonly,
        expiresAt: options?.expiresIn ? new Date(Date.now() + options.expiresIn * 1000) : undefined,
      };
      tokens.set(tokenId, authToken);
      return tokenId;
    },
    revokeToken: async (token) => {
      tokens.delete(token);
    },
    authorize: async (token, resource, action) => {
      if (!token) return false;
      // Readonly tokens may not perform mutating actions; everything else
      // is allowed regardless of `resource`.
      if (token.readonly && ['write', 'push', 'delete'].includes(action)) {
        return false;
      }
      return true;
    },
    listUserTokens: async (userId) => {
      const result: Array<{ key: string; readonly: boolean; created: string; protocol?: TRegistryProtocol }> = [];
      for (const [key, token] of tokens.entries()) {
        if (token.userId === userId) {
          result.push({
            // Only a truncated pseudo-hash of the id is exposed, mimicking
            // providers that never return the raw token.
            key: `hash-${key.substring(0, 8)}`,
            readonly: token.readonly || false,
            created: new Date().toISOString(),
            protocol: token.type,
          });
        }
      }
      return result;
    },
    ...overrides,
  };
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Create test storage hooks that track all calls.
|
|
|
|
|
* Useful for verifying hook invocation order and parameters.
|
|
|
|
|
*/
|
|
|
|
|
export function createTrackingHooks(options?: {
|
|
|
|
|
beforePutAllowed?: boolean;
|
|
|
|
|
beforeDeleteAllowed?: boolean;
|
|
|
|
|
throwOnAfterPut?: boolean;
|
|
|
|
|
throwOnAfterGet?: boolean;
|
|
|
|
|
}): {
|
|
|
|
|
hooks: IStorageHooks;
|
|
|
|
|
calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }>;
|
|
|
|
|
} {
|
|
|
|
|
const calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }> = [];
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
calls,
|
|
|
|
|
hooks: {
|
|
|
|
|
beforePut: async (ctx) => {
|
|
|
|
|
calls.push({ method: 'beforePut', context: ctx, timestamp: Date.now() });
|
|
|
|
|
return {
|
|
|
|
|
allowed: options?.beforePutAllowed !== false,
|
|
|
|
|
reason: options?.beforePutAllowed === false ? 'Blocked by test' : undefined,
|
|
|
|
|
};
|
|
|
|
|
},
|
|
|
|
|
afterPut: async (ctx) => {
|
|
|
|
|
calls.push({ method: 'afterPut', context: ctx, timestamp: Date.now() });
|
|
|
|
|
if (options?.throwOnAfterPut) {
|
|
|
|
|
throw new Error('Test error in afterPut');
|
|
|
|
|
}
|
|
|
|
|
},
|
|
|
|
|
beforeDelete: async (ctx) => {
|
|
|
|
|
calls.push({ method: 'beforeDelete', context: ctx, timestamp: Date.now() });
|
|
|
|
|
return {
|
|
|
|
|
allowed: options?.beforeDeleteAllowed !== false,
|
|
|
|
|
reason: options?.beforeDeleteAllowed === false ? 'Blocked by test' : undefined,
|
|
|
|
|
};
|
|
|
|
|
},
|
|
|
|
|
afterDelete: async (ctx) => {
|
|
|
|
|
calls.push({ method: 'afterDelete', context: ctx, timestamp: Date.now() });
|
|
|
|
|
},
|
|
|
|
|
afterGet: async (ctx) => {
|
|
|
|
|
calls.push({ method: 'afterGet', context: ctx, timestamp: Date.now() });
|
|
|
|
|
if (options?.throwOnAfterGet) {
|
|
|
|
|
throw new Error('Test error in afterGet');
|
|
|
|
|
}
|
|
|
|
|
},
|
|
|
|
|
},
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Create a blocking storage hooks implementation for quota testing.
|
|
|
|
|
*/
|
|
|
|
|
export function createQuotaHooks(maxSizeBytes: number): {
|
|
|
|
|
hooks: IStorageHooks;
|
|
|
|
|
currentUsage: { bytes: number };
|
|
|
|
|
} {
|
|
|
|
|
const currentUsage = { bytes: 0 };
|
|
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
currentUsage,
|
|
|
|
|
hooks: {
|
|
|
|
|
beforePut: async (ctx) => {
|
|
|
|
|
const size = ctx.metadata?.size || 0;
|
|
|
|
|
if (currentUsage.bytes + size > maxSizeBytes) {
|
|
|
|
|
return { allowed: false, reason: `Quota exceeded: ${currentUsage.bytes + size} > ${maxSizeBytes}` };
|
|
|
|
|
}
|
|
|
|
|
return { allowed: true };
|
|
|
|
|
},
|
|
|
|
|
afterPut: async (ctx) => {
|
|
|
|
|
currentUsage.bytes += ctx.metadata?.size || 0;
|
|
|
|
|
},
|
|
|
|
|
afterDelete: async (ctx) => {
|
|
|
|
|
currentUsage.bytes -= ctx.metadata?.size || 0;
|
|
|
|
|
if (currentUsage.bytes < 0) currentUsage.bytes = 0;
|
|
|
|
|
},
|
|
|
|
|
},
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
 * Create a SmartBucket storage backend for upstream cache testing.
 *
 * Connects using the S3_* env vars (local MinIO defaults otherwise), creates
 * a uniquely named throwaway bucket, and wraps it in a minimal
 * get/put/delete/list object-storage facade.
 *
 * @returns The storage facade, the underlying smartbucket Bucket, and a
 *          `cleanup` callback that empties and removes the bucket
 *          (cleanup errors are intentionally swallowed).
 */
export async function createTestStorageBackend(): Promise<{
  storage: {
    getObject: (key: string) => Promise<Buffer | null>;
    putObject: (key: string, data: Buffer) => Promise<void>;
    deleteObject: (key: string) => Promise<void>;
    listObjects: (prefix: string) => Promise<string[]>;
  };
  bucket: smartbucket.Bucket;
  cleanup: () => Promise<void>;
}> {
  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');

  const s3 = new smartbucket.SmartBucket({
    accessKey: s3AccessKey || 'minioadmin',
    accessSecret: s3SecretKey || 'minioadmin',
    endpoint: s3Endpoint || 'localhost',
    port: parseInt(s3Port || '9000', 10),
    useSsl: false,
  });

  // Unique bucket name per run so concurrent test runs cannot collide.
  const testRunId = generateTestRunId();
  const bucketName = 'test-cache-' + testRunId.substring(0, 8);
  const bucket = await s3.createBucket(bucketName);

  const storage = {
    // Returns null for missing keys or on any read error (treated as a miss).
    getObject: async (key: string): Promise<Buffer | null> => {
      try {
        const file = await bucket.fastGet({ path: key });
        if (!file) return null;
        // Drain the file's read stream into one contiguous Buffer.
        const stream = await file.createReadStream();
        const chunks: Buffer[] = [];
        for await (const chunk of stream) {
          chunks.push(Buffer.from(chunk));
        }
        return Buffer.concat(chunks);
      } catch {
        return null;
      }
    },
    putObject: async (key: string, data: Buffer): Promise<void> => {
      await bucket.fastPut({ path: key, contents: data, overwrite: true });
    },
    deleteObject: async (key: string): Promise<void> => {
      await bucket.fastRemove({ path: key });
    },
    listObjects: async (prefix: string): Promise<string[]> => {
      const files = await bucket.fastList({ prefix });
      return files.map(f => f.name);
    },
  };

  // Best-effort teardown: remove every object, then the bucket itself.
  const cleanup = async () => {
    try {
      const files = await bucket.fastList({});
      for (const file of files) {
        await bucket.fastRemove({ path: file.name });
      }
      await s3.removeBucket(bucketName);
    } catch {
      // Ignore cleanup errors
    }
  };

  return { storage, bucket, cleanup };
}
|