feat(core): Add core registry infrastructure: storage, auth, upstream cache, and protocol handlers
This commit is contained in:
11
changelog.md
11
changelog.md
@@ -1,5 +1,16 @@
|
||||
# Changelog

## 2025-11-27 - 2.6.0 - feat(core)

Add core registry infrastructure: storage, auth, upstream cache, and protocol handlers

- Introduce RegistryStorage: unified storage abstraction with hook support (before/after put/delete/get) and helpers for OCI, NPM, Maven, Cargo, Composer, PyPI, and RubyGems paths and operations
- Add DefaultAuthProvider and AuthManager: in-memory token store, UUID tokens for package protocols, OCI JWT creation/validation, token lifecycle (create/validate/revoke) and authorization checking
- Add SmartRegistry orchestrator to initialize and route requests to protocol handlers (OCI, NPM, Maven, Cargo, Composer, PyPI, RubyGems)
- Implement upstream subsystem: UpstreamCache (in-memory + optional S3 persistence), BaseUpstream with multi-upstream routing, scope rules, retries, TTLs, stale-while-revalidate and negative caching
- Add circuit breaker implementation for upstream resilience with exponential backoff and per-upstream breakers
- Add protocol implementations and helpers: NpmRegistry/NpmUpstream (packument/tarball handling and tarball URL rewriting), PypiRegistry (PEP 503/691 support, uploads, metadata), MavenRegistry (artifact/metadata handling and checksum generation), CargoRegistry (sparse index, publish/download/yank)
- Utility exports and helpers: buffer helpers, plugins aggregator, path helpers, and various protocol-specific helper modules

## 2025-11-27 - 2.5.0 - feat(pypi,rubygems)

Add PyPI and RubyGems protocol implementations, upstream caching, and auth/storage improvements
@@ -3,7 +3,9 @@ import * as crypto from 'crypto';
|
||||
import * as smartarchive from '@push.rocks/smartarchive';
|
||||
import * as smartbucket from '@push.rocks/smartbucket';
|
||||
import { SmartRegistry } from '../../ts/classes.smartregistry.js';
|
||||
import type { IRegistryConfig } from '../../ts/core/interfaces.core.js';
|
||||
import type { IRegistryConfig, IAuthToken, TRegistryProtocol } from '../../ts/core/interfaces.core.js';
|
||||
import type { IAuthProvider, ITokenOptions } from '../../ts/core/interfaces.auth.js';
|
||||
import type { IStorageHooks, IStorageHookContext, IBeforePutResult, IBeforeDeleteResult } from '../../ts/core/interfaces.storage.js';
|
||||
|
||||
const testQenv = new qenv.Qenv('./', './.nogit');
|
||||
|
||||
@@ -608,3 +610,228 @@ export function calculateRubyGemsChecksums(data: Buffer) {
|
||||
sha256: crypto.createHash('sha256').update(data).digest('hex'),
|
||||
};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Enterprise Extensibility Test Helpers
|
||||
// ============================================================================
|
||||
|
||||
/**
|
||||
* Create a mock auth provider for testing pluggable authentication.
|
||||
* Allows customizing behavior for different test scenarios.
|
||||
*/
|
||||
export function createMockAuthProvider(overrides?: Partial<IAuthProvider>): IAuthProvider {
|
||||
const tokens = new Map<string, IAuthToken>();
|
||||
|
||||
return {
|
||||
init: async () => {},
|
||||
authenticate: async (credentials) => {
|
||||
// Default: always authenticate successfully
|
||||
return credentials.username;
|
||||
},
|
||||
validateToken: async (token, protocol) => {
|
||||
const stored = tokens.get(token);
|
||||
if (stored && (!protocol || stored.type === protocol)) {
|
||||
return stored;
|
||||
}
|
||||
// Mock token for tests
|
||||
if (token === 'valid-mock-token') {
|
||||
return {
|
||||
type: 'npm' as TRegistryProtocol,
|
||||
userId: 'mock-user',
|
||||
scopes: ['npm:*:*:*'],
|
||||
};
|
||||
}
|
||||
return null;
|
||||
},
|
||||
createToken: async (userId, protocol, options) => {
|
||||
const tokenId = `mock-${protocol}-${Date.now()}`;
|
||||
const authToken: IAuthToken = {
|
||||
type: protocol,
|
||||
userId,
|
||||
scopes: options?.scopes || [`${protocol}:*:*:*`],
|
||||
readonly: options?.readonly,
|
||||
expiresAt: options?.expiresIn ? new Date(Date.now() + options.expiresIn * 1000) : undefined,
|
||||
};
|
||||
tokens.set(tokenId, authToken);
|
||||
return tokenId;
|
||||
},
|
||||
revokeToken: async (token) => {
|
||||
tokens.delete(token);
|
||||
},
|
||||
authorize: async (token, resource, action) => {
|
||||
if (!token) return false;
|
||||
if (token.readonly && ['write', 'push', 'delete'].includes(action)) {
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
},
|
||||
listUserTokens: async (userId) => {
|
||||
const result: Array<{ key: string; readonly: boolean; created: string; protocol?: TRegistryProtocol }> = [];
|
||||
for (const [key, token] of tokens.entries()) {
|
||||
if (token.userId === userId) {
|
||||
result.push({
|
||||
key: `hash-${key.substring(0, 8)}`,
|
||||
readonly: token.readonly || false,
|
||||
created: new Date().toISOString(),
|
||||
protocol: token.type,
|
||||
});
|
||||
}
|
||||
}
|
||||
return result;
|
||||
},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create test storage hooks that track all calls.
|
||||
* Useful for verifying hook invocation order and parameters.
|
||||
*/
|
||||
export function createTrackingHooks(options?: {
|
||||
beforePutAllowed?: boolean;
|
||||
beforeDeleteAllowed?: boolean;
|
||||
throwOnAfterPut?: boolean;
|
||||
throwOnAfterGet?: boolean;
|
||||
}): {
|
||||
hooks: IStorageHooks;
|
||||
calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }>;
|
||||
} {
|
||||
const calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }> = [];
|
||||
|
||||
return {
|
||||
calls,
|
||||
hooks: {
|
||||
beforePut: async (ctx) => {
|
||||
calls.push({ method: 'beforePut', context: ctx, timestamp: Date.now() });
|
||||
return {
|
||||
allowed: options?.beforePutAllowed !== false,
|
||||
reason: options?.beforePutAllowed === false ? 'Blocked by test' : undefined,
|
||||
};
|
||||
},
|
||||
afterPut: async (ctx) => {
|
||||
calls.push({ method: 'afterPut', context: ctx, timestamp: Date.now() });
|
||||
if (options?.throwOnAfterPut) {
|
||||
throw new Error('Test error in afterPut');
|
||||
}
|
||||
},
|
||||
beforeDelete: async (ctx) => {
|
||||
calls.push({ method: 'beforeDelete', context: ctx, timestamp: Date.now() });
|
||||
return {
|
||||
allowed: options?.beforeDeleteAllowed !== false,
|
||||
reason: options?.beforeDeleteAllowed === false ? 'Blocked by test' : undefined,
|
||||
};
|
||||
},
|
||||
afterDelete: async (ctx) => {
|
||||
calls.push({ method: 'afterDelete', context: ctx, timestamp: Date.now() });
|
||||
},
|
||||
afterGet: async (ctx) => {
|
||||
calls.push({ method: 'afterGet', context: ctx, timestamp: Date.now() });
|
||||
if (options?.throwOnAfterGet) {
|
||||
throw new Error('Test error in afterGet');
|
||||
}
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a blocking storage hooks implementation for quota testing.
|
||||
*/
|
||||
export function createQuotaHooks(maxSizeBytes: number): {
|
||||
hooks: IStorageHooks;
|
||||
currentUsage: { bytes: number };
|
||||
} {
|
||||
const currentUsage = { bytes: 0 };
|
||||
|
||||
return {
|
||||
currentUsage,
|
||||
hooks: {
|
||||
beforePut: async (ctx) => {
|
||||
const size = ctx.metadata?.size || 0;
|
||||
if (currentUsage.bytes + size > maxSizeBytes) {
|
||||
return { allowed: false, reason: `Quota exceeded: ${currentUsage.bytes + size} > ${maxSizeBytes}` };
|
||||
}
|
||||
return { allowed: true };
|
||||
},
|
||||
afterPut: async (ctx) => {
|
||||
currentUsage.bytes += ctx.metadata?.size || 0;
|
||||
},
|
||||
afterDelete: async (ctx) => {
|
||||
currentUsage.bytes -= ctx.metadata?.size || 0;
|
||||
if (currentUsage.bytes < 0) currentUsage.bytes = 0;
|
||||
},
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a SmartBucket storage backend for upstream cache testing.
|
||||
*/
|
||||
export async function createTestStorageBackend(): Promise<{
|
||||
storage: {
|
||||
getObject: (key: string) => Promise<Buffer | null>;
|
||||
putObject: (key: string, data: Buffer) => Promise<void>;
|
||||
deleteObject: (key: string) => Promise<void>;
|
||||
listObjects: (prefix: string) => Promise<string[]>;
|
||||
};
|
||||
bucket: smartbucket.Bucket;
|
||||
cleanup: () => Promise<void>;
|
||||
}> {
|
||||
const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
|
||||
const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
|
||||
const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
|
||||
const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
|
||||
|
||||
const s3 = new smartbucket.SmartBucket({
|
||||
accessKey: s3AccessKey || 'minioadmin',
|
||||
accessSecret: s3SecretKey || 'minioadmin',
|
||||
endpoint: s3Endpoint || 'localhost',
|
||||
port: parseInt(s3Port || '9000', 10),
|
||||
useSsl: false,
|
||||
});
|
||||
|
||||
const testRunId = generateTestRunId();
|
||||
const bucketName = 'test-cache-' + testRunId.substring(0, 8);
|
||||
const bucket = await s3.createBucket(bucketName);
|
||||
|
||||
const storage = {
|
||||
getObject: async (key: string): Promise<Buffer | null> => {
|
||||
try {
|
||||
const file = await bucket.fastGet({ path: key });
|
||||
if (!file) return null;
|
||||
const stream = await file.createReadStream();
|
||||
const chunks: Buffer[] = [];
|
||||
for await (const chunk of stream) {
|
||||
chunks.push(Buffer.from(chunk));
|
||||
}
|
||||
return Buffer.concat(chunks);
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
},
|
||||
putObject: async (key: string, data: Buffer): Promise<void> => {
|
||||
await bucket.fastPut({ path: key, contents: data, overwrite: true });
|
||||
},
|
||||
deleteObject: async (key: string): Promise<void> => {
|
||||
await bucket.fastRemove({ path: key });
|
||||
},
|
||||
listObjects: async (prefix: string): Promise<string[]> => {
|
||||
const files = await bucket.fastList({ prefix });
|
||||
return files.map(f => f.name);
|
||||
},
|
||||
};
|
||||
|
||||
const cleanup = async () => {
|
||||
try {
|
||||
const files = await bucket.fastList({});
|
||||
for (const file of files) {
|
||||
await bucket.fastRemove({ path: file.name });
|
||||
}
|
||||
await s3.removeBucket(bucketName);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
};
|
||||
|
||||
return { storage, bucket, cleanup };
|
||||
}
|
||||
|
||||
412
test/test.auth.provider.ts
Normal file
412
test/test.auth.provider.ts
Normal file
@@ -0,0 +1,412 @@
|
||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||
import { DefaultAuthProvider } from '../ts/core/classes.defaultauthprovider.js';
|
||||
import { AuthManager } from '../ts/core/classes.authmanager.js';
|
||||
import type { IAuthProvider } from '../ts/core/interfaces.auth.js';
|
||||
import type { IAuthConfig, IAuthToken } from '../ts/core/interfaces.core.js';
|
||||
import { createMockAuthProvider } from './helpers/registry.js';
|
||||
|
||||
// ============================================================================
|
||||
// Test State
|
||||
// ============================================================================
|
||||
|
||||
let provider: DefaultAuthProvider;
|
||||
let authConfig: IAuthConfig;
|
||||
|
||||
// ============================================================================
|
||||
// Setup
|
||||
// ============================================================================
|
||||
|
||||
tap.test('setup: should create DefaultAuthProvider', async () => {
|
||||
authConfig = {
|
||||
jwtSecret: 'test-secret-key-for-jwt-signing',
|
||||
tokenStore: 'memory',
|
||||
npmTokens: { enabled: true },
|
||||
ociTokens: {
|
||||
enabled: true,
|
||||
realm: 'https://auth.example.com/token',
|
||||
service: 'test-registry',
|
||||
},
|
||||
mavenTokens: { enabled: true },
|
||||
cargoTokens: { enabled: true },
|
||||
composerTokens: { enabled: true },
|
||||
pypiTokens: { enabled: true },
|
||||
rubygemsTokens: { enabled: true },
|
||||
};
|
||||
|
||||
provider = new DefaultAuthProvider(authConfig);
|
||||
await provider.init();
|
||||
expect(provider).toBeInstanceOf(DefaultAuthProvider);
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Authentication Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('authenticate: should authenticate new user (auto-registration)', async () => {
|
||||
const userId = await provider.authenticate({
|
||||
username: 'newuser',
|
||||
password: 'newpassword',
|
||||
});
|
||||
|
||||
expect(userId).toEqual('newuser');
|
||||
});
|
||||
|
||||
tap.test('authenticate: should authenticate existing user with correct password', async () => {
|
||||
// First registration
|
||||
await provider.authenticate({
|
||||
username: 'existinguser',
|
||||
password: 'correctpass',
|
||||
});
|
||||
|
||||
// Second authentication with same credentials
|
||||
const userId = await provider.authenticate({
|
||||
username: 'existinguser',
|
||||
password: 'correctpass',
|
||||
});
|
||||
|
||||
expect(userId).toEqual('existinguser');
|
||||
});
|
||||
|
||||
tap.test('authenticate: should reject authentication with wrong password', async () => {
|
||||
// First registration
|
||||
await provider.authenticate({
|
||||
username: 'passworduser',
|
||||
password: 'originalpass',
|
||||
});
|
||||
|
||||
// Attempt with wrong password
|
||||
const userId = await provider.authenticate({
|
||||
username: 'passworduser',
|
||||
password: 'wrongpass',
|
||||
});
|
||||
|
||||
expect(userId).toBeNull();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Token Creation Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('createToken: should create NPM token with correct scopes', async () => {
|
||||
const token = await provider.createToken('testuser', 'npm', {
|
||||
scopes: ['npm:package:*:*'],
|
||||
});
|
||||
|
||||
expect(token).toBeTruthy();
|
||||
expect(typeof token).toEqual('string');
|
||||
|
||||
// Validate the token
|
||||
const validated = await provider.validateToken(token, 'npm');
|
||||
expect(validated).toBeTruthy();
|
||||
expect(validated!.type).toEqual('npm');
|
||||
expect(validated!.userId).toEqual('testuser');
|
||||
expect(validated!.scopes).toContain('npm:package:*:*');
|
||||
});
|
||||
|
||||
tap.test('createToken: should create Maven token', async () => {
|
||||
const token = await provider.createToken('mavenuser', 'maven', {
|
||||
readonly: true,
|
||||
});
|
||||
|
||||
expect(token).toBeTruthy();
|
||||
|
||||
const validated = await provider.validateToken(token, 'maven');
|
||||
expect(validated).toBeTruthy();
|
||||
expect(validated!.type).toEqual('maven');
|
||||
expect(validated!.readonly).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('createToken: should create OCI JWT token with correct claims', async () => {
|
||||
const token = await provider.createToken('ociuser', 'oci', {
|
||||
scopes: ['oci:repository:myrepo:push', 'oci:repository:myrepo:pull'],
|
||||
expiresIn: 3600,
|
||||
});
|
||||
|
||||
expect(token).toBeTruthy();
|
||||
// OCI tokens are JWTs (contain dots)
|
||||
expect(token.split('.').length).toEqual(3);
|
||||
|
||||
const validated = await provider.validateToken(token, 'oci');
|
||||
expect(validated).toBeTruthy();
|
||||
expect(validated!.type).toEqual('oci');
|
||||
expect(validated!.userId).toEqual('ociuser');
|
||||
expect(validated!.scopes.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
tap.test('createToken: should create token with expiration', async () => {
|
||||
const token = await provider.createToken('expiryuser', 'npm', {
|
||||
expiresIn: 60, // 60 seconds
|
||||
});
|
||||
|
||||
const validated = await provider.validateToken(token, 'npm');
|
||||
expect(validated).toBeTruthy();
|
||||
expect(validated!.expiresAt).toBeTruthy();
|
||||
expect(validated!.expiresAt!.getTime()).toBeGreaterThan(Date.now());
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Token Validation Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('validateToken: should validate UUID token (NPM, Maven, etc.)', async () => {
|
||||
const npmToken = await provider.createToken('validateuser', 'npm');
|
||||
const validated = await provider.validateToken(npmToken);
|
||||
|
||||
expect(validated).toBeTruthy();
|
||||
expect(validated!.type).toEqual('npm');
|
||||
expect(validated!.userId).toEqual('validateuser');
|
||||
});
|
||||
|
||||
tap.test('validateToken: should validate OCI JWT token', async () => {
|
||||
const ociToken = await provider.createToken('ocivalidate', 'oci', {
|
||||
scopes: ['oci:repository:*:*'],
|
||||
});
|
||||
|
||||
const validated = await provider.validateToken(ociToken, 'oci');
|
||||
|
||||
expect(validated).toBeTruthy();
|
||||
expect(validated!.type).toEqual('oci');
|
||||
expect(validated!.userId).toEqual('ocivalidate');
|
||||
});
|
||||
|
||||
tap.test('validateToken: should reject expired tokens', async () => {
|
||||
const token = await provider.createToken('expireduser', 'npm', {
|
||||
expiresIn: -1, // Already expired (in the past)
|
||||
});
|
||||
|
||||
// The token should be created but will fail validation due to expiry
|
||||
const validated = await provider.validateToken(token, 'npm');
|
||||
|
||||
// Token should be rejected because it's expired
|
||||
expect(validated).toBeNull();
|
||||
});
|
||||
|
||||
tap.test('validateToken: should reject invalid token', async () => {
|
||||
const validated = await provider.validateToken('invalid-random-token');
|
||||
expect(validated).toBeNull();
|
||||
});
|
||||
|
||||
tap.test('validateToken: should reject token with wrong protocol', async () => {
|
||||
const npmToken = await provider.createToken('protocoluser', 'npm');
|
||||
|
||||
// Try to validate as Maven token
|
||||
const validated = await provider.validateToken(npmToken, 'maven');
|
||||
expect(validated).toBeNull();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Token Revocation Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('revokeToken: should revoke tokens', async () => {
|
||||
const token = await provider.createToken('revokeuser', 'npm');
|
||||
|
||||
// Verify token works before revocation
|
||||
let validated = await provider.validateToken(token);
|
||||
expect(validated).toBeTruthy();
|
||||
|
||||
// Revoke the token
|
||||
await provider.revokeToken(token);
|
||||
|
||||
// Token should no longer be valid
|
||||
validated = await provider.validateToken(token);
|
||||
expect(validated).toBeNull();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Authorization Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('authorize: should authorize read actions for readonly tokens', async () => {
|
||||
const token = await provider.createToken('readonlyuser', 'npm', {
|
||||
readonly: true,
|
||||
scopes: ['npm:package:*:read'],
|
||||
});
|
||||
|
||||
const validated = await provider.validateToken(token);
|
||||
|
||||
const canRead = await provider.authorize(validated, 'npm:package:lodash', 'read');
|
||||
expect(canRead).toBeTrue();
|
||||
|
||||
const canPull = await provider.authorize(validated, 'npm:package:lodash', 'pull');
|
||||
expect(canPull).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('authorize: should deny write actions for readonly tokens', async () => {
|
||||
const token = await provider.createToken('readonlyuser2', 'npm', {
|
||||
readonly: true,
|
||||
scopes: ['npm:package:*:*'],
|
||||
});
|
||||
|
||||
const validated = await provider.validateToken(token);
|
||||
|
||||
const canWrite = await provider.authorize(validated, 'npm:package:lodash', 'write');
|
||||
expect(canWrite).toBeFalse();
|
||||
|
||||
const canPush = await provider.authorize(validated, 'npm:package:lodash', 'push');
|
||||
expect(canPush).toBeFalse();
|
||||
|
||||
const canDelete = await provider.authorize(validated, 'npm:package:lodash', 'delete');
|
||||
expect(canDelete).toBeFalse();
|
||||
});
|
||||
|
||||
tap.test('authorize: should match scopes with wildcards', async () => {
|
||||
// The scope system uses literal * as wildcard, not glob patterns
|
||||
// npm:*:*:* means "all types, all names, all actions under npm"
|
||||
const token = await provider.createToken('wildcarduser', 'npm', {
|
||||
scopes: ['npm:*:*:*'],
|
||||
});
|
||||
|
||||
const validated = await provider.validateToken(token);
|
||||
|
||||
// Should match any npm resource with full wildcard scope
|
||||
const canAccessAnyPackage = await provider.authorize(validated, 'npm:package:lodash', 'read');
|
||||
expect(canAccessAnyPackage).toBeTrue();
|
||||
|
||||
const canAccessScopedPackage = await provider.authorize(validated, 'npm:package:@myorg/foo', 'write');
|
||||
expect(canAccessScopedPackage).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('authorize: should deny access with null token', async () => {
|
||||
const canAccess = await provider.authorize(null, 'npm:package:lodash', 'read');
|
||||
expect(canAccess).toBeFalse();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// List Tokens Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('listUserTokens: should list user tokens', async () => {
|
||||
// Create multiple tokens for the same user
|
||||
const userId = 'listtokenuser';
|
||||
await provider.createToken(userId, 'npm');
|
||||
await provider.createToken(userId, 'maven', { readonly: true });
|
||||
await provider.createToken(userId, 'cargo');
|
||||
|
||||
const tokens = await provider.listUserTokens!(userId);
|
||||
|
||||
expect(tokens.length).toBeGreaterThanOrEqual(3);
|
||||
|
||||
// Check that tokens have expected properties
|
||||
for (const token of tokens) {
|
||||
expect(token.key).toBeTruthy();
|
||||
expect(typeof token.readonly).toEqual('boolean');
|
||||
expect(token.created).toBeTruthy();
|
||||
}
|
||||
|
||||
// Verify we have different protocols
|
||||
const protocols = tokens.map(t => t.protocol);
|
||||
expect(protocols).toContain('npm');
|
||||
expect(protocols).toContain('maven');
|
||||
expect(protocols).toContain('cargo');
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// AuthManager Integration Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('AuthManager: should accept custom IAuthProvider', async () => {
|
||||
const mockProvider = createMockAuthProvider({
|
||||
authenticate: async (credentials) => {
|
||||
if (credentials.username === 'custom' && credentials.password === 'pass') {
|
||||
return 'custom-user-id';
|
||||
}
|
||||
return null;
|
||||
},
|
||||
});
|
||||
|
||||
const manager = new AuthManager(authConfig, mockProvider);
|
||||
await manager.init();
|
||||
|
||||
// Use the custom provider
|
||||
const userId = await manager.authenticate({
|
||||
username: 'custom',
|
||||
password: 'pass',
|
||||
});
|
||||
|
||||
expect(userId).toEqual('custom-user-id');
|
||||
|
||||
// Wrong credentials should fail
|
||||
const failed = await manager.authenticate({
|
||||
username: 'custom',
|
||||
password: 'wrong',
|
||||
});
|
||||
|
||||
expect(failed).toBeNull();
|
||||
});
|
||||
|
||||
tap.test('AuthManager: should use default provider when none specified', async () => {
|
||||
const manager = new AuthManager(authConfig);
|
||||
await manager.init();
|
||||
|
||||
// Should use DefaultAuthProvider internally
|
||||
const userId = await manager.authenticate({
|
||||
username: 'defaultuser',
|
||||
password: 'defaultpass',
|
||||
});
|
||||
|
||||
expect(userId).toEqual('defaultuser');
|
||||
});
|
||||
|
||||
tap.test('AuthManager: should delegate token creation to provider', async () => {
|
||||
let tokenCreated = false;
|
||||
const mockProvider = createMockAuthProvider({
|
||||
createToken: async (userId, protocol, options) => {
|
||||
tokenCreated = true;
|
||||
return `mock-token-${protocol}-${userId}`;
|
||||
},
|
||||
});
|
||||
|
||||
const manager = new AuthManager(authConfig, mockProvider);
|
||||
await manager.init();
|
||||
|
||||
const token = await manager.createNpmToken('delegateuser', false);
|
||||
|
||||
expect(tokenCreated).toBeTrue();
|
||||
expect(token).toContain('mock-token-npm');
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Edge Cases
|
||||
// ============================================================================
|
||||
|
||||
tap.test('edge: should handle concurrent token operations', async () => {
|
||||
const promises: Promise<string>[] = [];
|
||||
|
||||
// Create 10 tokens concurrently
|
||||
for (let i = 0; i < 10; i++) {
|
||||
promises.push(provider.createToken(`concurrent-user-${i}`, 'npm'));
|
||||
}
|
||||
|
||||
const tokens = await Promise.all(promises);
|
||||
|
||||
// All tokens should be unique
|
||||
const uniqueTokens = new Set(tokens);
|
||||
expect(uniqueTokens.size).toEqual(10);
|
||||
|
||||
// All tokens should be valid
|
||||
for (const token of tokens) {
|
||||
const validated = await provider.validateToken(token);
|
||||
expect(validated).toBeTruthy();
|
||||
}
|
||||
});
|
||||
|
||||
tap.test('edge: should handle empty scopes', async () => {
|
||||
const token = await provider.createToken('emptyuser', 'npm', {
|
||||
scopes: [],
|
||||
});
|
||||
|
||||
const validated = await provider.validateToken(token);
|
||||
expect(validated).toBeTruthy();
|
||||
// Even with empty scopes, token should be valid
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Cleanup
|
||||
// ============================================================================
|
||||
|
||||
tap.test('cleanup', async () => {
|
||||
// No cleanup needed for in-memory provider
|
||||
});
|
||||
|
||||
export default tap.start();
|
||||
506
test/test.storage.hooks.ts
Normal file
506
test/test.storage.hooks.ts
Normal file
@@ -0,0 +1,506 @@
|
||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||
import * as qenv from '@push.rocks/qenv';
|
||||
import { RegistryStorage } from '../ts/core/classes.registrystorage.js';
|
||||
import type { IStorageConfig } from '../ts/core/interfaces.core.js';
|
||||
import type { IStorageHooks, IStorageHookContext } from '../ts/core/interfaces.storage.js';
|
||||
import { createTrackingHooks, createQuotaHooks, generateTestRunId } from './helpers/registry.js';
|
||||
|
||||
const testQenv = new qenv.Qenv('./', './.nogit');
|
||||
|
||||
// ============================================================================
|
||||
// Test State
|
||||
// ============================================================================
|
||||
|
||||
let storage: RegistryStorage;
|
||||
let storageConfig: IStorageConfig;
|
||||
let testRunId: string;
|
||||
|
||||
// ============================================================================
|
||||
// Setup
|
||||
// ============================================================================
|
||||
|
||||
tap.test('setup: should create storage config', async () => {
|
||||
testRunId = generateTestRunId();
|
||||
|
||||
const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
|
||||
const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
|
||||
const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
|
||||
const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
|
||||
|
||||
storageConfig = {
|
||||
accessKey: s3AccessKey || 'minioadmin',
|
||||
accessSecret: s3SecretKey || 'minioadmin',
|
||||
endpoint: s3Endpoint || 'localhost',
|
||||
port: parseInt(s3Port || '9000', 10),
|
||||
useSsl: false,
|
||||
region: 'us-east-1',
|
||||
bucketName: `test-hooks-${testRunId}`,
|
||||
};
|
||||
|
||||
expect(storageConfig.bucketName).toBeTruthy();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// beforePut Hook Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('beforePut: should be called before storage', async () => {
|
||||
const tracker = createTrackingHooks();
|
||||
|
||||
storage = new RegistryStorage(storageConfig, tracker.hooks);
|
||||
await storage.init();
|
||||
|
||||
// Set context and put object
|
||||
storage.setContext({
|
||||
protocol: 'npm',
|
||||
actor: { userId: 'testuser' },
|
||||
metadata: { packageName: 'test-package' },
|
||||
});
|
||||
|
||||
await storage.putObject('test/beforeput-called.txt', Buffer.from('test data'));
|
||||
storage.clearContext();
|
||||
|
||||
// Verify beforePut was called
|
||||
const beforePutCalls = tracker.calls.filter(c => c.method === 'beforePut');
|
||||
expect(beforePutCalls.length).toEqual(1);
|
||||
expect(beforePutCalls[0].context.operation).toEqual('put');
|
||||
expect(beforePutCalls[0].context.key).toEqual('test/beforeput-called.txt');
|
||||
expect(beforePutCalls[0].context.protocol).toEqual('npm');
|
||||
});
|
||||
|
||||
tap.test('beforePut: returning {allowed: false} should block storage', async () => {
|
||||
const tracker = createTrackingHooks({ beforePutAllowed: false });
|
||||
|
||||
const blockingStorage = new RegistryStorage(storageConfig, tracker.hooks);
|
||||
await blockingStorage.init();
|
||||
|
||||
blockingStorage.setContext({
|
||||
protocol: 'npm',
|
||||
actor: { userId: 'testuser' },
|
||||
});
|
||||
|
||||
let errorThrown = false;
|
||||
try {
|
||||
await blockingStorage.putObject('test/should-not-exist.txt', Buffer.from('blocked data'));
|
||||
} catch (error) {
|
||||
errorThrown = true;
|
||||
expect((error as Error).message).toContain('Blocked by test');
|
||||
}
|
||||
|
||||
blockingStorage.clearContext();
|
||||
|
||||
expect(errorThrown).toBeTrue();
|
||||
|
||||
// Verify object was NOT stored
|
||||
const result = await blockingStorage.getObject('test/should-not-exist.txt');
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// afterPut Hook Tests
|
||||
// ============================================================================
|
||||
|
||||
// afterPut fires once per successful write; it is invoked asynchronously
// (fire-and-forget — presumably, given the 100ms grace sleeps below; TODO confirm
// against RegistryStorage implementation), so tests wait briefly before asserting.
tap.test('afterPut: should be called after successful storage', async () => {
  const tracker = createTrackingHooks();

  const trackedStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await trackedStorage.init();

  trackedStorage.setContext({
    protocol: 'maven',
    actor: { userId: 'maven-user' },
  });

  await trackedStorage.putObject('test/afterput-test.txt', Buffer.from('after put test'));
  trackedStorage.clearContext();

  // Give async hook time to complete
  await new Promise(resolve => setTimeout(resolve, 100));

  // Exactly one afterPut call, tagged with the 'put' operation.
  const afterPutCalls = tracker.calls.filter(c => c.method === 'afterPut');
  expect(afterPutCalls.length).toEqual(1);
  expect(afterPutCalls[0].context.operation).toEqual('put');
});

// Verifies the hook context carries the full metadata: payload size, object key,
// protocol, and the actor fields set via setContext().
tap.test('afterPut: should receive correct metadata (size, key, protocol)', async () => {
  const tracker = createTrackingHooks();

  const metadataStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await metadataStorage.init();

  const testData = Buffer.from('metadata test data - some content here');

  metadataStorage.setContext({
    protocol: 'cargo',
    actor: { userId: 'cargo-user', ip: '192.168.1.100' },
    metadata: { packageName: 'my-crate', version: '1.0.0' },
  });

  await metadataStorage.putObject('test/metadata-test.txt', testData);
  metadataStorage.clearContext();

  // Async hook grace period.
  await new Promise(resolve => setTimeout(resolve, 100));

  const afterPutCalls = tracker.calls.filter(c => c.method === 'afterPut');
  expect(afterPutCalls.length).toBeGreaterThanOrEqual(1);

  // Inspect the most recent call — earlier tests may share the tracker pattern.
  const call = afterPutCalls[afterPutCalls.length - 1];
  expect(call.context.metadata?.size).toEqual(testData.length);
  expect(call.context.key).toEqual('test/metadata-test.txt');
  expect(call.context.protocol).toEqual('cargo');
  expect(call.context.actor?.userId).toEqual('cargo-user');
  expect(call.context.actor?.ip).toEqual('192.168.1.100');
});
|
||||
|
||||
// ============================================================================
|
||||
// beforeDelete Hook Tests
|
||||
// ============================================================================
|
||||
|
||||
// beforeDelete runs synchronously before the deletion and receives the key
// being removed; no grace sleep is needed (unlike the after* hooks).
tap.test('beforeDelete: should be called before deletion', async () => {
  const tracker = createTrackingHooks();

  const deleteStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await deleteStorage.init();

  // First, store an object
  deleteStorage.setContext({ protocol: 'npm' });
  await deleteStorage.putObject('test/to-delete.txt', Buffer.from('delete me'));

  // Now delete it
  await deleteStorage.deleteObject('test/to-delete.txt');
  deleteStorage.clearContext();

  const beforeDeleteCalls = tracker.calls.filter(c => c.method === 'beforeDelete');
  expect(beforeDeleteCalls.length).toEqual(1);
  expect(beforeDeleteCalls[0].context.operation).toEqual('delete');
  expect(beforeDeleteCalls[0].context.key).toEqual('test/to-delete.txt');
});

// A beforeDelete hook returning { allowed: false } must abort the deletion:
// deleteObject rejects and the object remains readable afterwards.
tap.test('beforeDelete: returning {allowed: false} should block deletion', async () => {
  const tracker = createTrackingHooks({ beforeDeleteAllowed: false });

  const protectedStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await protectedStorage.init();

  // First store an object
  protectedStorage.setContext({ protocol: 'npm' });
  await protectedStorage.putObject('test/protected.txt', Buffer.from('protected data'));

  // Try to delete - should be blocked
  let errorThrown = false;
  try {
    await protectedStorage.deleteObject('test/protected.txt');
  } catch (error) {
    errorThrown = true;
    // 'Blocked by test' is the reason string produced by createTrackingHooks.
    expect((error as Error).message).toContain('Blocked by test');
  }

  protectedStorage.clearContext();

  expect(errorThrown).toBeTrue();

  // Verify object still exists
  const result = await protectedStorage.getObject('test/protected.txt');
  expect(result).toBeTruthy();
});
|
||||
|
||||
// ============================================================================
|
||||
// afterDelete Hook Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('afterDelete: should be called after successful deletion', async () => {
|
||||
const tracker = createTrackingHooks();
|
||||
|
||||
const afterDeleteStorage = new RegistryStorage(storageConfig, tracker.hooks);
|
||||
await afterDeleteStorage.init();
|
||||
|
||||
afterDeleteStorage.setContext({ protocol: 'pypi' });
|
||||
await afterDeleteStorage.putObject('test/delete-tracked.txt', Buffer.from('to be deleted'));
|
||||
await afterDeleteStorage.deleteObject('test/delete-tracked.txt');
|
||||
afterDeleteStorage.clearContext();
|
||||
|
||||
await new Promise(resolve => setTimeout(resolve, 100));
|
||||
|
||||
const afterDeleteCalls = tracker.calls.filter(c => c.method === 'afterDelete');
|
||||
expect(afterDeleteCalls.length).toEqual(1);
|
||||
expect(afterDeleteCalls[0].context.operation).toEqual('delete');
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// afterGet Hook Tests
|
||||
// ============================================================================
|
||||
|
||||
// afterGet observes reads: after writing a fixture, the call log is reset so
// only the read's hook activity is asserted.
tap.test('afterGet: should be called after reading object', async () => {
  const tracker = createTrackingHooks();

  const getStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await getStorage.init();

  // Store an object first
  getStorage.setContext({ protocol: 'rubygems' });
  await getStorage.putObject('test/read-test.txt', Buffer.from('read me'));

  // Clear calls to focus on the get
  tracker.calls.length = 0;

  // Read the object
  const data = await getStorage.getObject('test/read-test.txt');
  getStorage.clearContext();

  // afterGet is async — allow it to complete before asserting.
  await new Promise(resolve => setTimeout(resolve, 100));

  expect(data).toBeTruthy();
  expect(data!.toString()).toEqual('read me');

  const afterGetCalls = tracker.calls.filter(c => c.method === 'afterGet');
  expect(afterGetCalls.length).toEqual(1);
  expect(afterGetCalls[0].context.operation).toEqual('get');
});
|
||||
|
||||
// ============================================================================
|
||||
// Context Tests
|
||||
// ============================================================================
|
||||
|
||||
// Every actor field supplied via setContext() must be passed through to the
// hook context verbatim (userId, tokenId, ip, userAgent, orgId, sessionId).
tap.test('context: hooks should receive actor information', async () => {
  const tracker = createTrackingHooks();

  const actorStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await actorStorage.init();

  actorStorage.setContext({
    protocol: 'composer',
    actor: {
      userId: 'user-123',
      tokenId: 'token-abc',
      ip: '10.0.0.1',
      userAgent: 'composer/2.0',
      orgId: 'org-456',
      sessionId: 'session-xyz',
    },
  });

  await actorStorage.putObject('test/actor-test.txt', Buffer.from('actor test'));
  actorStorage.clearContext();

  // beforePut is synchronous, so the call is visible immediately.
  const beforePutCall = tracker.calls.find(c => c.method === 'beforePut');
  expect(beforePutCall).toBeTruthy();
  expect(beforePutCall!.context.actor?.userId).toEqual('user-123');
  expect(beforePutCall!.context.actor?.tokenId).toEqual('token-abc');
  expect(beforePutCall!.context.actor?.ip).toEqual('10.0.0.1');
  expect(beforePutCall!.context.actor?.userAgent).toEqual('composer/2.0');
  expect(beforePutCall!.context.actor?.orgId).toEqual('org-456');
  expect(beforePutCall!.context.actor?.sessionId).toEqual('session-xyz');
});
|
||||
|
||||
// withContext() scopes a context to a callback and clears it automatically,
// so callers cannot leak a context by forgetting clearContext().
tap.test('withContext: should set and clear context correctly', async () => {
  const tracker = createTrackingHooks();

  const contextStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await contextStorage.init();

  // Use withContext to ensure automatic cleanup
  await contextStorage.withContext(
    {
      protocol: 'oci',
      actor: { userId: 'oci-user' },
    },
    async () => {
      await contextStorage.putObject('test/with-context.txt', Buffer.from('context managed'));
    }
  );

  const call = tracker.calls.find(c => c.method === 'beforePut');
  expect(call).toBeTruthy();
  expect(call!.context.protocol).toEqual('oci');
  expect(call!.context.actor?.userId).toEqual('oci-user');
});

// withContext() must clear the context even when the callback throws
// (here: forced by a beforePut hook that denies the write).
tap.test('withContext: should clear context even on error', async () => {
  const tracker = createTrackingHooks({ beforePutAllowed: false });

  const errorStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await errorStorage.init();

  let errorThrown = false;
  try {
    await errorStorage.withContext(
      {
        protocol: 'npm',
        actor: { userId: 'error-user' },
      },
      async () => {
        await errorStorage.putObject('test/error-context.txt', Buffer.from('will fail'));
      }
    );
  } catch {
    errorThrown = true;
  }

  expect(errorThrown).toBeTrue();

  // Verify context was cleared - next operation without context should work
  // (hooks won't be called without context)
  // Re-allow writes so the follow-up put can succeed regardless of hook wiring.
  tracker.hooks.beforePut = async () => ({ allowed: true });
  await errorStorage.putObject('test/after-error.txt', Buffer.from('ok'));
});
|
||||
|
||||
// ============================================================================
|
||||
// Graceful Degradation Tests
|
||||
// ============================================================================
|
||||
|
||||
// after* hooks are observational: a throwing afterPut must not fail the write
// (the object is still stored and the caller sees success).
tap.test('graceful: hooks should not fail the operation if afterPut throws', async () => {
  const tracker = createTrackingHooks({ throwOnAfterPut: true });

  const gracefulStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await gracefulStorage.init();

  gracefulStorage.setContext({ protocol: 'npm' });

  // This should NOT throw even though afterPut throws
  let errorThrown = false;
  try {
    await gracefulStorage.putObject('test/graceful-afterput.txt', Buffer.from('should succeed'));
  } catch {
    errorThrown = true;
  }

  gracefulStorage.clearContext();

  expect(errorThrown).toBeFalse();

  // Verify object was stored
  const data = await gracefulStorage.getObject('test/graceful-afterput.txt');
  expect(data).toBeTruthy();
});

// Same guarantee on the read path: a throwing afterGet must not fail getObject.
tap.test('graceful: hooks should not fail the operation if afterGet throws', async () => {
  const tracker = createTrackingHooks({ throwOnAfterGet: true });

  const gracefulGetStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await gracefulGetStorage.init();

  // Store first
  gracefulGetStorage.setContext({ protocol: 'maven' });
  await gracefulGetStorage.putObject('test/graceful-afterget.txt', Buffer.from('read me gracefully'));

  // Read should succeed even though afterGet throws
  let errorThrown = false;
  try {
    const data = await gracefulGetStorage.getObject('test/graceful-afterget.txt');
    expect(data).toBeTruthy();
  } catch {
    errorThrown = true;
  }

  gracefulGetStorage.clearContext();

  expect(errorThrown).toBeFalse();
});
|
||||
|
||||
// ============================================================================
|
||||
// Quota Hooks Tests
|
||||
// ============================================================================
|
||||
|
||||
// Quota enforcement via hooks: createQuotaHooks (defined earlier in this file)
// tracks bytes written and denies puts that would exceed the limit.
tap.test('quota: should block storage when quota exceeded', async () => {
  const maxSize = 100; // 100 bytes max
  const quotaTracker = createQuotaHooks(maxSize);

  const quotaStorage = new RegistryStorage(storageConfig, quotaTracker.hooks);
  await quotaStorage.init();

  quotaStorage.setContext({
    protocol: 'npm',
    actor: { userId: 'quota-user' },
  });

  // Store 50 bytes - should succeed
  await quotaStorage.putObject('test/quota-1.txt', Buffer.from('x'.repeat(50)));
  expect(quotaTracker.currentUsage.bytes).toEqual(50);

  // Try to store 60 more bytes - should fail (total would be 110)
  let errorThrown = false;
  try {
    await quotaStorage.putObject('test/quota-2.txt', Buffer.from('x'.repeat(60)));
  } catch (error) {
    errorThrown = true;
    expect((error as Error).message).toContain('Quota exceeded');
  }

  quotaStorage.clearContext();

  expect(errorThrown).toBeTrue();
  expect(quotaTracker.currentUsage.bytes).toEqual(50); // Still 50, not 110
});

// Deleting an object should decrease tracked usage (via the afterDelete hook,
// which reads the size from the context metadata — TODO confirm against
// createQuotaHooks).
tap.test('quota: should update usage after delete', async () => {
  const maxSize = 200;
  const quotaTracker = createQuotaHooks(maxSize);

  const quotaDelStorage = new RegistryStorage(storageConfig, quotaTracker.hooks);
  await quotaDelStorage.init();

  quotaDelStorage.setContext({
    protocol: 'npm',
    metadata: { size: 75 },
  });

  // Store and track
  await quotaDelStorage.putObject('test/quota-del.txt', Buffer.from('x'.repeat(75)));
  expect(quotaTracker.currentUsage.bytes).toEqual(75);

  // Delete and verify usage decreases
  await quotaDelStorage.deleteObject('test/quota-del.txt');

  // afterDelete is async — allow it to complete.
  await new Promise(resolve => setTimeout(resolve, 100));

  quotaDelStorage.clearContext();

  // Usage should be reduced (though exact value depends on metadata)
  expect(quotaTracker.currentUsage.bytes).toBeLessThanOrEqual(75);
});
|
||||
|
||||
// ============================================================================
|
||||
// setHooks Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('setHooks: should allow setting hooks after construction', async () => {
|
||||
const lateStorage = new RegistryStorage(storageConfig);
|
||||
await lateStorage.init();
|
||||
|
||||
// Initially no hooks
|
||||
await lateStorage.putObject('test/no-hooks.txt', Buffer.from('no hooks yet'));
|
||||
|
||||
// Add hooks later
|
||||
const tracker = createTrackingHooks();
|
||||
lateStorage.setHooks(tracker.hooks);
|
||||
|
||||
lateStorage.setContext({ protocol: 'npm' });
|
||||
await lateStorage.putObject('test/with-late-hooks.txt', Buffer.from('now with hooks'));
|
||||
lateStorage.clearContext();
|
||||
|
||||
const beforePutCalls = tracker.calls.filter(c => c.method === 'beforePut');
|
||||
expect(beforePutCalls.length).toEqual(1);
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Cleanup
|
||||
// ============================================================================
|
||||
|
||||
// Best-effort cleanup of everything written under 'test/'. `storage` is the
// shared instance declared earlier in this file (outside this excerpt).
tap.test('cleanup: should clean up test bucket', async () => {
  if (storage) {
    // Clean up test objects
    const prefixes = ['test/'];
    for (const prefix of prefixes) {
      try {
        const objects = await storage.listObjects(prefix);
        for (const obj of objects) {
          await storage.deleteObject(obj);
        }
      } catch {
        // Ignore cleanup errors
      }
    }
  }
});

export default tap.start();
|
||||
598
test/test.upstream.cache.s3.ts
Normal file
598
test/test.upstream.cache.s3.ts
Normal file
@@ -0,0 +1,598 @@
|
||||
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||
import * as qenv from '@push.rocks/qenv';
|
||||
import * as smartbucket from '@push.rocks/smartbucket';
|
||||
import { UpstreamCache } from '../ts/upstream/classes.upstreamcache.js';
|
||||
import type { IUpstreamFetchContext, IUpstreamCacheConfig } from '../ts/upstream/interfaces.upstream.js';
|
||||
import type { IStorageBackend } from '../ts/core/interfaces.core.js';
|
||||
import { generateTestRunId } from './helpers/registry.js';
|
||||
|
||||
// Reads S3 credentials from the environment or ./.nogit (qenv convention).
const testQenv = new qenv.Qenv('./', './.nogit');

// ============================================================================
// Test State
// ============================================================================

// Shared across the sequential tap tests below; populated by the setup tests.
let cache: UpstreamCache;               // cache under test, backed by S3
let storageBackend: IStorageBackend;    // adapter wrapping the smartbucket bucket
let s3Bucket: smartbucket.Bucket;       // per-run test bucket
let smartBucket: smartbucket.SmartBucket;
let testRunId: string;                  // unique id so parallel runs don't collide
let bucketName: string;                 // derived from testRunId
|
||||
|
||||
// ============================================================================
|
||||
// Helper Functions
|
||||
// ============================================================================
|
||||
|
||||
function createFetchContext(overrides?: Partial<IUpstreamFetchContext>): IUpstreamFetchContext {
|
||||
// Use resource name as path to ensure unique cache keys
|
||||
const resource = overrides?.resource || 'lodash';
|
||||
return {
|
||||
protocol: 'npm',
|
||||
resource,
|
||||
resourceType: 'packument',
|
||||
path: `/${resource}`,
|
||||
method: 'GET',
|
||||
headers: {},
|
||||
query: {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Setup
|
||||
// ============================================================================
|
||||
|
||||
// Creates a per-run S3 bucket (MinIO defaults for local runs) and wraps it in
// the IStorageBackend shape the UpstreamCache expects.
tap.test('setup: should create S3 storage backend', async () => {
  testRunId = generateTestRunId();
  bucketName = `test-ucache-${testRunId.substring(0, 8)}`;

  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');

  // Fallbacks match a default local MinIO instance.
  smartBucket = new smartbucket.SmartBucket({
    accessKey: s3AccessKey || 'minioadmin',
    accessSecret: s3SecretKey || 'minioadmin',
    endpoint: s3Endpoint || 'localhost',
    port: parseInt(s3Port || '9000', 10),
    useSsl: false,
  });

  s3Bucket = await smartBucket.createBucket(bucketName);

  // Create storage backend adapter
  storageBackend = {
    getObject: async (key: string): Promise<Buffer | null> => {
      try {
        // fastGet returns Buffer directly (or undefined if not found)
        const data = await s3Bucket.fastGet({ path: key });
        if (!data) {
          return null;
        }
        return data;
      } catch (error) {
        // fastGet throws if object doesn't exist
        return null;
      }
    },
    putObject: async (key: string, data: Buffer): Promise<void> => {
      await s3Bucket.fastPut({ path: key, contents: data, overwrite: true });
    },
    deleteObject: async (key: string): Promise<void> => {
      await s3Bucket.fastRemove({ path: key });
    },
    listObjects: async (prefix: string): Promise<string[]> => {
      // listAllObjects yields key strings under the given prefix.
      const paths: string[] = [];
      for await (const path of s3Bucket.listAllObjects(prefix)) {
        paths.push(path);
      }
      return paths;
    },
  };

  expect(storageBackend).toBeTruthy();
});

// Constructs the cache under test: 300s default TTL, up to 10000 in-memory
// entries, persisted through the S3 backend created above.
tap.test('setup: should create UpstreamCache with S3 storage', async () => {
  cache = new UpstreamCache(
    { enabled: true, defaultTtlSeconds: 300 },
    10000,
    storageBackend
  );

  expect(cache.isEnabled()).toBeTrue();
  expect(cache.hasStorage()).toBeTrue();
});
|
||||
|
||||
// ============================================================================
|
||||
// Basic Cache Operations
|
||||
// ============================================================================
|
||||
|
||||
// set() stores data together with content type, response headers, the id of
// the upstream that served it, and the upstream base URL (part of the key).
tap.test('cache: should store cache entry in S3', async () => {
  const context = createFetchContext({ resource: 'store-test' });
  const testData = Buffer.from(JSON.stringify({ name: 'store-test', version: '1.0.0' }));
  const upstreamUrl = 'https://registry.npmjs.org';

  await cache.set(
    context,
    testData,
    'application/json',
    { 'etag': '"abc123"' },
    'npmjs',
    upstreamUrl
  );

  // Verify in S3
  // NOTE(review): getStats() reflects the in-memory index, not S3 directly —
  // the S3 round-trip itself is covered by the persistence test further down.
  const stats = cache.getStats();
  expect(stats.totalEntries).toBeGreaterThanOrEqual(1);
});

// get() must return the stored payload with its metadata intact.
tap.test('cache: should retrieve cache entry from S3', async () => {
  const context = createFetchContext({ resource: 'retrieve-test' });
  const testData = Buffer.from('retrieve test data');
  const upstreamUrl = 'https://registry.npmjs.org';

  await cache.set(
    context,
    testData,
    'application/octet-stream',
    {},
    'npmjs',
    upstreamUrl
  );

  const entry = await cache.get(context, upstreamUrl);

  expect(entry).toBeTruthy();
  expect(entry!.data.toString()).toEqual('retrieve test data');
  expect(entry!.contentType).toEqual('application/octet-stream');
  expect(entry!.upstreamId).toEqual('npmjs');
});
|
||||
|
||||
// ============================================================================
|
||||
// Multi-Upstream URL Tests
|
||||
// ============================================================================
|
||||
|
||||
// The upstream base URL is part of the cache key, so the same resource cached
// from two upstreams yields two independent entries.
tap.test('cache: should include upstream URL in cache path', async () => {
  const context = createFetchContext({ resource: 'url-path-test' });
  const testData = Buffer.from('url path test');

  await cache.set(
    context,
    testData,
    'text/plain',
    {},
    'npmjs',
    'https://registry.npmjs.org'
  );

  // The cache key should include the escaped URL
  const entry = await cache.get(context, 'https://registry.npmjs.org');
  expect(entry).toBeTruthy();
  expect(entry!.data.toString()).toEqual('url path test');
});

// Same resource name, two upstreams: each lookup must return the data stored
// for that specific upstream, never the other's.
tap.test('cache: should handle multiple upstreams with different URLs', async () => {
  const context = createFetchContext({ resource: '@company/private-pkg' });

  // Store from private upstream
  const privateData = Buffer.from('private package data');
  await cache.set(
    context,
    privateData,
    'application/json',
    {},
    'private-npm',
    'https://npm.company.com'
  );

  // Store from public upstream (same resource name, different upstream)
  const publicData = Buffer.from('public package data');
  await cache.set(
    context,
    publicData,
    'application/json',
    {},
    'public-npm',
    'https://registry.npmjs.org'
  );

  // Retrieve from private - should get private data
  const privateEntry = await cache.get(context, 'https://npm.company.com');
  expect(privateEntry).toBeTruthy();
  expect(privateEntry!.data.toString()).toEqual('private package data');
  expect(privateEntry!.upstreamId).toEqual('private-npm');

  // Retrieve from public - should get public data
  const publicEntry = await cache.get(context, 'https://registry.npmjs.org');
  expect(publicEntry).toBeTruthy();
  expect(publicEntry!.data.toString()).toEqual('public package data');
  expect(publicEntry!.upstreamId).toEqual('public-npm');
});
|
||||
|
||||
// ============================================================================
|
||||
// TTL and Expiration Tests
|
||||
// ============================================================================
|
||||
|
||||
// With stale-while-revalidate disabled, an entry must disappear as soon as its
// TTL elapses. All TTL knobs are set to 1s so the test runs quickly.
tap.test('cache: should respect TTL expiration', async () => {
  // Create cache with very short TTL
  const shortTtlCache = new UpstreamCache(
    {
      enabled: true,
      defaultTtlSeconds: 1, // 1 second TTL
      staleWhileRevalidate: false,
      staleMaxAgeSeconds: 0,
      immutableTtlSeconds: 1,
      negativeCacheTtlSeconds: 1,
    },
    1000,
    storageBackend
  );

  const context = createFetchContext({ resource: 'ttl-test' });
  const testData = Buffer.from('expires soon');

  await shortTtlCache.set(
    context,
    testData,
    'text/plain',
    {},
    'test-upstream',
    'https://test.example.com'
  );

  // Should exist immediately
  let entry = await shortTtlCache.get(context, 'https://test.example.com');
  expect(entry).toBeTruthy();

  // Wait for expiration (1.5s > 1s TTL)
  await new Promise(resolve => setTimeout(resolve, 1500));

  // Should be expired now
  entry = await shortTtlCache.get(context, 'https://test.example.com');
  expect(entry).toBeNull();

  // Stop background timers so the process can exit cleanly.
  shortTtlCache.stop();
});
|
||||
|
||||
// With stale-while-revalidate enabled, an entry past its fresh TTL but inside
// the stale window is still returned, flagged with `stale: true`.
tap.test('cache: should serve stale content during stale-while-revalidate window', async () => {
  const staleCache = new UpstreamCache(
    {
      enabled: true,
      defaultTtlSeconds: 1, // 1 second fresh
      staleWhileRevalidate: true,
      staleMaxAgeSeconds: 60, // 60 seconds stale window
      immutableTtlSeconds: 1,
      negativeCacheTtlSeconds: 1,
    },
    1000,
    storageBackend
  );

  const context = createFetchContext({ resource: 'stale-test' });
  const testData = Buffer.from('stale but usable');

  await staleCache.set(
    context,
    testData,
    'text/plain',
    {},
    'stale-upstream',
    'https://stale.example.com'
  );

  // Wait for fresh period to expire (still well within the 60s stale window)
  await new Promise(resolve => setTimeout(resolve, 1500));

  // Should still be available but marked as stale
  const entry = await staleCache.get(context, 'https://stale.example.com');
  expect(entry).toBeTruthy();
  expect(entry!.stale).toBeTrue();
  expect(entry!.data.toString()).toEqual('stale but usable');

  staleCache.stop();
});
|
||||
|
||||
// Once BOTH the fresh TTL and the stale window have elapsed (1s + 1s here,
// waited out with 2.5s), the entry must no longer be served at all.
tap.test('cache: should reject content past stale deadline', async () => {
  const veryShortCache = new UpstreamCache(
    {
      enabled: true,
      defaultTtlSeconds: 1,
      staleWhileRevalidate: true,
      staleMaxAgeSeconds: 1, // Only 1 second stale window
      immutableTtlSeconds: 1,
      negativeCacheTtlSeconds: 1,
    },
    1000,
    storageBackend
  );

  const context = createFetchContext({ resource: 'very-stale-test' });
  await veryShortCache.set(
    context,
    Buffer.from('will expire completely'),
    'text/plain',
    {},
    'short-upstream',
    'https://short.example.com'
  );

  // Wait for both fresh AND stale periods to expire
  await new Promise(resolve => setTimeout(resolve, 2500));

  const entry = await veryShortCache.get(context, 'https://short.example.com');
  expect(entry).toBeNull();

  veryShortCache.stop();
});
|
||||
|
||||
// ============================================================================
|
||||
// Negative Cache Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('cache: should store negative cache entries (404)', async () => {
|
||||
const context = createFetchContext({ resource: 'not-found-pkg' });
|
||||
const upstreamUrl = 'https://registry.npmjs.org';
|
||||
|
||||
await cache.setNegative(context, 'npmjs', upstreamUrl);
|
||||
|
||||
const hasNegative = await cache.hasNegative(context, upstreamUrl);
|
||||
expect(hasNegative).toBeTrue();
|
||||
});
|
||||
|
||||
tap.test('cache: should retrieve negative cache entries', async () => {
|
||||
const context = createFetchContext({ resource: 'negative-retrieve-test' });
|
||||
const upstreamUrl = 'https://registry.npmjs.org';
|
||||
|
||||
await cache.setNegative(context, 'npmjs', upstreamUrl);
|
||||
|
||||
const entry = await cache.get(context, upstreamUrl);
|
||||
expect(entry).toBeTruthy();
|
||||
expect(entry!.data.length).toEqual(0); // Empty buffer indicates 404
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Eviction Tests
|
||||
// ============================================================================
|
||||
|
||||
// With a 5-entry memory limit, inserting 10 entries must trigger eviction so
// the in-memory index never exceeds the cap.
tap.test('cache: should evict oldest entries when memory limit reached', async () => {
  // Create cache with very small limit
  const smallCache = new UpstreamCache(
    { enabled: true, defaultTtlSeconds: 300 },
    5, // Only 5 entries
    storageBackend
  );

  // Add 10 entries
  for (let i = 0; i < 10; i++) {
    const context = createFetchContext({ resource: `evict-test-${i}` });
    await smallCache.set(
      context,
      Buffer.from(`data ${i}`),
      'text/plain',
      {},
      'evict-upstream',
      'https://evict.example.com'
    );
  }

  const stats = smallCache.getStats();
  // Should have evicted some entries
  expect(stats.totalEntries).toBeLessThanOrEqual(5);

  smallCache.stop();
});
|
||||
|
||||
// ============================================================================
|
||||
// Query Parameter Tests
|
||||
// ============================================================================
|
||||
|
||||
// Query parameters are part of the cache key: the same path with different
// query strings must be cached (and retrieved) independently.
tap.test('cache: cache key should include query parameters', async () => {
  const context1 = createFetchContext({
    resource: 'query-test',
    query: { version: '1.0.0' },
  });

  const context2 = createFetchContext({
    resource: 'query-test',
    query: { version: '2.0.0' },
  });

  const upstreamUrl = 'https://registry.npmjs.org';

  // Store with v1 query
  await cache.set(
    context1,
    Buffer.from('version 1 data'),
    'text/plain',
    {},
    'query-upstream',
    upstreamUrl
  );

  // Store with v2 query
  await cache.set(
    context2,
    Buffer.from('version 2 data'),
    'text/plain',
    {},
    'query-upstream',
    upstreamUrl
  );

  // Retrieve v1 - should get v1 data
  const entry1 = await cache.get(context1, upstreamUrl);
  expect(entry1).toBeTruthy();
  expect(entry1!.data.toString()).toEqual('version 1 data');

  // Retrieve v2 - should get v2 data
  const entry2 = await cache.get(context2, upstreamUrl);
  expect(entry2).toBeTruthy();
  expect(entry2!.data.toString()).toEqual('version 2 data');
});
|
||||
|
||||
// ============================================================================
|
||||
// S3 Persistence Tests
|
||||
// ============================================================================
|
||||
|
||||
// Persistence across restarts: a second UpstreamCache instance with an empty
// memory index but the same S3 backend must transparently load the entry.
tap.test('cache: should load from S3 on memory cache miss', async () => {
  // Use a unique resource name for this test
  const uniqueResource = `persist-test-${Date.now()}`;
  const persistContext = createFetchContext({ resource: uniqueResource });
  const upstreamUrl = 'https://persist.example.com';

  // Store in first cache instance
  await cache.set(
    persistContext,
    Buffer.from('persisted data'),
    'text/plain',
    {},
    'persist-upstream',
    upstreamUrl
  );

  // Wait for S3 write to complete
  // (the S3 write appears to be asynchronous relative to set() — TODO confirm)
  await new Promise(resolve => setTimeout(resolve, 200));

  // Verify the entry is in the original cache's memory
  const originalEntry = await cache.get(persistContext, upstreamUrl);
  expect(originalEntry).toBeTruthy();

  // Create a new cache instance (simulates restart) with SAME storage backend
  const freshCache = new UpstreamCache(
    { enabled: true, defaultTtlSeconds: 300 },
    10000,
    storageBackend
  );

  // Fresh cache has empty memory, should load from S3
  const entry = await freshCache.get(persistContext, upstreamUrl);

  expect(entry).toBeTruthy();
  expect(entry!.data.toString()).toEqual('persisted data');

  freshCache.stop();
});
|
||||
|
||||
// ============================================================================
|
||||
// Cache Stats Tests
|
||||
// ============================================================================
|
||||
|
||||
// getStats() must reflect entry counts (including negative entries) plus the
// configured limits and feature flags.
tap.test('cache: should return accurate stats', async () => {
  const statsCache = new UpstreamCache(
    { enabled: true, defaultTtlSeconds: 300 },
    1000,
    storageBackend
  );

  // Add some entries
  for (let i = 0; i < 3; i++) {
    const context = createFetchContext({ resource: `stats-test-${i}` });
    await statsCache.set(
      context,
      Buffer.from(`stats data ${i}`),
      'text/plain',
      {},
      'stats-upstream',
      'https://stats.example.com'
    );
  }

  // Add a negative entry
  const negContext = createFetchContext({ resource: 'stats-negative' });
  await statsCache.setNegative(negContext, 'stats-upstream', 'https://stats.example.com');

  const stats = statsCache.getStats();

  // 3 positive + 1 negative entry; counted together in totalEntries.
  expect(stats.totalEntries).toBeGreaterThanOrEqual(4);
  expect(stats.enabled).toBeTrue();
  expect(stats.hasStorage).toBeTrue();
  expect(stats.maxEntries).toEqual(1000);

  statsCache.stop();
});
|
||||
|
||||
// ============================================================================
|
||||
// Invalidation Tests
|
||||
// ============================================================================
|
||||
|
||||
tap.test('cache: should invalidate specific cache entry', async () => {
|
||||
const invalidateContext = createFetchContext({ resource: 'invalidate-test' });
|
||||
const upstreamUrl = 'https://invalidate.example.com';
|
||||
|
||||
await cache.set(
|
||||
invalidateContext,
|
||||
Buffer.from('to be invalidated'),
|
||||
'text/plain',
|
||||
{},
|
||||
'inv-upstream',
|
||||
upstreamUrl
|
||||
);
|
||||
|
||||
// Verify it exists
|
||||
let entry = await cache.get(invalidateContext, upstreamUrl);
|
||||
expect(entry).toBeTruthy();
|
||||
|
||||
// Invalidate
|
||||
const deleted = await cache.invalidate(invalidateContext, upstreamUrl);
|
||||
expect(deleted).toBeTrue();
|
||||
|
||||
// Should be gone
|
||||
entry = await cache.get(invalidateContext, upstreamUrl);
|
||||
expect(entry).toBeNull();
|
||||
});
|
||||
|
||||
tap.test('cache: should invalidate entries matching pattern', async () => {
|
||||
const upstreamUrl = 'https://pattern.example.com';
|
||||
|
||||
// Add multiple entries
|
||||
for (const name of ['pattern-a', 'pattern-b', 'other-c']) {
|
||||
const context = createFetchContext({ resource: name });
|
||||
await cache.set(
|
||||
context,
|
||||
Buffer.from(`data for ${name}`),
|
||||
'text/plain',
|
||||
{},
|
||||
'pattern-upstream',
|
||||
upstreamUrl
|
||||
);
|
||||
}
|
||||
|
||||
// Invalidate entries matching 'pattern-*'
|
||||
const count = await cache.invalidatePattern(/pattern-/);
|
||||
expect(count).toBeGreaterThanOrEqual(2);
|
||||
|
||||
// pattern-a should be gone
|
||||
const entryA = await cache.get(createFetchContext({ resource: 'pattern-a' }), upstreamUrl);
|
||||
expect(entryA).toBeNull();
|
||||
|
||||
// other-c should still exist
|
||||
const entryC = await cache.get(createFetchContext({ resource: 'other-c' }), upstreamUrl);
|
||||
expect(entryC).toBeTruthy();
|
||||
});
|
||||
|
||||
// ============================================================================
|
||||
// Cleanup
|
||||
// ============================================================================
|
||||
|
||||
tap.test('cleanup: should stop cache and clean up bucket', async () => {
|
||||
if (cache) {
|
||||
cache.stop();
|
||||
}
|
||||
|
||||
// Clean up test bucket
|
||||
if (s3Bucket) {
|
||||
try {
|
||||
const files = await s3Bucket.fastList({});
|
||||
for (const file of files) {
|
||||
await s3Bucket.fastRemove({ path: file.name });
|
||||
}
|
||||
await smartBucket.removeBucket(bucketName);
|
||||
} catch {
|
||||
// Ignore cleanup errors
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Start the tap test runner; its promise is this module's default export.
export default tap.start();
|
||||
@@ -3,6 +3,6 @@
|
||||
*/
|
||||
export const commitinfo = {
  name: '@push.rocks/smartregistry',
  version: '2.6.0',
  description: 'A composable TypeScript library implementing OCI, NPM, Maven, Cargo, Composer, PyPI, and RubyGems registries for building unified container and package registries'
}
|
||||
|
||||
Reference in New Issue
Block a user