feat(registry): add declarative protocol routing and request-scoped storage hook context across registries
@@ -0,0 +1,420 @@
import * as crypto from 'crypto';
import * as smartarchive from '@push.rocks/smartarchive';

/**
 * Helper to calculate SHA-256 digest in OCI format
 */
export function calculateDigest(data: Buffer): string {
  const hash = crypto.createHash('sha256').update(data).digest('hex');
  return `sha256:${hash}`;
}
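// Illustrative note: the returned string is 'sha256:' followed by 64 lowercase
// hex characters, the same form OCI manifests expect in config.digest and
// layers[].digest.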

/**
 * Helper to create a minimal valid OCI manifest
 */
export function createTestManifest(configDigest: string, layerDigest: string) {
  return {
    schemaVersion: 2,
    mediaType: 'application/vnd.oci.image.manifest.v1+json',
    config: {
      mediaType: 'application/vnd.oci.image.config.v1+json',
      size: 123,
      digest: configDigest,
    },
    layers: [
      {
        mediaType: 'application/vnd.oci.image.layer.v1.tar+gzip',
        size: 456,
        digest: layerDigest,
      },
    ],
  };
}

/**
 * Helper to create a minimal valid NPM packument
 */
export function createTestPackument(packageName: string, version: string, tarballData: Buffer) {
  const shasum = crypto.createHash('sha1').update(tarballData).digest('hex');
  const integrity = `sha512-${crypto.createHash('sha512').update(tarballData).digest('base64')}`;

  return {
    name: packageName,
    versions: {
      [version]: {
        name: packageName,
        version: version,
        description: 'Test package',
        main: 'index.js',
        scripts: {},
        dist: {
          shasum: shasum,
          integrity: integrity,
          tarball: `http://localhost:5000/npm/${packageName}/-/${packageName}-${version}.tgz`,
        },
      },
    },
    'dist-tags': {
      latest: version,
    },
    _attachments: {
      [`${packageName}-${version}.tgz`]: {
        content_type: 'application/octet-stream',
        data: tarballData.toString('base64'),
        length: tarballData.length,
      },
    },
  };
}
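// Illustrative usage sketch (endpoint shape assumed from the npm basePath used in tests):
//   const tarball = Buffer.from('dummy tarball bytes');
//   const body = JSON.stringify(createTestPackument('testpkg', '1.0.0', tarball));
//   // PUT /npm/testpkg with this body exercises the publish path.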

/**
 * Helper to create a minimal valid Maven POM file
 */
export function createTestPom(
  groupId: string,
  artifactId: string,
  version: string,
  packaging: string = 'jar'
): string {
  return `<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
         http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <groupId>${groupId}</groupId>
  <artifactId>${artifactId}</artifactId>
  <version>${version}</version>
  <packaging>${packaging}</packaging>
  <name>${artifactId}</name>
  <description>Test Maven artifact</description>
</project>`;
}

/**
 * Helper to create a test JAR file (minimal ZIP with manifest)
 */
export function createTestJar(): Buffer {
  const manifestContent = `Manifest-Version: 1.0
Created-By: SmartRegistry Test
`;

  return Buffer.from(manifestContent, 'utf-8');
}

/**
 * Helper to calculate Maven checksums
 */
export function calculateMavenChecksums(data: Buffer) {
  return {
    md5: crypto.createHash('md5').update(data).digest('hex'),
    sha1: crypto.createHash('sha1').update(data).digest('hex'),
    sha256: crypto.createHash('sha256').update(data).digest('hex'),
    sha512: crypto.createHash('sha512').update(data).digest('hex'),
  };
}

/**
 * Helper to create a Composer package ZIP using smartarchive
 */
export async function createComposerZip(
  vendorPackage: string,
  version: string,
  options?: {
    description?: string;
    license?: string[];
    authors?: Array<{ name: string; email?: string }>;
  }
): Promise<Buffer> {
  const zipTools = new smartarchive.ZipTools();

  const composerJson = {
    name: vendorPackage,
    version: version,
    type: 'library',
    description: options?.description || 'Test Composer package',
    license: options?.license || ['MIT'],
    authors: options?.authors || [{ name: 'Test Author', email: 'test@example.com' }],
    require: {
      php: '>=7.4',
    },
    autoload: {
      'psr-4': {
        'Vendor\\TestPackage\\': 'src/',
      },
    },
  };

  const [vendor, pkg] = vendorPackage.split('/');
  const namespace = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}\\${pkg.charAt(0).toUpperCase() + pkg.slice(1).replace(/-/g, '')}`;
  const testPhpContent = `<?php
namespace ${namespace};

class TestClass
{
    public function greet(): string
    {
        return "Hello from ${vendorPackage}!";
    }
}
`;

  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: 'composer.json',
      content: Buffer.from(JSON.stringify(composerJson, null, 2), 'utf-8'),
    },
    {
      archivePath: 'src/TestClass.php',
      content: Buffer.from(testPhpContent, 'utf-8'),
    },
    {
      archivePath: 'README.md',
      content: Buffer.from(`# ${vendorPackage}\n\nTest package`, 'utf-8'),
    },
  ];

  return Buffer.from(await zipTools.createZip(entries));
}

/**
 * Helper to create a test Python wheel file (minimal ZIP structure) using smartarchive
 */
export async function createPythonWheel(
  packageName: string,
  version: string,
  pyVersion: string = 'py3'
): Promise<Buffer> {
  const zipTools = new smartarchive.ZipTools();

  const normalizedName = packageName.replace(/-/g, '_');
  const distInfoDir = `${normalizedName}-${version}.dist-info`;

  const metadata = `Metadata-Version: 2.1
Name: ${packageName}
Version: ${version}
Summary: Test Python package
Home-page: https://example.com
Author: Test Author
Author-email: test@example.com
License: MIT
Platform: UNKNOWN
Classifier: Programming Language :: Python :: 3
Requires-Python: >=3.7
Description-Content-Type: text/markdown

# ${packageName}

Test package for SmartRegistry
`;

  const wheelContent = `Wheel-Version: 1.0
Generator: test 1.0.0
Root-Is-Purelib: true
Tag: ${pyVersion}-none-any
`;

  const moduleContent = `"""${packageName} module"""

__version__ = "${version}"

def hello():
    return "Hello from ${packageName}!"
`;

  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `${distInfoDir}/METADATA`,
      content: Buffer.from(metadata, 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/WHEEL`,
      content: Buffer.from(wheelContent, 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/RECORD`,
      content: Buffer.from('', 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/top_level.txt`,
      content: Buffer.from(normalizedName, 'utf-8'),
    },
    {
      archivePath: `${normalizedName}/__init__.py`,
      content: Buffer.from(moduleContent, 'utf-8'),
    },
  ];

  return Buffer.from(await zipTools.createZip(entries));
}

/**
 * Helper to create a test Python source distribution (sdist) using smartarchive
 */
export async function createPythonSdist(
  packageName: string,
  version: string
): Promise<Buffer> {
  const tarTools = new smartarchive.TarTools();

  const normalizedName = packageName.replace(/-/g, '_');
  const dirPrefix = `${packageName}-${version}`;

  const pkgInfo = `Metadata-Version: 2.1
Name: ${packageName}
Version: ${version}
Summary: Test Python package
Home-page: https://example.com
Author: Test Author
Author-email: test@example.com
License: MIT
`;

  const setupPy = `from setuptools import setup, find_packages

setup(
    name="${packageName}",
    version="${version}",
    packages=find_packages(),
    python_requires=">=3.7",
)
`;

  const moduleContent = `"""${packageName} module"""

__version__ = "${version}"

def hello():
    return "Hello from ${packageName}!"
`;

  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `${dirPrefix}/PKG-INFO`,
      content: Buffer.from(pkgInfo, 'utf-8'),
    },
    {
      archivePath: `${dirPrefix}/setup.py`,
      content: Buffer.from(setupPy, 'utf-8'),
    },
    {
      archivePath: `${dirPrefix}/${normalizedName}/__init__.py`,
      content: Buffer.from(moduleContent, 'utf-8'),
    },
  ];

  return Buffer.from(await tarTools.packFilesToTarGz(entries));
}

/**
 * Helper to calculate PyPI file hashes
 */
export function calculatePypiHashes(data: Buffer) {
  return {
    md5: crypto.createHash('md5').update(data).digest('hex'),
    sha256: crypto.createHash('sha256').update(data).digest('hex'),
    blake2b: crypto.createHash('blake2b512').update(data).digest('hex'),
  };
}

/**
 * Helper to create a test RubyGem file (minimal tar.gz structure) using smartarchive
 */
export async function createRubyGem(
  gemName: string,
  version: string,
  platform: string = 'ruby'
): Promise<Buffer> {
  const tarTools = new smartarchive.TarTools();
  const gzipTools = new smartarchive.GzipTools();

  const metadataYaml = `--- !ruby/object:Gem::Specification
name: ${gemName}
version: !ruby/object:Gem::Version
  version: ${version}
platform: ${platform}
authors:
- Test Author
autorequire:
bindir: bin
cert_chain: []
date: ${new Date().toISOString().split('T')[0]}
dependencies: []
description: Test RubyGem
email: test@example.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- lib/${gemName}.rb
homepage: https://example.com
licenses:
- MIT
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '2.7'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.0.0
signing_key:
specification_version: 4
summary: Test gem for SmartRegistry
test_files: []
`;

  const metadataGz = Buffer.from(await gzipTools.compress(Buffer.from(metadataYaml, 'utf-8')));

  const libContent = `# ${gemName}

module ${gemName.charAt(0).toUpperCase() + gemName.slice(1).replace(/-/g, '')}
  VERSION = "${version}"

  def self.hello
    "Hello from #{gemName}!"
  end
end
`;

  const dataEntries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `lib/${gemName}.rb`,
      content: Buffer.from(libContent, 'utf-8'),
    },
  ];

  const dataTarGz = Buffer.from(await tarTools.packFilesToTarGz(dataEntries));

  const gemEntries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: 'metadata.gz',
      content: metadataGz,
    },
    {
      archivePath: 'data.tar.gz',
      content: dataTarGz,
    },
  ];

  // RubyGems .gem files are plain tar archives (NOT gzipped), containing metadata.gz and data.tar.gz
  return Buffer.from(await tarTools.packFiles(gemEntries));
}

/**
 * Helper to calculate RubyGems checksums
 */
export function calculateRubyGemsChecksums(data: Buffer) {
  return {
    md5: crypto.createHash('md5').update(data).digest('hex'),
    sha256: crypto.createHash('sha256').update(data).digest('hex'),
  };
}
@@ -0,0 +1,8 @@
/**
 * Generate a unique test run ID for avoiding conflicts between test runs.
 */
export function generateTestRunId(): string {
  const timestamp = Date.now().toString(36);
  const random = Math.random().toString(36).substring(2, 6);
  return `${timestamp}${random}`;
}
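// Illustrative: a base-36 encoding of the current timestamp (eight characters
// at present) plus four random base-36 characters, so IDs look roughly like
// 'lxo4k2p1ab3c' and are unique enough to keep parallel test runs apart.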
@@ -0,0 +1,125 @@
import type { IAuthToken, TRegistryProtocol } from '../../ts/core/interfaces.core.js';
import type { IAuthProvider } from '../../ts/core/interfaces.auth.js';
import type {
  IUpstreamProvider,
  IUpstreamRegistryConfig,
  IUpstreamResolutionContext,
  IProtocolUpstreamConfig,
} from '../../ts/upstream/interfaces.upstream.js';

type TTestUpstreamRegistryConfig = Omit<Partial<IUpstreamRegistryConfig>, 'id' | 'url' | 'priority' | 'enabled'> &
  Pick<IUpstreamRegistryConfig, 'id' | 'url' | 'priority' | 'enabled'>;

type TTestProtocolUpstreamConfig = Omit<IProtocolUpstreamConfig, 'upstreams'> & {
  upstreams: TTestUpstreamRegistryConfig[];
};

function normalizeUpstreamRegistryConfig(
  upstream: TTestUpstreamRegistryConfig
): IUpstreamRegistryConfig {
  return {
    ...upstream,
    name: upstream.name ?? upstream.id,
    auth: upstream.auth ?? { type: 'none' },
  };
}

function normalizeProtocolUpstreamConfig(
  config: TTestProtocolUpstreamConfig | undefined
): IProtocolUpstreamConfig | null {
  if (!config) {
    return null;
  }

  return {
    ...config,
    upstreams: config.upstreams.map(normalizeUpstreamRegistryConfig),
  };
}

/**
 * Create a mock upstream provider that tracks all calls for testing
 */
export function createTrackingUpstreamProvider(
  baseConfig?: Partial<Record<TRegistryProtocol, TTestProtocolUpstreamConfig>>
): {
  provider: IUpstreamProvider;
  calls: IUpstreamResolutionContext[];
} {
  const calls: IUpstreamResolutionContext[] = [];

  const provider: IUpstreamProvider = {
    async resolveUpstreamConfig(context: IUpstreamResolutionContext) {
      calls.push({ ...context });
      return normalizeProtocolUpstreamConfig(baseConfig?.[context.protocol]);
    },
  };

  return { provider, calls };
}
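// Illustrative usage sketch (config values assumed):
//   const { provider, calls } = createTrackingUpstreamProvider({
//     npm: { enabled: true, upstreams: [{ id: 'npmjs', url: 'https://registry.npmjs.org', priority: 1, enabled: true }] },
//   });
//   // After exercising the registry, `calls` holds one recorded context per
//   // resolveUpstreamConfig() invocation.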

/**
 * Create a mock auth provider for testing pluggable authentication.
 * Allows customizing behavior for different test scenarios.
 */
export function createMockAuthProvider(overrides?: Partial<IAuthProvider>): IAuthProvider {
  const tokens = new Map<string, IAuthToken>();

  return {
    init: async () => {},
    authenticate: async (credentials) => {
      return credentials.username;
    },
    validateToken: async (token, protocol) => {
      const stored = tokens.get(token);
      if (stored && (!protocol || stored.type === protocol)) {
        return stored;
      }
      if (token === 'valid-mock-token') {
        return {
          type: 'npm' as TRegistryProtocol,
          userId: 'mock-user',
          scopes: ['npm:*:*:*'],
        };
      }
      return null;
    },
    createToken: async (userId, protocol, options) => {
      const tokenId = `mock-${protocol}-${Date.now()}`;
      const authToken: IAuthToken = {
        type: protocol,
        userId,
        scopes: options?.scopes || [`${protocol}:*:*:*`],
        readonly: options?.readonly,
        expiresAt: options?.expiresIn ? new Date(Date.now() + options.expiresIn * 1000) : undefined,
      };
      tokens.set(tokenId, authToken);
      return tokenId;
    },
    revokeToken: async (token) => {
      tokens.delete(token);
    },
    authorize: async (token, resource, action) => {
      if (!token) return false;
      if (token.readonly && ['write', 'push', 'delete'].includes(action)) {
        return false;
      }
      return true;
    },
    listUserTokens: async (userId) => {
      const result: Array<{ key: string; readonly: boolean; created: string; protocol?: TRegistryProtocol }> = [];
      for (const [key, token] of tokens.entries()) {
        if (token.userId === userId) {
          result.push({
            key: `hash-${key.substring(0, 8)}`,
            readonly: token.readonly || false,
            created: new Date().toISOString(),
            protocol: token.type,
          });
        }
      }
      return result;
    },
    ...overrides,
  };
}
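// Illustrative usage sketch (the override shown is hypothetical):
//   const provider = createMockAuthProvider({
//     authenticate: async () => null, // simulate failed logins
//   });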
+34 -869
@@ -1,30 +1,34 @@
 import * as qenv from '@push.rocks/qenv';
-import * as crypto from 'crypto';
-import * as smartarchive from '@push.rocks/smartarchive';
 import * as smartbucket from '@push.rocks/smartbucket';
 import { SmartRegistry } from '../../ts/classes.smartregistry.js';
-import type { IRegistryConfig, IAuthToken, TRegistryProtocol } from '../../ts/core/interfaces.core.js';
-import type { IAuthProvider, ITokenOptions } from '../../ts/core/interfaces.auth.js';
-import type { IStorageHooks, IStorageHookContext, IBeforePutResult, IBeforeDeleteResult } from '../../ts/core/interfaces.storage.js';
-import { StaticUpstreamProvider } from '../../ts/upstream/interfaces.upstream.js';
-import type { IUpstreamProvider, IUpstreamResolutionContext, IProtocolUpstreamConfig } from '../../ts/upstream/interfaces.upstream.js';
+import type { IStorageHooks } from '../../ts/core/interfaces.storage.js';
+import type { IUpstreamProvider } from '../../ts/upstream/interfaces.upstream.js';
+import { buildTestRegistryConfig, createDefaultTestUpstreamProvider } from './registryconfig.js';
+import { generateTestRunId } from './ids.js';
+
+export {
+  calculateDigest,
+  createTestManifest,
+  createTestPackument,
+  createTestPom,
+  createTestJar,
+  calculateMavenChecksums,
+  createComposerZip,
+  createPythonWheel,
+  createPythonSdist,
+  calculatePypiHashes,
+  createRubyGem,
+  calculateRubyGemsChecksums,
+} from './fixtures.js';
+export { createMockAuthProvider, createTrackingUpstreamProvider } from './providers.js';
+export { buildTestRegistryConfig, createDefaultTestUpstreamProvider } from './registryconfig.js';
+export { createTestStorageBackend } from './storagebackend.js';
+export { generateTestRunId } from './ids.js';
+export { createTestTokens } from './tokens.js';
+export { createTrackingHooks, createQuotaHooks } from './storagehooks.js';
 
 const testQenv = new qenv.Qenv('./', './.nogit');
 
 /**
  * Clean up S3 bucket contents for a fresh test run
  * @param prefix Optional prefix to delete (e.g., 'cargo/', 'npm/', 'composer/')
  */
-/**
- * Generate a unique test run ID for avoiding conflicts between test runs
- * Uses timestamp + random suffix for uniqueness
- */
-export function generateTestRunId(): string {
-  const timestamp = Date.now().toString(36);
-  const random = Math.random().toString(36).substring(2, 6);
-  return `${timestamp}${random}`;
-}
-
 export async function cleanupS3Bucket(prefix?: string): Promise<void> {
   const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
   const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
@@ -40,19 +44,17 @@ export async function cleanupS3Bucket(prefix?: string): Promise<void> {
   });
 
   try {
-    const bucket = await s3.getBucket('test-registry');
+    const bucket = await s3.getBucketByName('test-registry');
     if (bucket) {
       if (prefix) {
         // Delete only objects with the given prefix
-        const files = await bucket.fastList({ prefix });
-        for (const file of files) {
-          await bucket.fastRemove({ path: file.name });
+        for await (const path of bucket.listAllObjects(prefix)) {
+          await bucket.fastRemove({ path });
         }
       } else {
         // Delete all objects in the bucket
-        const files = await bucket.fastList({});
-        for (const file of files) {
-          await bucket.fastRemove({ path: file.name });
+        for await (const path of bucket.listAllObjects()) {
+          await bucket.fastRemove({ path });
         }
       }
     }
@@ -67,77 +69,9 @@ export async function cleanupS3Bucket(prefix?: string): Promise<void> {
  */
 export async function createTestRegistry(options?: {
   registryUrl?: string;
   storageHooks?: IStorageHooks;
 }): Promise<SmartRegistry> {
-  // Read S3 config from env.json
-  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
-  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
-  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
-  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
-
-  const config: IRegistryConfig = {
-    storage: {
-      accessKey: s3AccessKey || 'minioadmin',
-      accessSecret: s3SecretKey || 'minioadmin',
-      endpoint: s3Endpoint || 'localhost',
-      port: parseInt(s3Port || '9000', 10),
-      useSsl: false,
-      region: 'us-east-1',
-      bucketName: 'test-registry',
-    },
-    auth: {
-      jwtSecret: 'test-secret-key',
-      tokenStore: 'memory',
-      npmTokens: {
-        enabled: true,
-      },
-      ociTokens: {
-        enabled: true,
-        realm: 'https://auth.example.com/token',
-        service: 'test-registry',
-      },
-      pypiTokens: {
-        enabled: true,
-      },
-      rubygemsTokens: {
-        enabled: true,
-      },
-    },
-    oci: {
-      enabled: true,
-      basePath: '/oci',
-      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/oci` } : {}),
-    },
-    npm: {
-      enabled: true,
-      basePath: '/npm',
-      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/npm` } : {}),
-    },
-    maven: {
-      enabled: true,
-      basePath: '/maven',
-      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/maven` } : {}),
-    },
-    composer: {
-      enabled: true,
-      basePath: '/composer',
-      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/composer` } : {}),
-    },
-    cargo: {
-      enabled: true,
-      basePath: '/cargo',
-      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/cargo` } : {}),
-    },
-    pypi: {
-      enabled: true,
-      basePath: '/pypi',
-      ...(options?.registryUrl ? { registryUrl: options.registryUrl } : {}),
-    },
-    rubygems: {
-      enabled: true,
-      basePath: '/rubygems',
-      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/rubygems` } : {}),
-    },
-  };
+  const config = await buildTestRegistryConfig(options);
 
   const registry = new SmartRegistry(config);
   await registry.init();
@@ -151,781 +85,12 @@ export async function createTestRegistry(options?: {
 export async function createTestRegistryWithUpstream(
   upstreamProvider?: IUpstreamProvider
 ): Promise<SmartRegistry> {
-  // Read S3 config from env.json
-  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
-  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
-  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
-  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
-
-  // Default to StaticUpstreamProvider with npm.js configured
-  const defaultProvider = new StaticUpstreamProvider({
-    npm: {
-      enabled: true,
-      upstreams: [{ id: 'npmjs', url: 'https://registry.npmjs.org', priority: 1, enabled: true }],
-    },
-    oci: {
-      enabled: true,
-      upstreams: [{ id: 'dockerhub', url: 'https://registry-1.docker.io', priority: 1, enabled: true }],
-    },
+  const config = await buildTestRegistryConfig({
+    upstreamProvider: upstreamProvider || createDefaultTestUpstreamProvider(),
   });
 
-  const config: IRegistryConfig = {
-    storage: {
-      accessKey: s3AccessKey || 'minioadmin',
-      accessSecret: s3SecretKey || 'minioadmin',
-      endpoint: s3Endpoint || 'localhost',
-      port: parseInt(s3Port || '9000', 10),
-      useSsl: false,
-      region: 'us-east-1',
-      bucketName: 'test-registry',
-    },
-    auth: {
-      jwtSecret: 'test-secret-key',
-      tokenStore: 'memory',
-      npmTokens: { enabled: true },
-      ociTokens: {
-        enabled: true,
-        realm: 'https://auth.example.com/token',
-        service: 'test-registry',
-      },
-      pypiTokens: { enabled: true },
-      rubygemsTokens: { enabled: true },
-    },
-    upstreamProvider: upstreamProvider || defaultProvider,
-    oci: { enabled: true, basePath: '/oci' },
-    npm: { enabled: true, basePath: '/npm' },
-    maven: { enabled: true, basePath: '/maven' },
-    composer: { enabled: true, basePath: '/composer' },
-    cargo: { enabled: true, basePath: '/cargo' },
-    pypi: { enabled: true, basePath: '/pypi' },
-    rubygems: { enabled: true, basePath: '/rubygems' },
-  };
-
   const registry = new SmartRegistry(config);
   await registry.init();
 
   return registry;
 }
-
-/**
- * Create a mock upstream provider that tracks all calls for testing
- */
-export function createTrackingUpstreamProvider(
-  baseConfig?: Partial<Record<TRegistryProtocol, IProtocolUpstreamConfig>>
-): {
-  provider: IUpstreamProvider;
-  calls: IUpstreamResolutionContext[];
-} {
-  const calls: IUpstreamResolutionContext[] = [];
-
-  const provider: IUpstreamProvider = {
-    async resolveUpstreamConfig(context: IUpstreamResolutionContext) {
-      calls.push({ ...context });
-      return baseConfig?.[context.protocol] ?? null;
-    },
-  };
-
-  return { provider, calls };
-}
-
-/**
- * Helper to create test authentication tokens
- */
-export async function createTestTokens(registry: SmartRegistry) {
-  const authManager = registry.getAuthManager();
-
-  // Authenticate and create tokens
-  const userId = await authManager.authenticate({
-    username: 'testuser',
-    password: 'testpass',
-  });
-
-  if (!userId) {
-    throw new Error('Failed to authenticate test user');
-  }
-
-  // Create NPM token
-  const npmToken = await authManager.createNpmToken(userId, false);
-
-  // Create OCI token with full access
-  const ociToken = await authManager.createOciToken(
-    userId,
-    ['oci:repository:*:*'],
-    3600
-  );
-
-  // Create Maven token with full access
-  const mavenToken = await authManager.createMavenToken(userId, false);
-
-  // Create Composer token with full access
-  const composerToken = await authManager.createComposerToken(userId, false);
-
-  // Create Cargo token with full access
-  const cargoToken = await authManager.createCargoToken(userId, false);
-
-  // Create PyPI token with full access
-  const pypiToken = await authManager.createPypiToken(userId, false);
-
-  // Create RubyGems token with full access
-  const rubygemsToken = await authManager.createRubyGemsToken(userId, false);
-
-  return { npmToken, ociToken, mavenToken, composerToken, cargoToken, pypiToken, rubygemsToken, userId };
-}
-
-/**
- * Helper to calculate SHA-256 digest in OCI format
- */
-export function calculateDigest(data: Buffer): string {
-  const hash = crypto.createHash('sha256').update(data).digest('hex');
-  return `sha256:${hash}`;
-}
-
-/**
- * Helper to create a minimal valid OCI manifest
- */
-export function createTestManifest(configDigest: string, layerDigest: string) {
-  return {
-    schemaVersion: 2,
-    mediaType: 'application/vnd.oci.image.manifest.v1+json',
-    config: {
-      mediaType: 'application/vnd.oci.image.config.v1+json',
-      size: 123,
-      digest: configDigest,
-    },
-    layers: [
-      {
-        mediaType: 'application/vnd.oci.image.layer.v1.tar+gzip',
-        size: 456,
-        digest: layerDigest,
-      },
-    ],
-  };
-}
-
-/**
- * Helper to create a minimal valid NPM packument
- */
-export function createTestPackument(packageName: string, version: string, tarballData: Buffer) {
-  const shasum = crypto.createHash('sha1').update(tarballData).digest('hex');
-  const integrity = `sha512-${crypto.createHash('sha512').update(tarballData).digest('base64')}`;
-
-  return {
-    name: packageName,
-    versions: {
-      [version]: {
-        name: packageName,
-        version: version,
-        description: 'Test package',
-        main: 'index.js',
-        scripts: {},
-        dist: {
-          shasum: shasum,
-          integrity: integrity,
-          tarball: `http://localhost:5000/npm/${packageName}/-/${packageName}-${version}.tgz`,
-        },
-      },
-    },
-    'dist-tags': {
-      latest: version,
-    },
-    _attachments: {
-      [`${packageName}-${version}.tgz`]: {
-        content_type: 'application/octet-stream',
-        data: tarballData.toString('base64'),
-        length: tarballData.length,
-      },
-    },
-  };
-}
-
-/**
- * Helper to create a minimal valid Maven POM file
- */
-export function createTestPom(
-  groupId: string,
-  artifactId: string,
-  version: string,
-  packaging: string = 'jar'
-): string {
-  return `<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
-         http://maven.apache.org/xsd/maven-4.0.0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>${groupId}</groupId>
-  <artifactId>${artifactId}</artifactId>
-  <version>${version}</version>
-  <packaging>${packaging}</packaging>
-  <name>${artifactId}</name>
-  <description>Test Maven artifact</description>
-</project>`;
-}
-
-/**
- * Helper to create a test JAR file (minimal ZIP with manifest)
- */
-export function createTestJar(): Buffer {
-  // Create a simple JAR structure (just a manifest)
-  // In practice, this is a ZIP file with at least META-INF/MANIFEST.MF
-  const manifestContent = `Manifest-Version: 1.0
-Created-By: SmartRegistry Test
-`;
-
-  // For testing, we'll just create a buffer with dummy content
-  // Real JAR would be a proper ZIP archive
-  return Buffer.from(manifestContent, 'utf-8');
-}
-
-/**
- * Helper to calculate Maven checksums
- */
-export function calculateMavenChecksums(data: Buffer) {
-  return {
-    md5: crypto.createHash('md5').update(data).digest('hex'),
-    sha1: crypto.createHash('sha1').update(data).digest('hex'),
-    sha256: crypto.createHash('sha256').update(data).digest('hex'),
-    sha512: crypto.createHash('sha512').update(data).digest('hex'),
-  };
-}
-
-/**
- * Helper to create a Composer package ZIP using smartarchive
- */
-export async function createComposerZip(
-  vendorPackage: string,
-  version: string,
-  options?: {
-    description?: string;
-    license?: string[];
-    authors?: Array<{ name: string; email?: string }>;
-  }
-): Promise<Buffer> {
-  const zipTools = new smartarchive.ZipTools();
-
-  const composerJson = {
-    name: vendorPackage,
-    version: version,
-    type: 'library',
-    description: options?.description || 'Test Composer package',
-    license: options?.license || ['MIT'],
-    authors: options?.authors || [{ name: 'Test Author', email: 'test@example.com' }],
-    require: {
-      php: '>=7.4',
-    },
-    autoload: {
-      'psr-4': {
-        'Vendor\\TestPackage\\': 'src/',
-      },
-    },
-  };
-
-  // Add a test PHP file
-  const [vendor, pkg] = vendorPackage.split('/');
-  const namespace = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}\\${pkg.charAt(0).toUpperCase() + pkg.slice(1).replace(/-/g, '')}`;
-  const testPhpContent = `<?php
-namespace ${namespace};
-
-class TestClass
-{
-    public function greet(): string
-    {
-        return "Hello from ${vendorPackage}!";
-    }
-}
-`;
-
-  const entries: smartarchive.IArchiveEntry[] = [
-    {
-      archivePath: 'composer.json',
-      content: Buffer.from(JSON.stringify(composerJson, null, 2), 'utf-8'),
-    },
-    {
-      archivePath: 'src/TestClass.php',
-      content: Buffer.from(testPhpContent, 'utf-8'),
-    },
-    {
-      archivePath: 'README.md',
-      content: Buffer.from(`# ${vendorPackage}\n\nTest package`, 'utf-8'),
-    },
-  ];
-
-  return Buffer.from(await zipTools.createZip(entries));
-}
-
-/**
- * Helper to create a test Python wheel file (minimal ZIP structure) using smartarchive
- */
-export async function createPythonWheel(
-  packageName: string,
-  version: string,
-  pyVersion: string = 'py3'
-): Promise<Buffer> {
-  const zipTools = new smartarchive.ZipTools();
-
-  const normalizedName = packageName.replace(/-/g, '_');
-  const distInfoDir = `${normalizedName}-${version}.dist-info`;
-
-  // Create METADATA file
-  const metadata = `Metadata-Version: 2.1
-Name: ${packageName}
-Version: ${version}
-Summary: Test Python package
-Home-page: https://example.com
-Author: Test Author
-Author-email: test@example.com
-License: MIT
-Platform: UNKNOWN
-Classifier: Programming Language :: Python :: 3
-Requires-Python: >=3.7
-Description-Content-Type: text/markdown
-
-# ${packageName}
-
-Test package for SmartRegistry
-`;
-
-  // Create WHEEL file
-  const wheelContent = `Wheel-Version: 1.0
-Generator: test 1.0.0
-Root-Is-Purelib: true
-Tag: ${pyVersion}-none-any
-`;
-
-  // Create a simple Python module
-  const moduleContent = `"""${packageName} module"""
-
-__version__ = "${version}"
-
-def hello():
-    return "Hello from ${packageName}!"
-`;
-
-  const entries: smartarchive.IArchiveEntry[] = [
-    {
-      archivePath: `${distInfoDir}/METADATA`,
-      content: Buffer.from(metadata, 'utf-8'),
-    },
-    {
-      archivePath: `${distInfoDir}/WHEEL`,
-      content: Buffer.from(wheelContent, 'utf-8'),
-    },
-    {
-      archivePath: `${distInfoDir}/RECORD`,
-      content: Buffer.from('', 'utf-8'),
-    },
-    {
-      archivePath: `${distInfoDir}/top_level.txt`,
-      content: Buffer.from(normalizedName, 'utf-8'),
-    },
-    {
-      archivePath: `${normalizedName}/__init__.py`,
-      content: Buffer.from(moduleContent, 'utf-8'),
-    },
-  ];
-
-  return Buffer.from(await zipTools.createZip(entries));
-}
-
-/**
- * Helper to create a test Python source distribution (sdist) using smartarchive
- */
-export async function createPythonSdist(
-  packageName: string,
-  version: string
-): Promise<Buffer> {
-  const tarTools = new smartarchive.TarTools();
-
-  const normalizedName = packageName.replace(/-/g, '_');
-  const dirPrefix = `${packageName}-${version}`;
-
-  // PKG-INFO
-  const pkgInfo = `Metadata-Version: 2.1
-Name: ${packageName}
-Version: ${version}
-Summary: Test Python package
-Home-page: https://example.com
-Author: Test Author
-Author-email: test@example.com
-License: MIT
-`;
-
-  // setup.py
-  const setupPy = `from setuptools import setup, find_packages
-
-setup(
-    name="${packageName}",
-    version="${version}",
-    packages=find_packages(),
-    python_requires=">=3.7",
-)
-`;
-
-  // Module file
-  const moduleContent = `"""${packageName} module"""
-
-__version__ = "${version}"
-
-def hello():
-    return "Hello from ${packageName}!"
-`;
-
-  const entries: smartarchive.IArchiveEntry[] = [
-    {
-      archivePath: `${dirPrefix}/PKG-INFO`,
-      content: Buffer.from(pkgInfo, 'utf-8'),
-    },
-    {
-      archivePath: `${dirPrefix}/setup.py`,
-      content: Buffer.from(setupPy, 'utf-8'),
-    },
-    {
-      archivePath: `${dirPrefix}/${normalizedName}/__init__.py`,
-      content: Buffer.from(moduleContent, 'utf-8'),
-    },
-  ];
-
-  return Buffer.from(await tarTools.packFilesToTarGz(entries));
-}
-
-/**
- * Helper to calculate PyPI file hashes
- */
-export function calculatePypiHashes(data: Buffer) {
-  return {
-    md5: crypto.createHash('md5').update(data).digest('hex'),
-    sha256: crypto.createHash('sha256').update(data).digest('hex'),
-    blake2b: crypto.createHash('blake2b512').update(data).digest('hex'),
-  };
-}
-
-/**
- * Helper to create a test RubyGem file (minimal tar.gz structure) using smartarchive
- */
-export async function createRubyGem(
-  gemName: string,
-  version: string,
-  platform: string = 'ruby'
-): Promise<Buffer> {
-  const tarTools = new smartarchive.TarTools();
-  const gzipTools = new smartarchive.GzipTools();
-
-  // Create metadata.gz (simplified)
-  const metadataYaml = `--- !ruby/object:Gem::Specification
-name: ${gemName}
-version: !ruby/object:Gem::Version
-  version: ${version}
-platform: ${platform}
-authors:
-- Test Author
-autorequire:
-bindir: bin
-cert_chain: []
-date: ${new Date().toISOString().split('T')[0]}
-dependencies: []
-description: Test RubyGem
-email: test@example.com
-executables: []
-extensions: []
-extra_rdoc_files: []
-files:
-- lib/${gemName}.rb
-homepage: https://example.com
-licenses:
-- MIT
-metadata: {}
-post_install_message:
-rdoc_options: []
-require_paths:
-- lib
-required_ruby_version: !ruby/object:Gem::Requirement
-  requirements:
-  - - ">="
-    - !ruby/object:Gem::Version
-      version: '2.7'
-required_rubygems_version: !ruby/object:Gem::Requirement
-  requirements:
-  - - ">="
-    - !ruby/object:Gem::Version
-      version: '0'
-requirements: []
-rubygems_version: 3.0.0
-signing_key:
-specification_version: 4
-summary: Test gem for SmartRegistry
-test_files: []
-`;
-
-  const metadataGz = Buffer.from(await gzipTools.compress(Buffer.from(metadataYaml, 'utf-8')));
-
-  // Create data.tar.gz content
-  const libContent = `# ${gemName}
-
-module ${gemName.charAt(0).toUpperCase() + gemName.slice(1).replace(/-/g, '')}
-  VERSION = "${version}"
-
-  def self.hello
-    "Hello from #{gemName}!"
-  end
-end
-`;
-
-  const dataEntries: smartarchive.IArchiveEntry[] = [
-    {
-      archivePath: `lib/${gemName}.rb`,
-      content: Buffer.from(libContent, 'utf-8'),
-    },
-  ];
-
-  const dataTarGz = Buffer.from(await tarTools.packFilesToTarGz(dataEntries));
-
-  // Create the outer gem (tar.gz containing metadata.gz and data.tar.gz)
-  const gemEntries: smartarchive.IArchiveEntry[] = [
-    {
-      archivePath: 'metadata.gz',
-      content: metadataGz,
-    },
-    {
-      archivePath: 'data.tar.gz',
-      content: dataTarGz,
-    },
-  ];
-
-  // RubyGems .gem files are plain tar archives (NOT gzipped), containing metadata.gz and data.tar.gz
-  return Buffer.from(await tarTools.packFiles(gemEntries));
-}
-
-/**
- * Helper to calculate RubyGems checksums
- */
-export function calculateRubyGemsChecksums(data: Buffer) {
-  return {
-    md5: crypto.createHash('md5').update(data).digest('hex'),
-    sha256: crypto.createHash('sha256').update(data).digest('hex'),
-  };
-}
-
-// ============================================================================
-// Enterprise Extensibility Test Helpers
-// ============================================================================
-
-/**
- * Create a mock auth provider for testing pluggable authentication.
- * Allows customizing behavior for different test scenarios.
- */
-export function createMockAuthProvider(overrides?: Partial<IAuthProvider>): IAuthProvider {
-  const tokens = new Map<string, IAuthToken>();
-
-  return {
-    init: async () => {},
-    authenticate: async (credentials) => {
-      // Default: always authenticate successfully
-      return credentials.username;
-    },
-    validateToken: async (token, protocol) => {
-      const stored = tokens.get(token);
-      if (stored && (!protocol || stored.type === protocol)) {
-        return stored;
-      }
-      // Mock token for tests
-      if (token === 'valid-mock-token') {
-        return {
-          type: 'npm' as TRegistryProtocol,
-          userId: 'mock-user',
-          scopes: ['npm:*:*:*'],
-        };
-      }
-      return null;
-    },
-    createToken: async (userId, protocol, options) => {
-      const tokenId = `mock-${protocol}-${Date.now()}`;
-      const authToken: IAuthToken = {
-        type: protocol,
-        userId,
-        scopes: options?.scopes || [`${protocol}:*:*:*`],
-        readonly: options?.readonly,
-        expiresAt: options?.expiresIn ? new Date(Date.now() + options.expiresIn * 1000) : undefined,
-      };
-      tokens.set(tokenId, authToken);
-      return tokenId;
-    },
-    revokeToken: async (token) => {
-      tokens.delete(token);
-    },
-    authorize: async (token, resource, action) => {
-      if (!token) return false;
-      if (token.readonly && ['write', 'push', 'delete'].includes(action)) {
-        return false;
-      }
-      return true;
-    },
-    listUserTokens: async (userId) => {
-      const result: Array<{ key: string; readonly: boolean; created: string; protocol?: TRegistryProtocol }> = [];
-      for (const [key, token] of tokens.entries()) {
-        if (token.userId === userId) {
-          result.push({
-            key: `hash-${key.substring(0, 8)}`,
-            readonly: token.readonly || false,
-            created: new Date().toISOString(),
-            protocol: token.type,
-          });
-        }
-      }
-      return result;
-    },
-    ...overrides,
-  };
-}
-
-/**
- * Create test storage hooks that track all calls.
- * Useful for verifying hook invocation order and parameters.
- */
-export function createTrackingHooks(options?: {
-  beforePutAllowed?: boolean;
-  beforeDeleteAllowed?: boolean;
-  throwOnAfterPut?: boolean;
-  throwOnAfterGet?: boolean;
-}): {
-  hooks: IStorageHooks;
-  calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }>;
-} {
-  const calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }> = [];
-
-  return {
-    calls,
-    hooks: {
-      beforePut: async (ctx) => {
-        calls.push({ method: 'beforePut', context: ctx, timestamp: Date.now() });
-        return {
-          allowed: options?.beforePutAllowed !== false,
-          reason: options?.beforePutAllowed === false ? 'Blocked by test' : undefined,
-        };
-      },
-      afterPut: async (ctx) => {
-        calls.push({ method: 'afterPut', context: ctx, timestamp: Date.now() });
-        if (options?.throwOnAfterPut) {
-          throw new Error('Test error in afterPut');
-        }
-      },
-      beforeDelete: async (ctx) => {
-        calls.push({ method: 'beforeDelete', context: ctx, timestamp: Date.now() });
-        return {
-          allowed: options?.beforeDeleteAllowed !== false,
-          reason: options?.beforeDeleteAllowed === false ? 'Blocked by test' : undefined,
-        };
-      },
-      afterDelete: async (ctx) => {
-        calls.push({ method: 'afterDelete', context: ctx, timestamp: Date.now() });
-      },
-      afterGet: async (ctx) => {
-        calls.push({ method: 'afterGet', context: ctx, timestamp: Date.now() });
-        if (options?.throwOnAfterGet) {
-          throw new Error('Test error in afterGet');
-        }
-      },
-    },
-  };
-}
-
-/**
- * Create a blocking storage hooks implementation for quota testing.
- */
-export function createQuotaHooks(maxSizeBytes: number): {
-  hooks: IStorageHooks;
-  currentUsage: { bytes: number };
-} {
-  const currentUsage = { bytes: 0 };
-
-  return {
-    currentUsage,
-    hooks: {
-      beforePut: async (ctx) => {
-        const size = ctx.metadata?.size || 0;
-        if (currentUsage.bytes + size > maxSizeBytes) {
-          return { allowed: false, reason: `Quota exceeded: ${currentUsage.bytes + size} > ${maxSizeBytes}` };
-        }
-        return { allowed: true };
-      },
-      afterPut: async (ctx) => {
-        currentUsage.bytes += ctx.metadata?.size || 0;
-      },
-      afterDelete: async (ctx) => {
-        currentUsage.bytes -= ctx.metadata?.size || 0;
-        if (currentUsage.bytes < 0) currentUsage.bytes = 0;
-      },
-    },
-  };
-}
-
-/**
- * Create a SmartBucket storage backend for upstream cache testing.
- */
-export async function createTestStorageBackend(): Promise<{
-  storage: {
-    getObject: (key: string) => Promise<Buffer | null>;
-    putObject: (key: string, data: Buffer) => Promise<void>;
-    deleteObject: (key: string) => Promise<void>;
-    listObjects: (prefix: string) => Promise<string[]>;
-  };
-  bucket: smartbucket.Bucket;
-  cleanup: () => Promise<void>;
-}> {
-  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
-  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
-  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
-  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');
-
-  const s3 = new smartbucket.SmartBucket({
-    accessKey: s3AccessKey || 'minioadmin',
-    accessSecret: s3SecretKey || 'minioadmin',
-    endpoint: s3Endpoint || 'localhost',
-    port: parseInt(s3Port || '9000', 10),
-    useSsl: false,
-  });
-
-  const testRunId = generateTestRunId();
-  const bucketName = 'test-cache-' + testRunId.substring(0, 8);
-  const bucket = await s3.createBucket(bucketName);
-
-  const storage = {
-    getObject: async (key: string): Promise<Buffer | null> => {
-      try {
-        const file = await bucket.fastGet({ path: key });
-        if (!file) return null;
-        const stream = await file.createReadStream();
-        const chunks: Buffer[] = [];
-        for await (const chunk of stream) {
-          chunks.push(Buffer.from(chunk));
-        }
-        return Buffer.concat(chunks);
-      } catch {
-        return null;
-      }
-    },
-    putObject: async (key: string, data: Buffer): Promise<void> => {
-      await bucket.fastPut({ path: key, contents: data, overwrite: true });
-    },
-    deleteObject: async (key: string): Promise<void> => {
-      await bucket.fastRemove({ path: key });
-    },
-    listObjects: async (prefix: string): Promise<string[]> => {
-      const files = await bucket.fastList({ prefix });
-      return files.map(f => f.name);
-    },
-  };
-
-  const cleanup = async () => {
-    try {
-      const files = await bucket.fastList({});
-      for (const file of files) {
-        await bucket.fastRemove({ path: file.name });
-      }
-      await s3.removeBucket(bucketName);
-    } catch {
-      // Ignore cleanup errors
-    }
-  };
-
-  return { storage, bucket, cleanup };
-}
@@ -0,0 +1,122 @@
import * as qenv from '@push.rocks/qenv';
import type { IRegistryConfig } from '../../ts/core/interfaces.core.js';
import type { IStorageHooks } from '../../ts/core/interfaces.storage.js';
import { StaticUpstreamProvider } from '../../ts/upstream/interfaces.upstream.js';
import type { IUpstreamProvider } from '../../ts/upstream/interfaces.upstream.js';

const testQenv = new qenv.Qenv('./', './.nogit');

async function getTestStorageConfig(): Promise<IRegistryConfig['storage']> {
  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');

  return {
    accessKey: s3AccessKey || 'minioadmin',
    accessSecret: s3SecretKey || 'minioadmin',
    endpoint: s3Endpoint || 'localhost',
    port: parseInt(s3Port || '9000', 10),
    useSsl: false,
    region: 'us-east-1',
    bucketName: 'test-registry',
  };
}

function getTestAuthConfig(): IRegistryConfig['auth'] {
  return {
    jwtSecret: 'test-secret-key',
    tokenStore: 'memory',
    npmTokens: {
      enabled: true,
    },
    ociTokens: {
      enabled: true,
      realm: 'https://auth.example.com/token',
      service: 'test-registry',
    },
    pypiTokens: {
      enabled: true,
    },
    rubygemsTokens: {
      enabled: true,
    },
  };
}

export function createDefaultTestUpstreamProvider(): IUpstreamProvider {
  return new StaticUpstreamProvider({
    npm: {
      enabled: true,
      upstreams: [{
        id: 'npmjs',
        name: 'npmjs',
        url: 'https://registry.npmjs.org',
        priority: 1,
        enabled: true,
        auth: { type: 'none' },
      }],
    },
    oci: {
      enabled: true,
      upstreams: [{
        id: 'dockerhub',
        name: 'dockerhub',
        url: 'https://registry-1.docker.io',
        priority: 1,
        enabled: true,
        auth: { type: 'none' },
      }],
    },
  });
}

export async function buildTestRegistryConfig(options?: {
  registryUrl?: string;
  storageHooks?: IStorageHooks;
  upstreamProvider?: IUpstreamProvider;
}): Promise<IRegistryConfig> {
  const config: IRegistryConfig = {
    storage: await getTestStorageConfig(),
    auth: getTestAuthConfig(),
    ...(options?.storageHooks ? { storageHooks: options.storageHooks } : {}),
    ...(options?.upstreamProvider ? { upstreamProvider: options.upstreamProvider } : {}),
    oci: {
      enabled: true,
      basePath: '/oci',
      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/oci` } : {}),
    },
    npm: {
      enabled: true,
      basePath: '/npm',
      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/npm` } : {}),
    },
    maven: {
      enabled: true,
      basePath: '/maven',
      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/maven` } : {}),
    },
    composer: {
      enabled: true,
      basePath: '/composer',
      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/composer` } : {}),
    },
    cargo: {
      enabled: true,
      basePath: '/cargo',
      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/cargo` } : {}),
    },
    pypi: {
      enabled: true,
      basePath: '/pypi',
      ...(options?.registryUrl ? { registryUrl: options.registryUrl } : {}),
    },
    rubygems: {
      enabled: true,
      basePath: '/rubygems',
      ...(options?.registryUrl ? { registryUrl: `${options.registryUrl}/rubygems` } : {}),
    },
  };

  return config;
}
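// Illustrative usage sketch (URL value assumed):
//   const config = await buildTestRegistryConfig({ registryUrl: 'http://localhost:5000' });
//   const registry = new SmartRegistry(config);
//   await registry.init();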
@@ -0,0 +1,72 @@
import * as qenv from '@push.rocks/qenv';
import * as smartbucket from '@push.rocks/smartbucket';
import { generateTestRunId } from './ids.js';

const testQenv = new qenv.Qenv('./', './.nogit');

/**
 * Create a SmartBucket storage backend for upstream cache testing.
 */
export async function createTestStorageBackend(): Promise<{
  storage: {
    getObject: (key: string) => Promise<Buffer | null>;
    putObject: (key: string, data: Buffer) => Promise<void>;
    deleteObject: (key: string) => Promise<void>;
    listObjects: (prefix: string) => Promise<string[]>;
  };
  bucket: smartbucket.Bucket;
  cleanup: () => Promise<void>;
}> {
  const s3AccessKey = await testQenv.getEnvVarOnDemand('S3_ACCESSKEY');
  const s3SecretKey = await testQenv.getEnvVarOnDemand('S3_SECRETKEY');
  const s3Endpoint = await testQenv.getEnvVarOnDemand('S3_ENDPOINT');
  const s3Port = await testQenv.getEnvVarOnDemand('S3_PORT');

  const s3 = new smartbucket.SmartBucket({
    accessKey: s3AccessKey || 'minioadmin',
    accessSecret: s3SecretKey || 'minioadmin',
    endpoint: s3Endpoint || 'localhost',
    port: parseInt(s3Port || '9000', 10),
    useSsl: false,
  });

  const testRunId = generateTestRunId();
  const bucketName = 'test-cache-' + testRunId.substring(0, 8);
  const bucket = await s3.createBucket(bucketName);

  const storage = {
    getObject: async (key: string): Promise<Buffer | null> => {
      try {
        return await bucket.fastGet({ path: key });
      } catch {
        return null;
      }
    },
    putObject: async (key: string, data: Buffer): Promise<void> => {
      await bucket.fastPut({ path: key, contents: data, overwrite: true });
    },
    deleteObject: async (key: string): Promise<void> => {
      await bucket.fastRemove({ path: key });
    },
    listObjects: async (prefix: string): Promise<string[]> => {
      const paths: string[] = [];
      for await (const path of bucket.listAllObjects(prefix)) {
        paths.push(path);
      }
      return paths;
    },
  };

  const cleanup = async () => {
    try {
      for await (const path of bucket.listAllObjects()) {
        await bucket.fastRemove({ path });
      }
      await s3.removeBucket(bucketName);
    } catch {
      // Ignore cleanup errors
    }
  };

  return { storage, bucket, cleanup };
}
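// Illustrative usage sketch:
//   const { storage, cleanup } = await createTestStorageBackend();
//   await storage.putObject('npm/some-key', Buffer.from('cached bytes'));
//   const roundTripped = await storage.getObject('npm/some-key');
//   await cleanup(); // empties and removes the per-run bucket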
@@ -0,0 +1,82 @@
|
||||
import type { IStorageHooks, IStorageHookContext } from '../../ts/core/interfaces.storage.js';
|
||||
|
||||
/**
|
||||
* Create test storage hooks that track all calls.
|
||||
* Useful for verifying hook invocation order and parameters.
|
||||
*/
|
||||
export function createTrackingHooks(options?: {
  beforePutAllowed?: boolean;
  beforeDeleteAllowed?: boolean;
  throwOnAfterPut?: boolean;
  throwOnAfterGet?: boolean;
}): {
  hooks: IStorageHooks;
  calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }>;
} {
  const calls: Array<{ method: string; context: IStorageHookContext; timestamp: number }> = [];

  return {
    calls,
    hooks: {
      beforePut: async (ctx) => {
        calls.push({ method: 'beforePut', context: ctx, timestamp: Date.now() });
        return {
          allowed: options?.beforePutAllowed !== false,
          reason: options?.beforePutAllowed === false ? 'Blocked by test' : undefined,
        };
      },
      afterPut: async (ctx) => {
        calls.push({ method: 'afterPut', context: ctx, timestamp: Date.now() });
        if (options?.throwOnAfterPut) {
          throw new Error('Test error in afterPut');
        }
      },
      beforeDelete: async (ctx) => {
        calls.push({ method: 'beforeDelete', context: ctx, timestamp: Date.now() });
        return {
          allowed: options?.beforeDeleteAllowed !== false,
          reason: options?.beforeDeleteAllowed === false ? 'Blocked by test' : undefined,
        };
      },
      afterDelete: async (ctx) => {
        calls.push({ method: 'afterDelete', context: ctx, timestamp: Date.now() });
      },
      afterGet: async (ctx) => {
        calls.push({ method: 'afterGet', context: ctx, timestamp: Date.now() });
        if (options?.throwOnAfterGet) {
          throw new Error('Test error in afterGet');
        }
      },
    },
  };
}
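
A minimal consumer sketch (assuming, as the storage tests below do, that RegistryStorage accepts hooks as its second constructor argument, and using that file's storageConfig):

const tracker = createTrackingHooks({ beforePutAllowed: false });
const storage = new RegistryStorage(storageConfig, tracker.hooks);
await storage.init();
// The write below should be denied by beforePut; whether the denial surfaces
// as a throw or a no-op is up to RegistryStorage and not assumed here.
await storage.putObject('test/blocked.txt', Buffer.from('x')).catch(() => {});
// Either way, the attempt is recorded:
// tracker.calls[0] -> { method: 'beforePut', context: { key: 'test/blocked.txt', ... } }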

/**
 * Create a blocking storage hooks implementation for quota testing.
 */
export function createQuotaHooks(maxSizeBytes: number): {
  hooks: IStorageHooks;
  currentUsage: { bytes: number };
} {
  const currentUsage = { bytes: 0 };

  return {
    currentUsage,
    hooks: {
      beforePut: async (ctx) => {
        const size = ctx.metadata?.size || 0;
        if (currentUsage.bytes + size > maxSizeBytes) {
          return { allowed: false, reason: `Quota exceeded: ${currentUsage.bytes + size} > ${maxSizeBytes}` };
        }
        return { allowed: true };
      },
      afterPut: async (ctx) => {
        currentUsage.bytes += ctx.metadata?.size || 0;
      },
      afterDelete: async (ctx) => {
        currentUsage.bytes -= ctx.metadata?.size || 0;
        if (currentUsage.bytes < 0) currentUsage.bytes = 0;
      },
    },
  };
}
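
And a quota sketch in the same style (assuming the storage layer reports each write's size via ctx.metadata.size, which is what the accounting above relies on):

const quota = createQuotaHooks(1024); // 1 KiB budget
const storage = new RegistryStorage(storageConfig, quota.hooks);
await storage.init();
await storage.putObject('q/a.bin', Buffer.alloc(512)); // allowed; usage -> 512
// A further 600-byte write would take usage to 1112 > 1024, so beforePut
// answers { allowed: false, reason: 'Quota exceeded: 1112 > 1024' }.
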
@@ -0,0 +1,27 @@
import type { SmartRegistry } from '../../ts/classes.smartregistry.js';

/**
 * Helper to create test authentication tokens.
 */
export async function createTestTokens(registry: SmartRegistry) {
  const authManager = registry.getAuthManager();

  const userId = await authManager.authenticate({
    username: 'testuser',
    password: 'testpass',
  });

  if (!userId) {
    throw new Error('Failed to authenticate test user');
  }

  const npmToken = await authManager.createNpmToken(userId, false);
  const ociToken = await authManager.createOciToken(userId, ['oci:repository:*:*'], 3600);
  const mavenToken = await authManager.createMavenToken(userId, false);
  const composerToken = await authManager.createComposerToken(userId, false);
  const cargoToken = await authManager.createCargoToken(userId, false);
  const pypiToken = await authManager.createPypiToken(userId, false);
  const rubygemsToken = await authManager.createRubyGemsToken(userId, false);

  return { npmToken, ociToken, mavenToken, composerToken, cargoToken, pypiToken, rubygemsToken, userId };
}
+45 -1
@@ -1,7 +1,7 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import { SmartRegistry } from '../ts/index.js';
import { streamToBuffer, streamToJson } from '../ts/core/helpers.stream.js';
-import { createTestRegistry, createTestTokens, createTestPackument } from './helpers/registry.js';
+import { createTestRegistry, createTestTokens, createTestPackument, generateTestRunId } from './helpers/registry.js';

let registry: SmartRegistry;
let npmToken: string;
@@ -137,6 +137,50 @@ tap.test('NPM: should publish a new version of the package', async () => {
  expect(getBody.versions).toHaveProperty(newVersion);
});

tap.test('NPM: should support unencoded scoped package publish and metadata routes', async () => {
  const scopedPackageName = `@scope/test-package-${generateTestRunId()}`;
  const scopedVersion = '2.0.0';
  const scopedTarballData = Buffer.from('scoped tarball content', 'utf-8');
  const packument = createTestPackument(scopedPackageName, scopedVersion, scopedTarballData);

  const publishResponse = await registry.handleRequest({
    method: 'PUT',
    path: `/npm/${scopedPackageName}`,
    headers: {
      Authorization: `Bearer ${npmToken}`,
      'Content-Type': 'application/json',
    },
    query: {},
    body: packument,
  });

  expect(publishResponse.status).toEqual(201);

  const metadataResponse = await registry.handleRequest({
    method: 'GET',
    path: `/npm/${scopedPackageName}`,
    headers: {},
    query: {},
  });

  expect(metadataResponse.status).toEqual(200);
  const metadataBody = await streamToJson(metadataResponse.body);
  expect(metadataBody.name).toEqual(scopedPackageName);
  expect(metadataBody.versions).toHaveProperty(scopedVersion);

  const versionResponse = await registry.handleRequest({
    method: 'GET',
    path: `/npm/${scopedPackageName}/${scopedVersion}`,
    headers: {},
    query: {},
  });

  expect(versionResponse.status).toEqual(200);
  const versionBody = await streamToJson(versionResponse.body);
  expect(versionBody.name).toEqual(scopedPackageName);
  expect(versionBody.version).toEqual(scopedVersion);
});
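
npm clients often URL-encode the scope separator instead; assuming the router also decodes percent-encoded paths (not asserted by this test), the equivalent metadata request would be:

const encodedResponse = await registry.handleRequest({
  method: 'GET',
  path: `/npm/${encodeURIComponent(scopedPackageName)}`, // '/npm/%40scope%2Ftest-package-...'
  headers: {},
  query: {},
});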

tap.test('NPM: should get dist-tags (GET /-/package/{pkg}/dist-tags)', async () => {
  const response = await registry.handleRequest({
    method: 'GET',

+142 -1
@@ -3,7 +3,14 @@ import * as qenv from '@push.rocks/qenv';
import { RegistryStorage } from '../ts/core/classes.registrystorage.js';
import type { IStorageConfig } from '../ts/core/interfaces.core.js';
import type { IStorageHooks, IStorageHookContext } from '../ts/core/interfaces.storage.js';
-import { createTrackingHooks, createQuotaHooks, generateTestRunId } from './helpers/registry.js';
+import {
+  createQuotaHooks,
+  createTestPackument,
+  createTestRegistry,
+  createTestTokens,
+  createTrackingHooks,
+  generateTestRunId,
+} from './helpers/registry.js';

const testQenv = new qenv.Qenv('./', './.nogit');

@@ -344,6 +351,140 @@ tap.test('withContext: should clear context even on error', async () => {
  await errorStorage.putObject('test/after-error.txt', Buffer.from('ok'));
});

tap.test('withContext: should isolate concurrent async operations', async () => {
  const tracker = createTrackingHooks();

  const concurrentStorage = new RegistryStorage(storageConfig, tracker.hooks);
  await concurrentStorage.init();

  const bucket = (concurrentStorage as any).bucket;
  const originalFastPut = bucket.fastPut.bind(bucket);
  const pendingWrites: Array<() => void> = [];
  let startedWrites = 0;
  let waitingWrites = 0;
  let startedResolve!: () => void;
  let waitingResolve!: () => void;
  const bothWritesStarted = new Promise<void>((resolve) => {
    startedResolve = resolve;
  });
  const bothWritesWaiting = new Promise<void>((resolve) => {
    waitingResolve = resolve;
  });

  // Gate fastPut so both writes are in flight (and suspended) at the same
  // time, forcing their async contexts to interleave.
  bucket.fastPut = async (options: any) => {
    startedWrites += 1;
    if (startedWrites === 2) {
      startedResolve();
    }

    await bothWritesStarted;

    await new Promise<void>((resolve) => {
      pendingWrites.push(resolve);
      waitingWrites += 1;
      if (waitingWrites === 2) {
        waitingResolve();
      }
    });

    return originalFastPut(options);
  };

  try {
    const opA = concurrentStorage.withContext(
      {
        protocol: 'npm',
        actor: { userId: 'user-a' },
        metadata: { packageName: 'package-a' },
      },
      async () => {
        await concurrentStorage.putObject('test/concurrent-a.txt', Buffer.from('a'));
      }
    );

    const opB = concurrentStorage.withContext(
      {
        protocol: 'npm',
        actor: { userId: 'user-b' },
        metadata: { packageName: 'package-b' },
      },
      async () => {
        await concurrentStorage.putObject('test/concurrent-b.txt', Buffer.from('b'));
      }
    );

    await bothWritesWaiting;

    pendingWrites[0]!();
    pendingWrites[1]!();

    await Promise.all([opA, opB]);
    await new Promise(resolve => setTimeout(resolve, 100));
  } finally {
    bucket.fastPut = originalFastPut;
  }

  const afterPutCalls = tracker.calls.filter(
    (call) => call.method === 'afterPut' && call.context.key.startsWith('test/concurrent-')
  );
  expect(afterPutCalls.length).toEqual(2);

  const callByKey = new Map(afterPutCalls.map((call) => [call.context.key, call]));
  expect(callByKey.get('test/concurrent-a.txt')?.context.actor?.userId).toEqual('user-a');
  expect(callByKey.get('test/concurrent-a.txt')?.context.metadata?.packageName).toEqual('package-a');
  expect(callByKey.get('test/concurrent-b.txt')?.context.actor?.userId).toEqual('user-b');
  expect(callByKey.get('test/concurrent-b.txt')?.context.metadata?.packageName).toEqual('package-b');
});
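
The isolation asserted above is what Node's AsyncLocalStorage provides; one plausible shape for withContext (a sketch under that assumption — the actual RegistryStorage implementation is not part of this diff):

import { AsyncLocalStorage } from 'async_hooks';

// Hypothetical context shape; the real IStorageHookContext carries more fields.
type TRequestContext = {
  protocol: string;
  actor?: { userId: string };
  metadata?: Record<string, unknown>;
};

const contextStore = new AsyncLocalStorage<TRequestContext>();

// run() binds ctx to every async continuation started inside fn, so two
// interleaved withContext() calls never observe each other's context.
function withContext<T>(ctx: TRequestContext, fn: () => Promise<T>): Promise<T> {
  return contextStore.run(ctx, fn);
}

// Storage hooks would read the ambient context at the moment of each call.
function currentContext(): TRequestContext | undefined {
  return contextStore.getStore();
}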
tap.test('request hooks: should receive context during real npm publish requests', async () => {
  const tracker = createTrackingHooks();
  const registry = await createTestRegistry({ storageHooks: tracker.hooks });

  try {
    const tokens = await createTestTokens(registry);
    const packageName = `hooked-package-${generateTestRunId()}`;
    const version = '1.0.0';
    const tarball = Buffer.from('hooked tarball data', 'utf-8');
    const packument = createTestPackument(packageName, version, tarball);

    const response = await registry.handleRequest({
      method: 'PUT',
      path: `/npm/${packageName}`,
      headers: {
        Authorization: `Bearer ${tokens.npmToken}`,
        'Content-Type': 'application/json',
      },
      query: {},
      body: packument,
    });

    expect(response.status).toEqual(201);
    // Give asynchronously dispatched hooks time to settle before asserting.
    await new Promise(resolve => setTimeout(resolve, 100));

    const npmWrites = tracker.calls.filter(
      (call) => call.method === 'beforePut' && call.context.metadata?.packageName === packageName
    );
    expect(npmWrites.length).toBeGreaterThanOrEqual(2);

    const packumentWrite = npmWrites.find(
      (call) => call.context.key === `npm/packages/${packageName}/index.json`
    );
    expect(packumentWrite).toBeTruthy();
    expect(packumentWrite!.context.protocol).toEqual('npm');
    expect(packumentWrite!.context.actor?.userId).toEqual(tokens.userId);
    expect(packumentWrite!.context.metadata?.packageName).toEqual(packageName);

    const tarballWrite = npmWrites.find(
      (call) => call.context.key.endsWith(`-${version}.tgz`)
    );
    expect(tarballWrite).toBeTruthy();
    expect(tarballWrite!.context.metadata?.packageName).toEqual(packageName);
    expect(tarballWrite!.context.metadata?.version).toEqual(version);
  } finally {
    registry.destroy();
  }
});

// ============================================================================
// Graceful Degradation Tests
// ============================================================================