Compare commits
12 Commits
| Author | SHA1 | Date |
|---|---|---|
| | fb8d6897e3 | |
| | 81ae4f2d59 | |
| | 374469e37e | |
| | 9039613f7a | |
| | 4d13fac9f1 | |
| | 42209d235d | |
| | 80005af576 | |
| | 8d48627301 | |
| | 92d27d8b15 | |
| | 0b31219b7d | |
| | 29dea2e0e8 | |
| | 52dc1c0549 | |
devcontainer.json

@@ -1,35 +1,5 @@
-// The Dev Container format allows you to configure your environment. At the heart of it
-// is a Docker image or Dockerfile which controls the tools available in your environment.
-//
 // See https://aka.ms/devcontainer.json for more information.
 {
-  "name": "Ona",
+  "name": "gitzone.universal",
-  // This universal image (~10GB) includes many development tools and languages,
-  // providing a convenient all-in-one development environment.
-  //
-  // This image is already available on remote runners for fast startup. On desktop
-  // and linux runners, it will need to be downloaded, which may take longer.
-  //
-  // For faster startup on desktop/linux, consider a smaller, language-specific image:
-  // • For Python: mcr.microsoft.com/devcontainers/python:3.13
-  // • For Node.js: mcr.microsoft.com/devcontainers/javascript-node:24
-  // • For Go: mcr.microsoft.com/devcontainers/go:1.24
-  // • For Java: mcr.microsoft.com/devcontainers/java:21
-  //
-  // Browse more options at: https://hub.docker.com/r/microsoft/devcontainers
-  // or build your own using the Dockerfile option below.
   "image": "mcr.microsoft.com/devcontainers/universal:4.0.1-noble"
-  // Use "build":
-  // instead of the image to use a Dockerfile to build an image.
-  // "build": {
-  //   "context": ".",
-  //   "dockerfile": "Dockerfile"
-  // }
-  // Features add additional features to your environment. See https://containers.dev/features
-  // Beware: features are not supported on all platforms and may have unintended side-effects.
-  // "features": {
-  //   "ghcr.io/devcontainers/features/docker-in-docker": {
-  //     "moby": false
-  //   }
-  // }
 }
changelog.md (51)

@@ -1,5 +1,56 @@
 # Changelog
 
+## 2025-11-21 - 1.4.1 - fix(devcontainer)
+Simplify devcontainer configuration and rename container image
+
+- Rename Dev Container name to 'gitzone.universal' and set image to mcr.microsoft.com/devcontainers/universal:4.0.1-noble
+- Remove large inline comments and example 'build'/'features' blocks to simplify the devcontainer.json
+
+## 2025-11-21 - 1.4.0 - feat(registrystorage)
+Add deleteMavenMetadata to RegistryStorage and update Maven DELETE test to expect 204 No Content
+
+- Add deleteMavenMetadata(groupId, artifactId) to RegistryStorage to remove maven-metadata.xml.
+- Update Maven test to assert 204 No Content for DELETE responses (previously expected 200).
+
+## 2025-11-21 - 1.3.1 - fix(maven)
+Pass request path to Maven checksum handler so checksum files are resolved correctly
+
+- Call handleChecksumRequest with the full request path from MavenRegistry.handleRequest
+- Allows getChecksum to extract the checksum filename from the URL and fetch the correct checksum file from storage
+- Fixes 404s when requesting artifact checksum files (md5, sha1, sha256, sha512)
+
+## 2025-11-21 - 1.3.0 - feat(core)
+Add Cargo and Composer registries with storage, auth and helpers
+
+- Add Cargo registry implementation (ts/cargo) including index, publish, download, yank/unyank and search handlers
+- Add Composer registry implementation (ts/composer) including package upload/download, metadata, packages.json and helpers
+- Extend RegistryStorage with Cargo and Composer-specific storage helpers and path conventions
+- Extend AuthManager with Composer token creation/validation and unified token validation support
+- Wire SmartRegistry to initialize and route requests to cargo and composer handlers
+- Add adm-zip dependency and Composer ZIP parsing helpers (extractComposerJsonFromZip, sha1 calculation, version sorting)
+- Add tests for Cargo index path calculation and config handling
+- Export new modules from ts/index.ts and add module entry files for composer and cargo
+
+## 2025-11-21 - 1.2.0 - feat(maven)
+Add Maven registry protocol support (storage, auth, routing, interfaces, and exports)
+
+- Add Maven protocol to core types (TRegistryProtocol) and IRegistryConfig
+- SmartRegistry: initialize Maven registry when enabled, route requests to /maven, and expose it via getRegistry
+- RegistryStorage: implement Maven storage helpers (get/put/delete artifact, metadata, list versions) and path helpers
+- AuthManager: add UUID token creation/validation/revocation for Maven and integrate into unified validateToken/authorize flow
+- New ts/maven module: exports, interfaces and helpers for Maven coordinates, metadata, and search results
+- Add basic Cargo (crates.io) scaffolding: ts/cargo exports and Cargo interfaces
+- Update top-level ts/index.ts and package exports to include Maven (and cargo) modules
+- Tests/helpers updated to enable Maven in test registry and add Maven artifact/checksum helpers
+
+## 2025-11-20 - 1.1.1 - fix(oci)
+Improve OCI manifest permission response and tag handling: include WWW-Authenticate header on unauthorized manifest GETs, accept optional headers in manifest lookup, and persist tags as a unified tags.json mapping when pushing manifests.
+
+- getManifest now accepts an optional headers parameter for better request context handling.
+- Unauthorized GET manifest responses now include a WWW-Authenticate header with realm/service/scope to comply with OCI auth expectations.
+- PUT manifest logic no longer writes individual tag objects; it updates a consolidated oci/tags/{repository}/tags.json mapping using getTagsData and putObject.
+- Simplified tag update flow when pushing a manifest: tags[reference] = digest and persist tags.json.
+
 ## 2025-11-20 - 1.1.0 - feat(oci)
 Support monolithic OCI blob uploads; add registry cleanup/destroy hooks; update tests and docs
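The 1.3.1 entry above describes the fix as passing the full request path into the checksum handler so the checksum filename can be extracted from the URL. A minimal sketch of that extraction step, assuming hypothetical helper names (these are illustrative, not the actual MavenRegistry API):

```typescript
// Sketch only: extract which checksum flavour a Maven request asks for.
// Function names are hypothetical; the real handler is handleChecksumRequest/getChecksum.
type TChecksumType = 'md5' | 'sha1' | 'sha256' | 'sha512';

function extractChecksumType(requestPath: string): TChecksumType | null {
  // e.g. /maven/com/example/test/test-artifact/1.0.0/test-artifact-1.0.0.jar.sha256
  const match = requestPath.match(/\.(md5|sha1|sha256|sha512)$/);
  return match ? (match[1] as TChecksumType) : null;
}

function baseArtifactPath(requestPath: string): string {
  // Strip the checksum suffix to find the artifact the checksum belongs to.
  return requestPath.replace(/\.(md5|sha1|sha256|sha512)$/, '');
}
```

Keeping the full request path available is what lets one handler serve all four checksum flavours for the same artifact, which is the 404 fix the entry refers to.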
package.json

@@ -1,6 +1,6 @@
 {
   "name": "@push.rocks/smartregistry",
-  "version": "1.1.0",
+  "version": "1.4.1",
   "private": false,
   "description": "a registry for npm modules and oci images",
   "main": "dist_ts/index.js",
@@ -47,7 +47,8 @@
     "@push.rocks/qenv": "^6.1.3",
     "@push.rocks/smartbucket": "^4.3.0",
     "@push.rocks/smartlog": "^3.1.10",
-    "@push.rocks/smartpath": "^6.0.0"
+    "@push.rocks/smartpath": "^6.0.0",
+    "adm-zip": "^0.5.10"
   },
   "packageManager": "pnpm@10.18.1+sha512.77a884a165cbba2d8d1c19e3b4880eee6d2fcabd0d879121e282196b80042351d5eb3ca0935fa599da1dc51265cc68816ad2bddd2a2de5ea9fdf92adbec7cd34"
 }
pnpm-lock.yaml (9, generated)

@@ -20,6 +20,9 @@ importers:
       '@push.rocks/smartpath':
         specifier: ^6.0.0
         version: 6.0.0
+      adm-zip:
+        specifier: ^0.5.10
+        version: 0.5.16
     devDependencies:
       '@git.zone/tsbuild':
         specifier: ^3.1.0
@@ -1507,6 +1510,10 @@ packages:
     resolution: {integrity: sha512-mORqg60S8iML6XSmVjqjGHJkINrCGLMj2QvDmFzI9vIlv1RGlyjmw3nrzaINJjkNsYXC41XhhD5pfy7CtuGcbA==}
     engines: {node: '>= 16'}
 
+  adm-zip@0.5.16:
+    resolution: {integrity: sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==}
+    engines: {node: '>=12.0'}
+
   agent-base@7.1.4:
     resolution: {integrity: sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ==}
     engines: {node: '>= 14'}
@@ -6557,6 +6564,8 @@ snapshots:
     transitivePeerDependencies:
       - supports-color
 
+  adm-zip@0.5.16: {}
+
   agent-base@7.1.4: {}
 
   agentkeepalive@4.6.0:
readme.md (302)

@@ -1,18 +1,21 @@
 # @push.rocks/smartregistry
 
-> 🚀 A composable TypeScript library implementing both **OCI Distribution Specification v1.1** and **NPM Registry API** for building unified container and package registries.
+> 🚀 A composable TypeScript library implementing **OCI Distribution Specification v1.1**, **NPM Registry API**, **Maven Repository**, **Cargo/crates.io Registry**, and **Composer/Packagist** for building unified container and package registries.
 
 ## ✨ Features
 
-### 🔄 Dual Protocol Support
+### 🔄 Multi-Protocol Support
 - **OCI Distribution Spec v1.1**: Full container registry with manifest/blob operations
 - **NPM Registry API**: Complete package registry with publish/install/search
+- **Maven Repository**: Java/JVM artifact management with POM support
+- **Cargo/crates.io Registry**: Rust crate registry with sparse HTTP protocol
+- **Composer/Packagist**: PHP package registry with Composer v2 protocol
 
 ### 🏗️ Unified Architecture
 - **Composable Design**: Core infrastructure with protocol plugins
 - **Shared Storage**: Cloud-agnostic S3-compatible backend ([@push.rocks/smartbucket](https://www.npmjs.com/package/@push.rocks/smartbucket))
-- **Unified Authentication**: Scope-based permissions across both protocols
+- **Unified Authentication**: Scope-based permissions across all protocols
-- **Path-based Routing**: `/oci/*` for containers, `/npm/*` for packages
+- **Path-based Routing**: `/oci/*` for containers, `/npm/*` for packages, `/maven/*` for Java artifacts, `/cargo/*` for Rust crates, `/composer/*` for PHP packages
 
 ### 🔐 Authentication & Authorization
 - NPM UUID tokens for package operations
@@ -35,6 +38,27 @@
 - ✅ Dist-tag management
 - ✅ Token management
 
+**Maven Features:**
+- ✅ Artifact upload/download
+- ✅ POM and metadata management
+- ✅ Snapshot and release versions
+- ✅ Checksum verification (MD5, SHA1)
+
+**Cargo Features:**
+- ✅ Crate publish (.crate files)
+- ✅ Sparse HTTP protocol (modern index)
+- ✅ Version yank/unyank
+- ✅ Dependency resolution
+- ✅ Search functionality
+
+**Composer Features:**
+- ✅ Package publish/download (ZIP format)
+- ✅ Composer v2 repository API
+- ✅ Package metadata (packages.json)
+- ✅ Version management
+- ✅ Dependency resolution
+- ✅ PSR-4/PSR-0 autoloading support
+
 ## 📥 Installation
 
 ```bash
@@ -78,6 +102,18 @@ const config: IRegistryConfig = {
     enabled: true,
     basePath: '/npm',
   },
+  maven: {
+    enabled: true,
+    basePath: '/maven',
+  },
+  cargo: {
+    enabled: true,
+    basePath: '/cargo',
+  },
+  composer: {
+    enabled: true,
+    basePath: '/composer',
+  },
 };
 
 const registry = new SmartRegistry(config);
@@ -212,6 +248,167 @@ const searchResults = await registry.handleRequest({
 });
 ```
 
+### 🦀 Cargo Registry (Rust Crates)
+
+```typescript
+// Get config.json (required for Cargo)
+const config = await registry.handleRequest({
+  method: 'GET',
+  path: '/cargo/config.json',
+  headers: {},
+  query: {},
+});
+
+// Get index file for a crate
+const index = await registry.handleRequest({
+  method: 'GET',
+  path: '/cargo/se/rd/serde', // Path based on crate name length
+  headers: {},
+  query: {},
+});
+
+// Download a crate file
+const crateFile = await registry.handleRequest({
+  method: 'GET',
+  path: '/cargo/api/v1/crates/serde/1.0.0/download',
+  headers: {},
+  query: {},
+});
+
+// Publish a crate (binary format: [4 bytes JSON len][JSON][4 bytes crate len][.crate])
+const publishResponse = await registry.handleRequest({
+  method: 'PUT',
+  path: '/cargo/api/v1/crates/new',
+  headers: { 'Authorization': '<cargo-token>' }, // No "Bearer" prefix
+  query: {},
+  body: binaryPublishData, // Length-prefixed binary format
+});
+
+// Yank a version (deprecate without deleting)
+const yankResponse = await registry.handleRequest({
+  method: 'DELETE',
+  path: '/cargo/api/v1/crates/my-crate/0.1.0/yank',
+  headers: { 'Authorization': '<cargo-token>' },
+  query: {},
+});
+
+// Unyank a version
+const unyankResponse = await registry.handleRequest({
+  method: 'PUT',
+  path: '/cargo/api/v1/crates/my-crate/0.1.0/unyank',
+  headers: { 'Authorization': '<cargo-token>' },
+  query: {},
+});
+
+// Search crates
+const search = await registry.handleRequest({
+  method: 'GET',
+  path: '/cargo/api/v1/crates',
+  headers: {},
+  query: { q: 'serde', per_page: '10' },
+});
+```
+
+**Using with Cargo CLI:**
+
+```toml
+# .cargo/config.toml
+[registries.myregistry]
+index = "sparse+https://registry.example.com/cargo/"
+
+[registries.myregistry.credential-provider]
+# Or use credentials directly:
+# [registries.myregistry]
+# token = "your-api-token"
+```
+
+```bash
+# Publish to custom registry
+cargo publish --registry=myregistry
+
+# Install from custom registry
+cargo install --registry=myregistry my-crate
+
+# Search custom registry
+cargo search --registry=myregistry tokio
+```
+
+### 🎼 Composer Registry (PHP Packages)
+
+```typescript
+// Get repository root (packages.json)
+const packagesJson = await registry.handleRequest({
+  method: 'GET',
+  path: '/composer/packages.json',
+  headers: {},
+  query: {},
+});
+
+// Get package metadata
+const metadata = await registry.handleRequest({
+  method: 'GET',
+  path: '/composer/p2/vendor/package.json',
+  headers: {},
+  query: {},
+});
+
+// Upload a package (ZIP with composer.json)
+const zipBuffer = await readFile('package.zip');
+const uploadResponse = await registry.handleRequest({
+  method: 'PUT',
+  path: '/composer/packages/vendor/package',
+  headers: { 'Authorization': `Bearer <composer-token>` },
+  query: {},
+  body: zipBuffer,
+});
+
+// Download package ZIP
+const download = await registry.handleRequest({
+  method: 'GET',
+  path: '/composer/dists/vendor/package/ref123.zip',
+  headers: {},
+  query: {},
+});
+
+// List all packages
+const list = await registry.handleRequest({
+  method: 'GET',
+  path: '/composer/packages/list.json',
+  headers: {},
+  query: {},
+});
+
+// Delete a specific version
+const deleteVersion = await registry.handleRequest({
+  method: 'DELETE',
+  path: '/composer/packages/vendor/package/1.0.0',
+  headers: { 'Authorization': `Bearer <composer-token>` },
+  query: {},
+});
+```
+
+**Using with Composer CLI:**
+
+```json
+// composer.json
+{
+  "repositories": [
+    {
+      "type": "composer",
+      "url": "https://registry.example.com/composer"
+    }
+  ]
+}
+```
+
+```bash
+# Install from custom registry
+composer require vendor/package
+
+# Update packages
+composer update
+```
+
 ### 🔐 Authentication
 
 ```typescript
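The Cargo publish example above sends a length-prefixed binary body ([4 bytes JSON len][JSON][4 bytes crate len][.crate]). A minimal sketch of how such a body could be assembled on the client side, assuming little-endian u32 length prefixes as used by crates.io; `binaryPublishData` in the example would be the result:

```typescript
// Sketch only: build the length-prefixed Cargo publish body described above.
// Assumes little-endian u32 length prefixes (crates.io convention).
function buildCargoPublishBody(metadata: object, crateFile: Buffer): Buffer {
  const metadataJson = Buffer.from(JSON.stringify(metadata), 'utf-8');

  const jsonLen = Buffer.alloc(4);
  jsonLen.writeUInt32LE(metadataJson.length, 0);

  const crateLen = Buffer.alloc(4);
  crateLen.writeUInt32LE(crateFile.length, 0);

  return Buffer.concat([jsonLen, metadataJson, crateLen, crateFile]);
}
```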
@@ -374,6 +571,48 @@ NPM registry API compliant implementation.
 - `POST /-/npm/v1/tokens` - Create token
 - `PUT /-/package/{pkg}/dist-tags/{tag}` - Update tag
 
+#### CargoRegistry
+
+Cargo/crates.io registry with sparse HTTP protocol support.
+
+**Endpoints:**
+- `GET /config.json` - Registry configuration (sparse protocol)
+- `GET /index/{path}` - Index files (hierarchical structure)
+  - `/1/{name}` - 1-character crate names
+  - `/2/{name}` - 2-character crate names
+  - `/3/{c}/{name}` - 3-character crate names
+  - `/{p1}/{p2}/{name}` - 4+ character crate names
+- `PUT /api/v1/crates/new` - Publish crate (binary format)
+- `GET /api/v1/crates/{crate}/{version}/download` - Download .crate file
+- `DELETE /api/v1/crates/{crate}/{version}/yank` - Yank (deprecate) version
+- `PUT /api/v1/crates/{crate}/{version}/unyank` - Unyank version
+- `GET /api/v1/crates?q={query}` - Search crates
+
+**Index Format:**
+- Newline-delimited JSON (one line per version)
+- SHA256 checksums for .crate files
+- Yanked flag (keep files, mark unavailable)
+
+#### ComposerRegistry
+
+Composer v2 repository API compliant implementation.
+
+**Endpoints:**
+- `GET /packages.json` - Repository metadata and configuration
+- `GET /p2/{vendor}/{package}.json` - Package version metadata
+- `GET /p2/{vendor}/{package}~dev.json` - Dev versions metadata
+- `GET /packages/list.json` - List all packages
+- `GET /dists/{vendor}/{package}/{ref}.zip` - Download package ZIP
+- `PUT /packages/{vendor}/{package}` - Upload package (requires auth)
+- `DELETE /packages/{vendor}/{package}` - Delete entire package
+- `DELETE /packages/{vendor}/{package}/{version}` - Delete specific version
+
+**Package Format:**
+- ZIP archives with composer.json in root
+- SHA-1 checksums for verification
+- Version normalization (1.0.0 → 1.0.0.0)
+- PSR-4/PSR-0 autoloading configuration
+
 ## 🗄️ Storage Structure
 
 ```
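The index layout listed for CargoRegistry above (1-, 2-, 3- and 4+-character crate names) follows the standard crates.io index scheme. A minimal sketch of that length-based rule is shown below; the function name is illustrative, and the project's own helper in RegistryStorage may additionally normalize punctuation (the Cargo test further down expects `cargo/index/my/--/my-crate` for `my-crate`, which this plain rule does not produce):

```typescript
// Sketch of the crates.io-style index path rule; illustrative only.
function cargoIndexPath(crateName: string): string {
  const name = crateName.toLowerCase();
  if (name.length === 1) return `cargo/index/1/${name}`;
  if (name.length === 2) return `cargo/index/2/${name}`;
  if (name.length === 3) return `cargo/index/3/${name[0]}/${name}`;
  return `cargo/index/${name.slice(0, 2)}/${name.slice(2, 4)}/${name}`;
}

// cargoIndexPath('serde') -> 'cargo/index/se/rd/serde'
// cargoIndexPath('axo')   -> 'cargo/index/3/a/axo'
```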
@@ -385,16 +624,38 @@ bucket/
 │   │   └── {repository}/{digest}
 │   └── tags/
 │       └── {repository}/tags.json
-└── npm/
-    ├── packages/
-    │   ├── {name}/
-    │   │   ├── index.json           # Packument
-    │   │   └── {name}-{ver}.tgz     # Tarball
-    │   └── @{scope}/{name}/
-    │       ├── index.json
-    │       └── {name}-{ver}.tgz
-    └── users/
-        └── {username}.json
+├── npm/
+│   ├── packages/
+│   │   ├── {name}/
+│   │   │   ├── index.json           # Packument
+│   │   │   └── {name}-{ver}.tgz     # Tarball
+│   │   └── @{scope}/{name}/
+│   │       ├── index.json
+│   │       └── {name}-{ver}.tgz
+│   └── users/
+│       └── {username}.json
+├── maven/
+│   ├── artifacts/
+│   │   └── {group-path}/{artifact}/{version}/
+│   │       ├── {artifact}-{version}.jar
+│   │       ├── {artifact}-{version}.pom
+│   │       └── {artifact}-{version}.{ext}
+│   └── metadata/
+│       └── {group-path}/{artifact}/maven-metadata.xml
+├── cargo/
+│   ├── config.json                  # Registry configuration (sparse protocol)
+│   ├── index/                       # Hierarchical index structure
+│   │   ├── 1/{name}                 # 1-char crate names (e.g., "a")
+│   │   ├── 2/{name}                 # 2-char crate names (e.g., "io")
+│   │   ├── 3/{c}/{name}             # 3-char crate names (e.g., "3/a/axo")
+│   │   └── {p1}/{p2}/{name}         # 4+ char (e.g., "se/rd/serde")
+│   └── crates/
+│       └── {name}/{name}-{version}.crate   # Gzipped tar archives
+└── composer/
+    └── packages/
+        └── {vendor}/{package}/
+            ├── metadata.json        # All versions metadata
+            └── {reference}.zip      # Package ZIP files
 ```
 
 ## 🎯 Scope Format
@@ -408,9 +669,22 @@ Examples:
 npm:package:express:read       # Read express package
 npm:package:*:write            # Write any package
 npm:*:*:*                      # Full NPM access
 
 oci:repository:nginx:pull      # Pull nginx image
 oci:repository:*:push          # Push any image
 oci:*:*:*                      # Full OCI access
+
+maven:artifact:com.example:read  # Read Maven artifact
+maven:artifact:*:write           # Write any artifact
+maven:*:*:*                      # Full Maven access
+
+cargo:crate:serde:write          # Write serde crate
+cargo:crate:*:read               # Read any crate
+cargo:*:*:*                      # Full Cargo access
+
+composer:package:vendor/package:read  # Read Composer package
+composer:package:*:write              # Write any package
+composer:*:*:*                        # Full Composer access
 ```
 
 ## 🔌 Integration Examples
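The scope strings above follow a `protocol:resource:name:action` pattern with `*` wildcards allowed at any position. A minimal sketch of how a granted scope could be matched against a required permission; this is purely illustrative and not taken from the AuthManager source:

```typescript
// Illustrative wildcard matcher for scopes like "cargo:crate:*:read".
function scopeMatches(granted: string, required: string): boolean {
  const g = granted.split(':');
  const r = required.split(':');
  if (g.length !== r.length) return false;
  return g.every((part, i) => part === '*' || part === r[i]);
}

// scopeMatches('npm:package:*:write', 'npm:package:express:write') // true
// scopeMatches('oci:repository:nginx:pull', 'oci:repository:nginx:push') // false
```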
test/cargo.test.node.ts (131, new file)

import { tap, expect } from '@git.zone/tstest';
import { RegistryStorage } from '../ts/core/classes.registrystorage.js';
import { CargoRegistry } from '../ts/cargo/classes.cargoregistry.js';
import { AuthManager } from '../ts/core/classes.authmanager.js';

// Test index path calculation
tap.test('should calculate correct index paths for different crate names', async () => {
  const storage = new RegistryStorage({
    accessKey: 'test',
    accessSecret: 'test',
    endpoint: 's3.test.com',
    bucketName: 'test-bucket',
  });

  // Access private method for testing
  const getPath = (storage as any).getCargoIndexPath.bind(storage);

  // 1-character names
  expect(getPath('a')).to.equal('cargo/index/1/a');
  expect(getPath('z')).to.equal('cargo/index/1/z');

  // 2-character names
  expect(getPath('io')).to.equal('cargo/index/2/io');
  expect(getPath('ab')).to.equal('cargo/index/2/ab');

  // 3-character names
  expect(getPath('axo')).to.equal('cargo/index/3/a/axo');
  expect(getPath('foo')).to.equal('cargo/index/3/f/foo');

  // 4+ character names
  expect(getPath('serde')).to.equal('cargo/index/se/rd/serde');
  expect(getPath('tokio')).to.equal('cargo/index/to/ki/tokio');
  expect(getPath('my-crate')).to.equal('cargo/index/my/--/my-crate');
});

// Test crate file path calculation
tap.test('should calculate correct crate file paths', async () => {
  const storage = new RegistryStorage({
    accessKey: 'test',
    accessSecret: 'test',
    endpoint: 's3.test.com',
    bucketName: 'test-bucket',
  });

  // Access private method for testing
  const getPath = (storage as any).getCargoCratePath.bind(storage);

  expect(getPath('serde', '1.0.0')).to.equal('cargo/crates/serde/serde-1.0.0.crate');
  expect(getPath('tokio', '1.28.0')).to.equal('cargo/crates/tokio/tokio-1.28.0.crate');
  expect(getPath('my-crate', '0.1.0')).to.equal('cargo/crates/my-crate/my-crate-0.1.0.crate');
});

// Test crate name validation
tap.test('should validate crate names correctly', async () => {
  const storage = new RegistryStorage({
    accessKey: 'test',
    accessSecret: 'test',
    endpoint: 's3.test.com',
    bucketName: 'test-bucket',
  });

  const authManager = new AuthManager({
    jwtSecret: 'test-secret',
    tokenStore: 'memory',
    npmTokens: { enabled: true },
    ociTokens: { enabled: false, realm: '', service: '' },
  });

  const registry = new CargoRegistry(storage, authManager, '/cargo', 'http://localhost:5000/cargo');

  // Access private method for testing
  const validate = (registry as any).validateCrateName.bind(registry);

  // Valid names
  expect(validate('serde')).to.be.true;
  expect(validate('tokio')).to.be.true;
  expect(validate('my-crate')).to.be.true;
  expect(validate('my_crate')).to.be.true;
  expect(validate('crate123')).to.be.true;
  expect(validate('a')).to.be.true;

  // Invalid names (uppercase not allowed)
  expect(validate('Serde')).to.be.false;
  expect(validate('MyCreate')).to.be.false;

  // Invalid names (special characters)
  expect(validate('my.crate')).to.be.false;
  expect(validate('my@crate')).to.be.false;
  expect(validate('my crate')).to.be.false;

  // Invalid names (too long)
  const longName = 'a'.repeat(65);
  expect(validate(longName)).to.be.false;

  // Invalid names (empty)
  expect(validate('')).to.be.false;
});

// Test config.json response
tap.test('should return valid config.json', async () => {
  const storage = new RegistryStorage({
    accessKey: 'test',
    accessSecret: 'test',
    endpoint: 's3.test.com',
    bucketName: 'test-bucket',
  });

  const authManager = new AuthManager({
    jwtSecret: 'test-secret',
    tokenStore: 'memory',
    npmTokens: { enabled: true },
    ociTokens: { enabled: false, realm: '', service: '' },
  });

  const registry = new CargoRegistry(storage, authManager, '/cargo', 'http://localhost:5000/cargo');

  const response = await registry.handleRequest({
    method: 'GET',
    path: '/cargo/config.json',
    headers: {},
    query: {},
  });

  expect(response.status).to.equal(200);
  expect(response.headers['Content-Type']).to.equal('application/json');
  expect(response.body).to.be.an('object');
  expect(response.body.dl).to.include('/api/v1/crates/{crate}/{version}/download');
  expect(response.body.api).to.equal('http://localhost:5000/cargo');
});

export default tap.start();
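The validation test above only pins down the expected behaviour: lowercase letters, digits, `-` and `_`, at most 64 characters, non-empty. A regex that satisfies exactly those assertions could look like the sketch below; the actual CargoRegistry.validateCrateName may impose additional rules (e.g. on the first character):

```typescript
// A validator consistent with the assertions above; illustrative only.
function isValidCrateName(name: string): boolean {
  return name.length > 0 && name.length <= 64 && /^[a-z0-9_-]+$/.test(name);
}
```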
test/helpers/registry.ts

@@ -6,7 +6,7 @@ import type { IRegistryConfig } from '../../ts/core/interfaces.core.js';
 const testQenv = new qenv.Qenv('./', './.nogit');
 
 /**
- * Create a test SmartRegistry instance with both OCI and NPM enabled
+ * Create a test SmartRegistry instance with OCI, NPM, Maven, and Composer enabled
  */
 export async function createTestRegistry(): Promise<SmartRegistry> {
   // Read S3 config from env.json
@@ -45,6 +45,14 @@ export async function createTestRegistry(): Promise<SmartRegistry> {
       enabled: true,
       basePath: '/npm',
     },
+    maven: {
+      enabled: true,
+      basePath: '/maven',
+    },
+    composer: {
+      enabled: true,
+      basePath: '/composer',
+    },
   };
 
   const registry = new SmartRegistry(config);
@@ -79,7 +87,13 @@ export async function createTestTokens(registry: SmartRegistry) {
     3600
   );
 
-  return { npmToken, ociToken, userId };
+  // Create Maven token with full access
+  const mavenToken = await authManager.createMavenToken(userId, false);
+
+  // Create Composer token with full access
+  const composerToken = await authManager.createComposerToken(userId, false);
+
+  return { npmToken, ociToken, mavenToken, composerToken, userId };
 }
 
 /**
@@ -147,3 +161,112 @@ export function createTestPackument(packageName: string, version: string, tarbal
     },
   };
 }
+
+/**
+ * Helper to create a minimal valid Maven POM file
+ */
+export function createTestPom(
+  groupId: string,
+  artifactId: string,
+  version: string,
+  packaging: string = 'jar'
+): string {
+  return `<?xml version="1.0" encoding="UTF-8"?>
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+         http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>${groupId}</groupId>
+  <artifactId>${artifactId}</artifactId>
+  <version>${version}</version>
+  <packaging>${packaging}</packaging>
+  <name>${artifactId}</name>
+  <description>Test Maven artifact</description>
+</project>`;
+}
+
+/**
+ * Helper to create a test JAR file (minimal ZIP with manifest)
+ */
+export function createTestJar(): Buffer {
+  // Create a simple JAR structure (just a manifest)
+  // In practice, this is a ZIP file with at least META-INF/MANIFEST.MF
+  const manifestContent = `Manifest-Version: 1.0
+Created-By: SmartRegistry Test
+`;
+
+  // For testing, we'll just create a buffer with dummy content
+  // Real JAR would be a proper ZIP archive
+  return Buffer.from(manifestContent, 'utf-8');
+}
+
+/**
+ * Helper to calculate Maven checksums
+ */
+export function calculateMavenChecksums(data: Buffer) {
+  return {
+    md5: crypto.createHash('md5').update(data).digest('hex'),
+    sha1: crypto.createHash('sha1').update(data).digest('hex'),
+    sha256: crypto.createHash('sha256').update(data).digest('hex'),
+    sha512: crypto.createHash('sha512').update(data).digest('hex'),
+  };
+}
+
+/**
+ * Helper to create a Composer package ZIP
+ */
+export async function createComposerZip(
+  vendorPackage: string,
+  version: string,
+  options?: {
+    description?: string;
+    license?: string[];
+    authors?: Array<{ name: string; email?: string }>;
+  }
+): Promise<Buffer> {
+  const AdmZip = (await import('adm-zip')).default;
+  const zip = new AdmZip();
+
+  const composerJson = {
+    name: vendorPackage,
+    version: version,
+    type: 'library',
+    description: options?.description || 'Test Composer package',
+    license: options?.license || ['MIT'],
+    authors: options?.authors || [{ name: 'Test Author', email: 'test@example.com' }],
+    require: {
+      php: '>=7.4',
+    },
+    autoload: {
+      'psr-4': {
+        'Vendor\\TestPackage\\': 'src/',
+      },
+    },
+  };
+
+  // Add composer.json
+  zip.addFile('composer.json', Buffer.from(JSON.stringify(composerJson, null, 2), 'utf-8'));
+
+  // Add a test PHP file
+  const [vendor, pkg] = vendorPackage.split('/');
+  const namespace = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}\\${pkg.charAt(0).toUpperCase() + pkg.slice(1).replace(/-/g, '')}`;
+  const testPhpContent = `<?php
+namespace ${namespace};
+
+class TestClass
+{
+    public function greet(): string
+    {
+        return "Hello from ${vendorPackage}!";
+    }
+}
+`;
+
+  zip.addFile('src/TestClass.php', Buffer.from(testPhpContent, 'utf-8'));
+
+  // Add README
+  zip.addFile('README.md', Buffer.from(`# ${vendorPackage}\n\nTest package`, 'utf-8'));
+
+  return zip.toBuffer();
+}
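createComposerZip above builds the upload fixture with adm-zip. The changelog mentions a matching server-side helper, extractComposerJsonFromZip; a minimal sketch of how composer.json could be read back out of such a buffer with the same library is shown below (the shape and error handling of the project's real helper may differ):

```typescript
import AdmZip from 'adm-zip';

// Sketch: read composer.json back out of an uploaded ZIP buffer.
function readComposerJsonFromZip(zipBuffer: Buffer): Record<string, unknown> {
  const zip = new AdmZip(zipBuffer);
  const entry = zip.getEntry('composer.json');
  if (!entry) {
    throw new Error('ZIP does not contain a composer.json at its root');
  }
  return JSON.parse(entry.getData().toString('utf-8'));
}
```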
test/test.composer.ts (296, new file)

import { expect, tap } from '@git.zone/tstest/tapbundle';
import { SmartRegistry } from '../ts/index.js';
import { createTestRegistry, createTestTokens, createComposerZip } from './helpers/registry.js';

let registry: SmartRegistry;
let composerToken: string;
let userId: string;

// Test data
const testPackageName = 'vendor/test-package';
const testVersion = '1.0.0';
let testZipData: Buffer;

tap.test('Composer: should create registry instance', async () => {
  registry = await createTestRegistry();
  const tokens = await createTestTokens(registry);
  composerToken = tokens.composerToken;
  userId = tokens.userId;

  expect(registry).toBeInstanceOf(SmartRegistry);
  expect(composerToken).toBeTypeOf('string');
});

tap.test('Composer: should create test ZIP package', async () => {
  testZipData = await createComposerZip(testPackageName, testVersion, {
    description: 'Test Composer package for registry',
    license: ['MIT'],
    authors: [{ name: 'Test Author', email: 'test@example.com' }],
  });

  expect(testZipData).toBeInstanceOf(Buffer);
  expect(testZipData.length).toBeGreaterThan(0);
});

tap.test('Composer: should return packages.json (GET /packages.json)', async () => {
  const response = await registry.handleRequest({
    method: 'GET',
    path: '/composer/packages.json',
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toHaveProperty('metadata-url');
  expect(response.body).toHaveProperty('available-packages');
  expect(response.body['available-packages']).toBeInstanceOf(Array);
});

tap.test('Composer: should upload a package (PUT /packages/{vendor/package})', async () => {
  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/composer/packages/${testPackageName}`,
    headers: {
      Authorization: `Bearer ${composerToken}`,
      'Content-Type': 'application/zip',
    },
    query: {},
    body: testZipData,
  });

  expect(response.status).toEqual(201);
  expect(response.body.status).toEqual('success');
  expect(response.body.package).toEqual(testPackageName);
  expect(response.body.version).toEqual(testVersion);
});

tap.test('Composer: should retrieve package metadata (GET /p2/{vendor/package}.json)', async () => {
  const response = await registry.handleRequest({
    method: 'GET',
    path: `/composer/p2/${testPackageName}.json`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toHaveProperty('packages');
  expect(response.body.packages[testPackageName]).toBeInstanceOf(Array);
  expect(response.body.packages[testPackageName].length).toEqual(1);

  const packageData = response.body.packages[testPackageName][0];
  expect(packageData.name).toEqual(testPackageName);
  expect(packageData.version).toEqual(testVersion);
  expect(packageData.version_normalized).toEqual('1.0.0.0');
  expect(packageData).toHaveProperty('dist');
  expect(packageData.dist.type).toEqual('zip');
  expect(packageData.dist).toHaveProperty('url');
  expect(packageData.dist).toHaveProperty('shasum');
  expect(packageData.dist).toHaveProperty('reference');
});

tap.test('Composer: should download package ZIP (GET /dists/{vendor/package}/{ref}.zip)', async () => {
  // First get metadata to find reference
  const metadataResponse = await registry.handleRequest({
    method: 'GET',
    path: `/composer/p2/${testPackageName}.json`,
    headers: {},
    query: {},
  });

  const reference = metadataResponse.body.packages[testPackageName][0].dist.reference;

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/composer/dists/${testPackageName}/${reference}.zip`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toBeInstanceOf(Buffer);
  expect(response.headers['Content-Type']).toEqual('application/zip');
  expect(response.headers['Content-Disposition']).toContain('attachment');
});

tap.test('Composer: should list packages (GET /packages/list.json)', async () => {
  const response = await registry.handleRequest({
    method: 'GET',
    path: '/composer/packages/list.json',
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toHaveProperty('packageNames');
  expect(response.body.packageNames).toBeInstanceOf(Array);
  expect(response.body.packageNames).toContain(testPackageName);
});

tap.test('Composer: should filter package list (GET /packages/list.json?filter=vendor/*)', async () => {
  const response = await registry.handleRequest({
    method: 'GET',
    path: '/composer/packages/list.json',
    headers: {},
    query: { filter: 'vendor/*' },
  });

  expect(response.status).toEqual(200);
  expect(response.body.packageNames).toBeInstanceOf(Array);
  expect(response.body.packageNames).toContain(testPackageName);
});

tap.test('Composer: should prevent duplicate version upload', async () => {
  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/composer/packages/${testPackageName}`,
    headers: {
      Authorization: `Bearer ${composerToken}`,
      'Content-Type': 'application/zip',
    },
    query: {},
    body: testZipData,
  });

  expect(response.status).toEqual(409);
  expect(response.body.status).toEqual('error');
  expect(response.body.message).toContain('already exists');
});

tap.test('Composer: should upload a second version', async () => {
  const testVersion2 = '1.1.0';
  const testZipData2 = await createComposerZip(testPackageName, testVersion2);

  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/composer/packages/${testPackageName}`,
    headers: {
      Authorization: `Bearer ${composerToken}`,
      'Content-Type': 'application/zip',
    },
    query: {},
    body: testZipData2,
  });

  expect(response.status).toEqual(201);
  expect(response.body.status).toEqual('success');
  expect(response.body.version).toEqual(testVersion2);
});

tap.test('Composer: should return multiple versions in metadata', async () => {
  const response = await registry.handleRequest({
    method: 'GET',
    path: `/composer/p2/${testPackageName}.json`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body.packages[testPackageName]).toBeInstanceOf(Array);
  expect(response.body.packages[testPackageName].length).toEqual(2);

  const versions = response.body.packages[testPackageName].map((p: any) => p.version);
  expect(versions).toContain('1.0.0');
  expect(versions).toContain('1.1.0');
});

tap.test('Composer: should delete a specific version (DELETE /packages/{vendor/package}/{version})', async () => {
  const response = await registry.handleRequest({
    method: 'DELETE',
    path: `/composer/packages/${testPackageName}/1.0.0`,
    headers: {
      Authorization: `Bearer ${composerToken}`,
    },
    query: {},
  });

  expect(response.status).toEqual(204);

  // Verify version was removed
  const metadataResponse = await registry.handleRequest({
    method: 'GET',
    path: `/composer/p2/${testPackageName}.json`,
    headers: {},
    query: {},
  });

  expect(metadataResponse.body.packages[testPackageName].length).toEqual(1);
  expect(metadataResponse.body.packages[testPackageName][0].version).toEqual('1.1.0');
});

tap.test('Composer: should require auth for package upload', async () => {
  const testZipData3 = await createComposerZip('vendor/unauth-package', '1.0.0');

  const response = await registry.handleRequest({
    method: 'PUT',
    path: '/composer/packages/vendor/unauth-package',
    headers: {
      'Content-Type': 'application/zip',
    },
    query: {},
    body: testZipData3,
  });

  expect(response.status).toEqual(401);
  expect(response.body.status).toEqual('error');
});

tap.test('Composer: should reject invalid ZIP (no composer.json)', async () => {
  const invalidZip = Buffer.from('invalid zip content');

  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/composer/packages/${testPackageName}`,
    headers: {
      Authorization: `Bearer ${composerToken}`,
      'Content-Type': 'application/zip',
    },
    query: {},
    body: invalidZip,
  });

  expect(response.status).toEqual(400);
  expect(response.body.status).toEqual('error');
  expect(response.body.message).toContain('composer.json');
});

tap.test('Composer: should delete entire package (DELETE /packages/{vendor/package})', async () => {
  const response = await registry.handleRequest({
    method: 'DELETE',
    path: `/composer/packages/${testPackageName}`,
    headers: {
      Authorization: `Bearer ${composerToken}`,
    },
    query: {},
  });

  expect(response.status).toEqual(204);

  // Verify package was removed
  const metadataResponse = await registry.handleRequest({
    method: 'GET',
    path: `/composer/p2/${testPackageName}.json`,
    headers: {},
    query: {},
  });

  expect(metadataResponse.status).toEqual(404);
});

tap.test('Composer: should return 404 for non-existent package', async () => {
  const response = await registry.handleRequest({
    method: 'GET',
    path: '/composer/p2/non/existent.json',
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(404);
});

tap.postTask('cleanup registry', async () => {
  if (registry) {
    registry.destroy();
  }
});

export default tap.start();
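The metadata assertions above expect a version_normalized of '1.0.0.0' for version '1.0.0': Composer normalizes versions to four numeric components. A minimal sketch of that normalization for plain numeric versions (stability suffixes such as -dev are deliberately not handled here, and the project's real helper may differ):

```typescript
// Sketch: pad a plain numeric version to Composer's four-component form,
// e.g. '1.0.0' -> '1.0.0.0', '1.2' -> '1.2.0.0'. Illustrative only.
function normalizeComposerVersion(version: string): string {
  const parts = version.split('.').map((p) => parseInt(p, 10));
  while (parts.length < 4) parts.push(0);
  return parts.slice(0, 4).join('.');
}
```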
380
test/test.maven.ts
Normal file
380
test/test.maven.ts
Normal file
@@ -0,0 +1,380 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { SmartRegistry } from '../ts/index.js';
|
||||||
|
import {
|
||||||
|
createTestRegistry,
|
||||||
|
createTestTokens,
|
||||||
|
createTestPom,
|
||||||
|
createTestJar,
|
||||||
|
calculateMavenChecksums,
|
||||||
|
} from './helpers/registry.js';
|
||||||
|
|
||||||
|
let registry: SmartRegistry;
|
||||||
|
let mavenToken: string;
|
||||||
|
let userId: string;
|
||||||
|
|
||||||
|
// Test data
|
||||||
|
const testGroupId = 'com.example.test';
|
||||||
|
const testArtifactId = 'test-artifact';
|
||||||
|
const testVersion = '1.0.0';
|
||||||
|
const testJarData = createTestJar();
|
||||||
|
const testPomData = Buffer.from(
|
||||||
|
createTestPom(testGroupId, testArtifactId, testVersion),
|
||||||
|
'utf-8'
|
||||||
|
);
|
||||||
|
|
||||||
|
tap.test('Maven: should create registry instance', async () => {
|
||||||
|
registry = await createTestRegistry();
|
||||||
|
const tokens = await createTestTokens(registry);
|
||||||
|
mavenToken = tokens.mavenToken;
|
||||||
|
userId = tokens.userId;
|
||||||
|
|
||||||
|
expect(registry).toBeInstanceOf(SmartRegistry);
|
||||||
|
expect(mavenToken).toBeTypeOf('string');
|
||||||
|
|
||||||
|
// Clean up any existing metadata from previous test runs
|
||||||
|
const storage = registry.getStorage();
|
||||||
|
try {
|
||||||
|
await storage.deleteMavenMetadata(testGroupId, testArtifactId);
|
||||||
|
} catch (error) {
|
||||||
|
// Ignore error if metadata doesn't exist
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Maven: should upload POM file (PUT /{groupPath}/{artifactId}/{version}/*.pom)', async () => {
|
||||||
|
const groupPath = testGroupId.replace(/\./g, '/');
|
||||||
|
const pomFilename = `${testArtifactId}-${testVersion}.pom`;
|
||||||
|
|
||||||
|
const response = await registry.handleRequest({
|
||||||
|
method: 'PUT',
|
||||||
|
path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${pomFilename}`,
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${mavenToken}`,
|
||||||
|
'Content-Type': 'application/xml',
|
||||||
|
},
|
||||||
|
query: {},
|
||||||
|
body: testPomData,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.status).toEqual(201);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Maven: should upload JAR file (PUT /{groupPath}/{artifactId}/{version}/*.jar)', async () => {
|
||||||
|
const groupPath = testGroupId.replace(/\./g, '/');
|
||||||
|
const jarFilename = `${testArtifactId}-${testVersion}.jar`;
|
||||||
|
|
||||||
|
const response = await registry.handleRequest({
|
||||||
|
method: 'PUT',
|
||||||
|
path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}`,
|
||||||
|
headers: {
|
||||||
|
Authorization: `Bearer ${mavenToken}`,
|
||||||
|
'Content-Type': 'application/java-archive',
|
||||||
|
},
|
||||||
|
query: {},
|
||||||
|
body: testJarData,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.status).toEqual(201);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Maven: should retrieve uploaded POM file (GET)', async () => {
|
||||||
|
const groupPath = testGroupId.replace(/\./g, '/');
|
||||||
|
const pomFilename = `${testArtifactId}-${testVersion}.pom`;
|
||||||
|
|
||||||
|
const response = await registry.handleRequest({
|
||||||
|
method: 'GET',
|
||||||
|
path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${pomFilename}`,
|
||||||
|
headers: {},
|
||||||
|
query: {},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.status).toEqual(200);
|
||||||
|
expect(response.body).toBeInstanceOf(Buffer);
|
||||||
|
expect((response.body as Buffer).toString('utf-8')).toContain(testGroupId);
|
||||||
|
expect((response.body as Buffer).toString('utf-8')).toContain(testArtifactId);
|
||||||
|
expect((response.body as Buffer).toString('utf-8')).toContain(testVersion);
|
||||||
|
expect(response.headers['Content-Type']).toEqual('application/xml');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Maven: should retrieve uploaded JAR file (GET)', async () => {
|
||||||
|
const groupPath = testGroupId.replace(/\./g, '/');
|
||||||
|
const jarFilename = `${testArtifactId}-${testVersion}.jar`;
|
||||||
|
|
||||||
|
const response = await registry.handleRequest({
|
||||||
|
method: 'GET',
|
||||||
|
path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}`,
|
||||||
|
headers: {},
|
||||||
|
query: {},
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(response.status).toEqual(200);
|
||||||
|
expect(response.body).toBeInstanceOf(Buffer);
|
||||||
|
expect(response.headers['Content-Type']).toEqual('application/java-archive');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Maven: should retrieve MD5 checksum for JAR (GET *.jar.md5)', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-${testVersion}.jar`;
  const checksums = calculateMavenChecksums(testJarData);

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}.md5`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toBeInstanceOf(Buffer);
  expect((response.body as Buffer).toString('utf-8')).toEqual(checksums.md5);
  expect(response.headers['Content-Type']).toEqual('text/plain');
});

tap.test('Maven: should retrieve SHA1 checksum for JAR (GET *.jar.sha1)', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-${testVersion}.jar`;
  const checksums = calculateMavenChecksums(testJarData);

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}.sha1`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toBeInstanceOf(Buffer);
  expect((response.body as Buffer).toString('utf-8')).toEqual(checksums.sha1);
  expect(response.headers['Content-Type']).toEqual('text/plain');
});

tap.test('Maven: should retrieve SHA256 checksum for JAR (GET *.jar.sha256)', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-${testVersion}.jar`;
  const checksums = calculateMavenChecksums(testJarData);

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}.sha256`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toBeInstanceOf(Buffer);
  expect((response.body as Buffer).toString('utf-8')).toEqual(checksums.sha256);
  expect(response.headers['Content-Type']).toEqual('text/plain');
});

tap.test('Maven: should retrieve SHA512 checksum for JAR (GET *.jar.sha512)', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-${testVersion}.jar`;
  const checksums = calculateMavenChecksums(testJarData);

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}.sha512`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toBeInstanceOf(Buffer);
  expect((response.body as Buffer).toString('utf-8')).toEqual(checksums.sha512);
  expect(response.headers['Content-Type']).toEqual('text/plain');
});

tap.test('Maven: should retrieve maven-metadata.xml (GET)', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/maven-metadata.xml`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  expect(response.body).toBeInstanceOf(Buffer);
  const xml = (response.body as Buffer).toString('utf-8');
  expect(xml).toContain('<groupId>');
  expect(xml).toContain('<artifactId>');
  expect(xml).toContain('<version>1.0.0</version>');
  expect(xml).toContain('<latest>1.0.0</latest>');
  expect(xml).toContain('<release>1.0.0</release>');
  expect(response.headers['Content-Type']).toEqual('application/xml');
});
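For reference, a maven-metadata.xml that satisfies the assertions above has roughly this shape (a sketch implied by the test assertions; the groupId/artifactId values are placeholders for testGroupId/testArtifactId, and the registry may emit additional elements such as lastUpdated):

    <metadata>
      <groupId>{testGroupId}</groupId>
      <artifactId>{testArtifactId}</artifactId>
      <versioning>
        <latest>1.0.0</latest>
        <release>1.0.0</release>
        <versions>
          <version>1.0.0</version>
        </versions>
      </versioning>
    </metadata>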
tap.test('Maven: should upload a second version and update metadata', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const newVersion = '2.0.0';
  const pomFilename = `${testArtifactId}-${newVersion}.pom`;
  const jarFilename = `${testArtifactId}-${newVersion}.jar`;
  const newPomData = Buffer.from(
    createTestPom(testGroupId, testArtifactId, newVersion),
    'utf-8'
  );

  // Upload POM
  await registry.handleRequest({
    method: 'PUT',
    path: `/maven/${groupPath}/${testArtifactId}/${newVersion}/${pomFilename}`,
    headers: {
      Authorization: `Bearer ${mavenToken}`,
      'Content-Type': 'application/xml',
    },
    query: {},
    body: newPomData,
  });

  // Upload JAR
  await registry.handleRequest({
    method: 'PUT',
    path: `/maven/${groupPath}/${testArtifactId}/${newVersion}/${jarFilename}`,
    headers: {
      Authorization: `Bearer ${mavenToken}`,
      'Content-Type': 'application/java-archive',
    },
    query: {},
    body: testJarData,
  });

  // Retrieve metadata and verify both versions are present
  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/maven-metadata.xml`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(200);
  const xml = (response.body as Buffer).toString('utf-8');
  expect(xml).toContain('<version>1.0.0</version>');
  expect(xml).toContain('<version>2.0.0</version>');
  expect(xml).toContain('<latest>2.0.0</latest>');
  expect(xml).toContain('<release>2.0.0</release>');
});

tap.test('Maven: should upload WAR file with correct content type', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const warVersion = '1.0.0-war';
  const warFilename = `${testArtifactId}-${warVersion}.war`;
  const warData = Buffer.from('fake war content', 'utf-8');

  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/maven/${groupPath}/${testArtifactId}/${warVersion}/${warFilename}`,
    headers: {
      Authorization: `Bearer ${mavenToken}`,
      'Content-Type': 'application/x-webarchive',
    },
    query: {},
    body: warData,
  });

  expect(response.status).toEqual(201);
});

tap.test('Maven: should return 404 for non-existent artifact', async () => {
  const groupPath = 'com/example/nonexistent';

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/fake-artifact/1.0.0/fake-artifact-1.0.0.jar`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(404);
  expect(response.body).toHaveProperty('error');
});

tap.test('Maven: should return 401 for unauthorized upload', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-3.0.0.jar`;

  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/maven/${groupPath}/${testArtifactId}/3.0.0/${jarFilename}`,
    headers: {
      // No authorization header
      'Content-Type': 'application/java-archive',
    },
    query: {},
    body: testJarData,
  });

  expect(response.status).toEqual(401);
  expect(response.body).toHaveProperty('error');
});

tap.test('Maven: should reject POM upload with mismatched GAV', async () => {
  const groupPath = 'com/mismatch/test';
  const pomFilename = `different-artifact-1.0.0.pom`;
  // POM contains different GAV than the path
  const mismatchedPom = Buffer.from(
    createTestPom('com.other.group', 'other-artifact', '1.0.0'),
    'utf-8'
  );

  const response = await registry.handleRequest({
    method: 'PUT',
    path: `/maven/${groupPath}/different-artifact/1.0.0/${pomFilename}`,
    headers: {
      Authorization: `Bearer ${mavenToken}`,
      'Content-Type': 'application/xml',
    },
    query: {},
    body: mismatchedPom,
  });

  expect(response.status).toEqual(400);
  expect(response.body).toHaveProperty('error');
});

tap.test('Maven: should delete an artifact (DELETE)', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-${testVersion}.jar`;

  const response = await registry.handleRequest({
    method: 'DELETE',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}`,
    headers: {
      Authorization: `Bearer ${mavenToken}`,
    },
    query: {},
  });

  expect(response.status).toEqual(204); // 204 No Content is correct for DELETE

  // Verify artifact was deleted
  const getResponse = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}`,
    headers: {},
    query: {},
  });

  expect(getResponse.status).toEqual(404);
});

tap.test('Maven: should return 404 for checksum of deleted artifact', async () => {
  const groupPath = testGroupId.replace(/\./g, '/');
  const jarFilename = `${testArtifactId}-${testVersion}.jar`;

  const response = await registry.handleRequest({
    method: 'GET',
    path: `/maven/${groupPath}/${testArtifactId}/${testVersion}/${jarFilename}.md5`,
    headers: {},
    query: {},
  });

  expect(response.status).toEqual(404);
});

tap.postTask('cleanup registry', async () => {
  if (registry) {
    registry.destroy();
  }
});

export default tap.start();
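For reference, the calculateMavenChecksums helper used throughout the tests above (its definition is not shown in this excerpt) presumably returns hex digests keyed by algorithm. A minimal sketch of such a helper, using Node's built-in crypto module:

    import * as crypto from 'crypto';

    // Sketch only: assumes the real helper returns { md5, sha1, sha256, sha512 } hex strings,
    // matching how the tests compare response bodies against checksums.md5 etc.
    function calculateChecksumsSketch(data: Buffer): { md5: string; sha1: string; sha256: string; sha512: string } {
      const digest = (algorithm: string) => crypto.createHash(algorithm).update(data).digest('hex');
      return {
        md5: digest('md5'),
        sha1: digest('sha1'),
        sha256: digest('sha256'),
        sha512: digest('sha512'),
      };
    }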
test/test.npm.nativecli.node.ts (new file, 412 lines)
@@ -0,0 +1,412 @@
|
|||||||
|
/**
|
||||||
|
* Native npm CLI Testing
|
||||||
|
* Tests the NPM registry implementation using the actual npm CLI
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import { tapNodeTools } from '@git.zone/tstest/tapbundle_serverside';
|
||||||
|
import { SmartRegistry } from '../ts/index.js';
|
||||||
|
import { createTestRegistry, createTestTokens } from './helpers/registry.js';
|
||||||
|
import type { IRequestContext, IResponse } from '../ts/core/interfaces.core.js';
|
||||||
|
import * as http from 'http';
|
||||||
|
import * as url from 'url';
|
||||||
|
import * as fs from 'fs';
|
||||||
|
import * as path from 'path';
|
||||||
|
|
||||||
|
// Test context
|
||||||
|
let registry: SmartRegistry;
|
||||||
|
let server: http.Server;
|
||||||
|
let registryUrl: string;
|
||||||
|
let registryPort: number;
|
||||||
|
let npmToken: string;
|
||||||
|
let testDir: string;
|
||||||
|
let npmrcPath: string;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create HTTP server wrapper around SmartRegistry
|
||||||
|
*/
|
||||||
|
async function createHttpServer(
|
||||||
|
registryInstance: SmartRegistry,
|
||||||
|
port: number
|
||||||
|
): Promise<{ server: http.Server; url: string }> {
|
||||||
|
return new Promise((resolve, reject) => {
|
||||||
|
const httpServer = http.createServer(async (req, res) => {
|
||||||
|
try {
|
||||||
|
// Parse request
|
||||||
|
const parsedUrl = url.parse(req.url || '', true);
|
||||||
|
const pathname = parsedUrl.pathname || '/';
|
||||||
|
const query = parsedUrl.query;
|
||||||
|
|
||||||
|
// Read body
|
||||||
|
const chunks: Buffer[] = [];
|
||||||
|
for await (const chunk of req) {
|
||||||
|
chunks.push(chunk);
|
||||||
|
}
|
||||||
|
const bodyBuffer = Buffer.concat(chunks);
|
||||||
|
|
||||||
|
// Parse body based on content type
|
||||||
|
let body: any;
|
||||||
|
if (bodyBuffer.length > 0) {
|
||||||
|
const contentType = req.headers['content-type'] || '';
|
||||||
|
if (contentType.includes('application/json')) {
|
||||||
|
try {
|
||||||
|
body = JSON.parse(bodyBuffer.toString('utf-8'));
|
||||||
|
} catch (error) {
|
||||||
|
body = bodyBuffer;
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
body = bodyBuffer;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert to IRequestContext
|
||||||
|
const context: IRequestContext = {
|
||||||
|
method: req.method || 'GET',
|
||||||
|
path: pathname,
|
||||||
|
headers: req.headers as Record<string, string>,
|
||||||
|
query: query as Record<string, string>,
|
||||||
|
body: body,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Handle request
|
||||||
|
const response: IResponse = await registryInstance.handleRequest(context);
|
||||||
|
|
||||||
|
// Convert IResponse to HTTP response
|
||||||
|
res.statusCode = response.status;
|
||||||
|
|
||||||
|
// Set headers
|
||||||
|
for (const [key, value] of Object.entries(response.headers || {})) {
|
||||||
|
res.setHeader(key, value);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Send body
|
||||||
|
if (response.body) {
|
||||||
|
if (Buffer.isBuffer(response.body)) {
|
||||||
|
res.end(response.body);
|
||||||
|
} else if (typeof response.body === 'string') {
|
||||||
|
res.end(response.body);
|
||||||
|
} else {
|
||||||
|
res.setHeader('Content-Type', 'application/json');
|
||||||
|
res.end(JSON.stringify(response.body));
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
res.end();
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
console.error('Server error:', error);
|
||||||
|
res.statusCode = 500;
|
||||||
|
res.setHeader('Content-Type', 'application/json');
|
||||||
|
res.end(JSON.stringify({ error: 'INTERNAL_ERROR', message: String(error) }));
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
httpServer.listen(port, () => {
|
||||||
|
const serverUrl = `http://localhost:${port}`;
|
||||||
|
resolve({ server: httpServer, url: serverUrl });
|
||||||
|
});
|
||||||
|
|
||||||
|
httpServer.on('error', reject);
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Setup .npmrc configuration
|
||||||
|
*/
|
||||||
|
function setupNpmrc(registryUrlArg: string, token: string, testDirArg: string): string {
|
||||||
|
const npmrcContent = `registry=${registryUrlArg}/npm/
|
||||||
|
//localhost:${registryPort}/npm/:_authToken=${token}
|
||||||
|
`;
|
||||||
|
|
||||||
|
const npmrcFilePath = path.join(testDirArg, '.npmrc');
|
||||||
|
fs.writeFileSync(npmrcFilePath, npmrcContent, 'utf-8');
|
||||||
|
return npmrcFilePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create a test package
|
||||||
|
*/
|
||||||
|
function createTestPackage(
|
||||||
|
packageName: string,
|
||||||
|
version: string,
|
||||||
|
targetDir: string
|
||||||
|
): string {
|
||||||
|
const packageDir = path.join(targetDir, packageName);
|
||||||
|
fs.mkdirSync(packageDir, { recursive: true });
|
||||||
|
|
||||||
|
// Create package.json
|
||||||
|
const packageJson = {
|
||||||
|
name: packageName,
|
||||||
|
version: version,
|
||||||
|
description: `Test package ${packageName}`,
|
||||||
|
main: 'index.js',
|
||||||
|
scripts: {
|
||||||
|
test: 'echo "Test passed"',
|
||||||
|
},
|
||||||
|
keywords: ['test'],
|
||||||
|
author: 'Test Author',
|
||||||
|
license: 'MIT',
|
||||||
|
};
|
||||||
|
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(packageDir, 'package.json'),
|
||||||
|
JSON.stringify(packageJson, null, 2),
|
||||||
|
'utf-8'
|
||||||
|
);
|
||||||
|
|
||||||
|
// Create index.js
|
||||||
|
const indexJs = `module.exports = {
|
||||||
|
name: '${packageName}',
|
||||||
|
version: '${version}',
|
||||||
|
message: 'Hello from ${packageName}@${version}'
|
||||||
|
};
|
||||||
|
`;
|
||||||
|
|
||||||
|
fs.writeFileSync(path.join(packageDir, 'index.js'), indexJs, 'utf-8');
|
||||||
|
|
||||||
|
// Create README.md
|
||||||
|
const readme = `# ${packageName}
|
||||||
|
|
||||||
|
Test package for SmartRegistry.
|
||||||
|
|
||||||
|
Version: ${version}
|
||||||
|
`;
|
||||||
|
|
||||||
|
fs.writeFileSync(path.join(packageDir, 'README.md'), readme, 'utf-8');
|
||||||
|
|
||||||
|
return packageDir;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Run npm command with proper environment
|
||||||
|
*/
|
||||||
|
async function runNpmCommand(
|
||||||
|
command: string,
|
||||||
|
cwd: string
|
||||||
|
): Promise<{ stdout: string; stderr: string; exitCode: number }> {
|
||||||
|
// Prepare environment variables
|
||||||
|
const envVars = [
|
||||||
|
`NPM_CONFIG_USERCONFIG="${npmrcPath}"`,
|
||||||
|
`NPM_CONFIG_CACHE="${path.join(testDir, '.npm-cache')}"`,
|
||||||
|
`NPM_CONFIG_PREFIX="${path.join(testDir, '.npm-global')}"`,
|
||||||
|
`NPM_CONFIG_REGISTRY="${registryUrl}/npm/"`,
|
||||||
|
].join(' ');
|
||||||
|
|
||||||
|
// Build command with cd to correct directory and environment variables
|
||||||
|
const fullCommand = `cd "${cwd}" && ${envVars} ${command}`;
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await tapNodeTools.runCommand(fullCommand);
|
||||||
|
return {
|
||||||
|
stdout: result.stdout || '',
|
||||||
|
stderr: result.stderr || '',
|
||||||
|
exitCode: result.exitCode || 0,
|
||||||
|
};
|
||||||
|
} catch (error: any) {
|
||||||
|
return {
|
||||||
|
stdout: error.stdout || '',
|
||||||
|
stderr: error.stderr || String(error),
|
||||||
|
exitCode: error.exitCode || 1,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cleanup test directory
|
||||||
|
*/
|
||||||
|
function cleanupTestDir(dir: string): void {
|
||||||
|
if (fs.existsSync(dir)) {
|
||||||
|
fs.rmSync(dir, { recursive: true, force: true });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========================================================================
|
||||||
|
// TESTS
|
||||||
|
// ========================================================================
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should setup registry and HTTP server', async () => {
|
||||||
|
// Create registry
|
||||||
|
registry = await createTestRegistry();
|
||||||
|
const tokens = await createTestTokens(registry);
|
||||||
|
npmToken = tokens.npmToken;
|
||||||
|
|
||||||
|
expect(registry).toBeInstanceOf(SmartRegistry);
|
||||||
|
expect(npmToken).toBeTypeOf('string');
|
||||||
|
|
||||||
|
// Find available port
|
||||||
|
registryPort = 35000;
|
||||||
|
const serverSetup = await createHttpServer(registry, registryPort);
|
||||||
|
server = serverSetup.server;
|
||||||
|
registryUrl = serverSetup.url;
|
||||||
|
|
||||||
|
expect(server).toBeDefined();
|
||||||
|
expect(registryUrl).toEqual(`http://localhost:${registryPort}`);
|
||||||
|
|
||||||
|
// Setup test directory
|
||||||
|
testDir = path.join(process.cwd(), '.nogit', 'test-npm-cli');
|
||||||
|
cleanupTestDir(testDir);
|
||||||
|
fs.mkdirSync(testDir, { recursive: true });
|
||||||
|
|
||||||
|
// Setup .npmrc
|
||||||
|
npmrcPath = setupNpmrc(registryUrl, npmToken, testDir);
|
||||||
|
expect(fs.existsSync(npmrcPath)).toEqual(true);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should verify server is responding', async () => {
|
||||||
|
const result = await runNpmCommand('npm ping', testDir);
|
||||||
|
console.log('npm ping output:', result.stdout, result.stderr);
|
||||||
|
|
||||||
|
// npm ping may not work with custom registries, so just check server is up
|
||||||
|
// by doing a direct HTTP request
|
||||||
|
const response = await fetch(`${registryUrl}/npm/`);
|
||||||
|
expect(response.status).toBeGreaterThanOrEqual(200);
|
||||||
|
expect(response.status).toBeLessThan(500);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should publish a package', async () => {
|
||||||
|
const packageName = 'test-package-cli';
|
||||||
|
const version = '1.0.0';
|
||||||
|
const packageDir = createTestPackage(packageName, version, testDir);
|
||||||
|
|
||||||
|
const result = await runNpmCommand('npm publish', packageDir);
|
||||||
|
console.log('npm publish output:', result.stdout);
|
||||||
|
console.log('npm publish stderr:', result.stderr);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
expect(result.stdout || result.stderr).toContain(packageName);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should view published package', async () => {
|
||||||
|
const packageName = 'test-package-cli';
|
||||||
|
|
||||||
|
const result = await runNpmCommand(`npm view ${packageName}`, testDir);
|
||||||
|
console.log('npm view output:', result.stdout);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
expect(result.stdout).toContain(packageName);
|
||||||
|
expect(result.stdout).toContain('1.0.0');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should install published package', async () => {
|
||||||
|
const packageName = 'test-package-cli';
|
||||||
|
const installDir = path.join(testDir, 'install-test');
|
||||||
|
fs.mkdirSync(installDir, { recursive: true });
|
||||||
|
|
||||||
|
// Create package.json for installation
|
||||||
|
const packageJson = {
|
||||||
|
name: 'install-test',
|
||||||
|
version: '1.0.0',
|
||||||
|
dependencies: {
|
||||||
|
[packageName]: '1.0.0',
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
fs.writeFileSync(
|
||||||
|
path.join(installDir, 'package.json'),
|
||||||
|
JSON.stringify(packageJson, null, 2),
|
||||||
|
'utf-8'
|
||||||
|
);
|
||||||
|
|
||||||
|
const result = await runNpmCommand('npm install', installDir);
|
||||||
|
console.log('npm install output:', result.stdout);
|
||||||
|
console.log('npm install stderr:', result.stderr);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
|
||||||
|
// Verify package was installed
|
||||||
|
const nodeModulesPath = path.join(installDir, 'node_modules', packageName);
|
||||||
|
expect(fs.existsSync(nodeModulesPath)).toEqual(true);
|
||||||
|
expect(fs.existsSync(path.join(nodeModulesPath, 'package.json'))).toEqual(true);
|
||||||
|
expect(fs.existsSync(path.join(nodeModulesPath, 'index.js'))).toEqual(true);
|
||||||
|
|
||||||
|
// Verify package contents
|
||||||
|
const installedPackageJson = JSON.parse(
|
||||||
|
fs.readFileSync(path.join(nodeModulesPath, 'package.json'), 'utf-8')
|
||||||
|
);
|
||||||
|
expect(installedPackageJson.name).toEqual(packageName);
|
||||||
|
expect(installedPackageJson.version).toEqual('1.0.0');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should publish second version', async () => {
|
||||||
|
const packageName = 'test-package-cli';
|
||||||
|
const version = '1.1.0';
|
||||||
|
const packageDir = createTestPackage(packageName, version, testDir);
|
||||||
|
|
||||||
|
const result = await runNpmCommand('npm publish', packageDir);
|
||||||
|
console.log('npm publish v1.1.0 output:', result.stdout);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should list versions', async () => {
|
||||||
|
const packageName = 'test-package-cli';
|
||||||
|
|
||||||
|
const result = await runNpmCommand(`npm view ${packageName} versions`, testDir);
|
||||||
|
console.log('npm view versions output:', result.stdout);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
expect(result.stdout).toContain('1.0.0');
|
||||||
|
expect(result.stdout).toContain('1.1.0');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should publish scoped package', async () => {
|
||||||
|
const packageName = '@testscope/scoped-package';
|
||||||
|
const version = '1.0.0';
|
||||||
|
const packageDir = createTestPackage(packageName, version, testDir);
|
||||||
|
|
||||||
|
const result = await runNpmCommand('npm publish --access public', packageDir);
|
||||||
|
console.log('npm publish scoped output:', result.stdout);
|
||||||
|
console.log('npm publish scoped stderr:', result.stderr);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should view scoped package', async () => {
|
||||||
|
const packageName = '@testscope/scoped-package';
|
||||||
|
|
||||||
|
const result = await runNpmCommand(`npm view ${packageName}`, testDir);
|
||||||
|
console.log('npm view scoped output:', result.stdout);
|
||||||
|
|
||||||
|
expect(result.exitCode).toEqual(0);
|
||||||
|
expect(result.stdout).toContain('scoped-package');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('NPM CLI: should fail to publish without auth', async () => {
|
||||||
|
const packageName = 'unauth-package';
|
||||||
|
const version = '1.0.0';
|
||||||
|
const packageDir = createTestPackage(packageName, version, testDir);
|
||||||
|
|
||||||
|
// Temporarily remove .npmrc
|
||||||
|
const npmrcBackup = fs.readFileSync(npmrcPath, 'utf-8');
|
||||||
|
fs.writeFileSync(npmrcPath, 'registry=' + registryUrl + '/npm/\n', 'utf-8');
|
||||||
|
|
||||||
|
const result = await runNpmCommand('npm publish', packageDir);
|
||||||
|
console.log('npm publish unauth output:', result.stdout);
|
||||||
|
console.log('npm publish unauth stderr:', result.stderr);
|
||||||
|
|
||||||
|
// Restore .npmrc
|
||||||
|
fs.writeFileSync(npmrcPath, npmrcBackup, 'utf-8');
|
||||||
|
|
||||||
|
// Should fail with auth error
|
||||||
|
expect(result.exitCode).not.toEqual(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.postTask('cleanup npm cli tests', async () => {
|
||||||
|
// Stop server
|
||||||
|
if (server) {
|
||||||
|
await new Promise<void>((resolve) => {
|
||||||
|
server.close(() => resolve());
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// Cleanup test directory
|
||||||
|
if (testDir) {
|
||||||
|
cleanupTestDir(testDir);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Destroy registry
|
||||||
|
if (registry) {
|
||||||
|
registry.destroy();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
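For orientation, the .npmrc written by setupNpmrc above ends up looking like this (the port comes from this test's registryPort setup; the token value is a placeholder for the token issued by createTestTokens):

    registry=http://localhost:35000/npm/
    //localhost:35000/npm/:_authToken=<npm token from createTestTokens>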
@@ -3,6 +3,6 @@
  */
 export const commitinfo = {
   name: '@push.rocks/smartregistry',
-  version: '1.1.0',
+  version: '1.4.1',
   description: 'a registry for npm modules and oci images'
 }
ts/cargo/classes.cargoregistry.ts (new file, 604 lines)
@@ -0,0 +1,604 @@
|
|||||||
|
import { Smartlog } from '@push.rocks/smartlog';
|
||||||
|
import { BaseRegistry } from '../core/classes.baseregistry.js';
|
||||||
|
import { RegistryStorage } from '../core/classes.registrystorage.js';
|
||||||
|
import { AuthManager } from '../core/classes.authmanager.js';
|
||||||
|
import type { IRequestContext, IResponse, IAuthToken } from '../core/interfaces.core.js';
|
||||||
|
import type {
|
||||||
|
ICargoIndexEntry,
|
||||||
|
ICargoPublishMetadata,
|
||||||
|
ICargoConfig,
|
||||||
|
ICargoError,
|
||||||
|
ICargoPublishResponse,
|
||||||
|
ICargoYankResponse,
|
||||||
|
ICargoSearchResponse,
|
||||||
|
ICargoSearchResult,
|
||||||
|
} from './interfaces.cargo.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Cargo/crates.io registry implementation
|
||||||
|
* Implements the sparse HTTP-based protocol
|
||||||
|
* Spec: https://doc.rust-lang.org/cargo/reference/registry-index.html
|
||||||
|
*/
|
||||||
|
export class CargoRegistry extends BaseRegistry {
|
||||||
|
private storage: RegistryStorage;
|
||||||
|
private authManager: AuthManager;
|
||||||
|
private basePath: string = '/cargo';
|
||||||
|
private registryUrl: string;
|
||||||
|
private logger: Smartlog;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
storage: RegistryStorage,
|
||||||
|
authManager: AuthManager,
|
||||||
|
basePath: string = '/cargo',
|
||||||
|
registryUrl: string = 'http://localhost:5000/cargo'
|
||||||
|
) {
|
||||||
|
super();
|
||||||
|
this.storage = storage;
|
||||||
|
this.authManager = authManager;
|
||||||
|
this.basePath = basePath;
|
||||||
|
this.registryUrl = registryUrl;
|
||||||
|
|
||||||
|
// Initialize logger
|
||||||
|
this.logger = new Smartlog({
|
||||||
|
logContext: {
|
||||||
|
company: 'push.rocks',
|
||||||
|
companyunit: 'smartregistry',
|
||||||
|
containerName: 'cargo-registry',
|
||||||
|
environment: (process.env.NODE_ENV as any) || 'development',
|
||||||
|
runtime: 'node',
|
||||||
|
zone: 'cargo'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
this.logger.enableConsole();
|
||||||
|
}
|
||||||
|
|
||||||
|
public async init(): Promise<void> {
|
||||||
|
// Initialize config.json if not exists
|
||||||
|
const existingConfig = await this.storage.getCargoConfig();
|
||||||
|
if (!existingConfig) {
|
||||||
|
const config: ICargoConfig = {
|
||||||
|
dl: `${this.registryUrl}/api/v1/crates/{crate}/{version}/download`,
|
||||||
|
api: this.registryUrl,
|
||||||
|
};
|
||||||
|
await this.storage.putCargoConfig(config);
|
||||||
|
this.logger.log('info', 'Initialized Cargo registry config', { config });
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public getBasePath(): string {
|
||||||
|
return this.basePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async handleRequest(context: IRequestContext): Promise<IResponse> {
|
||||||
|
const path = context.path.replace(this.basePath, '');
|
||||||
|
|
||||||
|
// Extract token (Cargo uses Authorization header WITHOUT "Bearer" prefix)
|
||||||
|
const authHeader = context.headers['authorization'] || context.headers['Authorization'];
|
||||||
|
const token = authHeader ? await this.authManager.validateToken(authHeader, 'cargo') : null;
|
||||||
|
|
||||||
|
this.logger.log('debug', `handleRequest: ${context.method} ${path}`, {
|
||||||
|
method: context.method,
|
||||||
|
path,
|
||||||
|
hasAuth: !!token
|
||||||
|
});
|
||||||
|
|
||||||
|
// Config endpoint (required for sparse protocol)
|
||||||
|
if (path === '/config.json') {
|
||||||
|
return this.handleConfigJson();
|
||||||
|
}
|
||||||
|
|
||||||
|
// API endpoints
|
||||||
|
if (path.startsWith('/api/v1/')) {
|
||||||
|
return this.handleApiRequest(path, context, token);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Index files (sparse protocol)
|
||||||
|
return this.handleIndexRequest(path);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Check if token has permission for resource
|
||||||
|
*/
|
||||||
|
protected async checkPermission(
|
||||||
|
token: IAuthToken | null,
|
||||||
|
resource: string,
|
||||||
|
action: string
|
||||||
|
): Promise<boolean> {
|
||||||
|
if (!token) return false;
|
||||||
|
return this.authManager.authorize(token, `cargo:crate:${resource}`, action);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle API requests (/api/v1/*)
|
||||||
|
*/
|
||||||
|
private async handleApiRequest(
|
||||||
|
path: string,
|
||||||
|
context: IRequestContext,
|
||||||
|
token: IAuthToken | null
|
||||||
|
): Promise<IResponse> {
|
||||||
|
// Publish: PUT /api/v1/crates/new
|
||||||
|
if (path === '/api/v1/crates/new' && context.method === 'PUT') {
|
||||||
|
return this.handlePublish(context.body as Buffer, token);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Download: GET /api/v1/crates/{crate}/{version}/download
|
||||||
|
const downloadMatch = path.match(/^\/api\/v1\/crates\/([^\/]+)\/([^\/]+)\/download$/);
|
||||||
|
if (downloadMatch && context.method === 'GET') {
|
||||||
|
return this.handleDownload(downloadMatch[1], downloadMatch[2]);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Yank: DELETE /api/v1/crates/{crate}/{version}/yank
|
||||||
|
const yankMatch = path.match(/^\/api\/v1\/crates\/([^\/]+)\/([^\/]+)\/yank$/);
|
||||||
|
if (yankMatch && context.method === 'DELETE') {
|
||||||
|
return this.handleYank(yankMatch[1], yankMatch[2], token);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Unyank: PUT /api/v1/crates/{crate}/{version}/unyank
|
||||||
|
const unyankMatch = path.match(/^\/api\/v1\/crates\/([^\/]+)\/([^\/]+)\/unyank$/);
|
||||||
|
if (unyankMatch && context.method === 'PUT') {
|
||||||
|
return this.handleUnyank(unyankMatch[1], unyankMatch[2], token);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Search: GET /api/v1/crates?q={query}
|
||||||
|
if (path.startsWith('/api/v1/crates') && context.method === 'GET') {
|
||||||
|
const query = context.query?.q || '';
|
||||||
|
const perPage = parseInt(context.query?.per_page || '10', 10);
|
||||||
|
return this.handleSearch(query, perPage);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('API endpoint not found'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle index file requests
|
||||||
|
* Paths: /1/{name}, /2/{name}, /3/{c}/{name}, /{p1}/{p2}/{name}
|
||||||
|
*/
|
||||||
|
private async handleIndexRequest(path: string): Promise<IResponse> {
|
||||||
|
// Parse index paths to extract crate name
|
||||||
|
const pathParts = path.split('/').filter(p => p);
|
||||||
|
let crateName: string | null = null;
|
||||||
|
|
||||||
|
if (pathParts.length === 2 && pathParts[0] === '1') {
|
||||||
|
// 1-character names: /1/{name}
|
||||||
|
crateName = pathParts[1];
|
||||||
|
} else if (pathParts.length === 2 && pathParts[0] === '2') {
|
||||||
|
// 2-character names: /2/{name}
|
||||||
|
crateName = pathParts[1];
|
||||||
|
} else if (pathParts.length === 3 && pathParts[0] === '3') {
|
||||||
|
// 3-character names: /3/{c}/{name}
|
||||||
|
crateName = pathParts[2];
|
||||||
|
} else if (pathParts.length === 3) {
|
||||||
|
// 4+ character names: /{p1}/{p2}/{name}
|
||||||
|
crateName = pathParts[2];
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!crateName) {
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'text/plain' },
|
||||||
|
body: Buffer.from(''),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.handleIndexFile(crateName);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Serve config.json
|
||||||
|
*/
|
||||||
|
private async handleConfigJson(): Promise<IResponse> {
|
||||||
|
const config = await this.storage.getCargoConfig();
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: config || {
|
||||||
|
dl: `${this.registryUrl}/api/v1/crates/{crate}/{version}/download`,
|
||||||
|
api: this.registryUrl,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Serve index file for a crate
|
||||||
|
*/
|
||||||
|
private async handleIndexFile(crateName: string): Promise<IResponse> {
|
||||||
|
const index = await this.storage.getCargoIndex(crateName);
|
||||||
|
|
||||||
|
if (!index || index.length === 0) {
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'text/plain' },
|
||||||
|
body: Buffer.from(''),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Return newline-delimited JSON
|
||||||
|
const data = index.map(e => JSON.stringify(e)).join('\n') + '\n';
|
||||||
|
|
||||||
|
// Calculate ETag for caching
|
||||||
|
const crypto = await import('crypto');
|
||||||
|
const etag = `"${crypto.createHash('sha256').update(data).digest('hex')}"`;
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'text/plain',
|
||||||
|
'ETag': etag,
|
||||||
|
},
|
||||||
|
body: Buffer.from(data, 'utf-8'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Parse binary publish request
|
||||||
|
* Format: [4 bytes JSON len][JSON][4 bytes crate len][.crate file]
|
||||||
|
*/
|
||||||
|
private parsePublishRequest(body: Buffer): {
|
||||||
|
metadata: ICargoPublishMetadata;
|
||||||
|
crateFile: Buffer;
|
||||||
|
} {
|
||||||
|
let offset = 0;
|
||||||
|
|
||||||
|
// Read JSON length (4 bytes, u32 little-endian)
|
||||||
|
if (body.length < 4) {
|
||||||
|
throw new Error('Invalid publish request: body too short');
|
||||||
|
}
|
||||||
|
const jsonLength = body.readUInt32LE(offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Read JSON metadata
|
||||||
|
if (body.length < offset + jsonLength) {
|
||||||
|
throw new Error('Invalid publish request: JSON data incomplete');
|
||||||
|
}
|
||||||
|
const jsonBuffer = body.slice(offset, offset + jsonLength);
|
||||||
|
const metadata = JSON.parse(jsonBuffer.toString('utf-8'));
|
||||||
|
offset += jsonLength;
|
||||||
|
|
||||||
|
// Read crate file length (4 bytes, u32 little-endian)
|
||||||
|
if (body.length < offset + 4) {
|
||||||
|
throw new Error('Invalid publish request: crate length missing');
|
||||||
|
}
|
||||||
|
const crateLength = body.readUInt32LE(offset);
|
||||||
|
offset += 4;
|
||||||
|
|
||||||
|
// Read crate file
|
||||||
|
if (body.length < offset + crateLength) {
|
||||||
|
throw new Error('Invalid publish request: crate data incomplete');
|
||||||
|
}
|
||||||
|
const crateFile = body.slice(offset, offset + crateLength);
|
||||||
|
|
||||||
|
return { metadata, crateFile };
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle crate publish
|
||||||
|
*/
|
||||||
|
private async handlePublish(
|
||||||
|
body: Buffer,
|
||||||
|
token: IAuthToken | null
|
||||||
|
): Promise<IResponse> {
|
||||||
|
this.logger.log('info', 'handlePublish: received publish request', {
|
||||||
|
bodyLength: body?.length || 0,
|
||||||
|
hasAuth: !!token
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check authorization
|
||||||
|
if (!token) {
|
||||||
|
return {
|
||||||
|
status: 403,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Authentication required'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse binary request
|
||||||
|
let metadata: ICargoPublishMetadata;
|
||||||
|
let crateFile: Buffer;
|
||||||
|
try {
|
||||||
|
const parsed = this.parsePublishRequest(body);
|
||||||
|
metadata = parsed.metadata;
|
||||||
|
crateFile = parsed.crateFile;
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.log('error', 'handlePublish: parse error', { error: error.message });
|
||||||
|
return {
|
||||||
|
status: 400,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError(`Invalid request format: ${error.message}`),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Validate crate name
|
||||||
|
if (!this.validateCrateName(metadata.name)) {
|
||||||
|
return {
|
||||||
|
status: 400,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Invalid crate name'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check permission
|
||||||
|
const hasPermission = await this.checkPermission(token, metadata.name, 'write');
|
||||||
|
if (!hasPermission) {
|
||||||
|
this.logger.log('warn', 'handlePublish: unauthorized', {
|
||||||
|
crateName: metadata.name,
|
||||||
|
userId: token.userId
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
status: 403,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Insufficient permissions'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Calculate SHA256 checksum
|
||||||
|
const crypto = await import('crypto');
|
||||||
|
const cksum = crypto.createHash('sha256').update(crateFile).digest('hex');
|
||||||
|
|
||||||
|
// Create index entry
|
||||||
|
const indexEntry: ICargoIndexEntry = {
|
||||||
|
name: metadata.name,
|
||||||
|
vers: metadata.vers,
|
||||||
|
deps: metadata.deps,
|
||||||
|
cksum,
|
||||||
|
features: metadata.features,
|
||||||
|
yanked: false,
|
||||||
|
links: metadata.links || null,
|
||||||
|
v: 2,
|
||||||
|
rust_version: metadata.rust_version,
|
||||||
|
};
|
||||||
|
|
||||||
|
// Check for duplicate version
|
||||||
|
const existingIndex = await this.storage.getCargoIndex(metadata.name) || [];
|
||||||
|
if (existingIndex.some(e => e.vers === metadata.vers)) {
|
||||||
|
return {
|
||||||
|
status: 400,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError(`Version ${metadata.vers} already exists`),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Store crate file
|
||||||
|
await this.storage.putCargoCrate(metadata.name, metadata.vers, crateFile);
|
||||||
|
|
||||||
|
// Update index (append new version)
|
||||||
|
existingIndex.push(indexEntry);
|
||||||
|
await this.storage.putCargoIndex(metadata.name, existingIndex);
|
||||||
|
|
||||||
|
this.logger.log('success', 'handlePublish: published crate', {
|
||||||
|
name: metadata.name,
|
||||||
|
version: metadata.vers,
|
||||||
|
checksum: cksum
|
||||||
|
});
|
||||||
|
|
||||||
|
const response: ICargoPublishResponse = {
|
||||||
|
warnings: {
|
||||||
|
invalid_categories: [],
|
||||||
|
invalid_badges: [],
|
||||||
|
other: [],
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: response,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle crate download
|
||||||
|
*/
|
||||||
|
private async handleDownload(
|
||||||
|
crateName: string,
|
||||||
|
version: string
|
||||||
|
): Promise<IResponse> {
|
||||||
|
this.logger.log('debug', 'handleDownload', { crate: crateName, version });
|
||||||
|
|
||||||
|
const crateFile = await this.storage.getCargoCrate(crateName, version);
|
||||||
|
|
||||||
|
if (!crateFile) {
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Crate not found'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 200,
|
||||||
|
headers: {
|
||||||
|
'Content-Type': 'application/gzip',
|
||||||
|
'Content-Length': crateFile.length.toString(),
|
||||||
|
'Content-Disposition': `attachment; filename="${crateName}-${version}.crate"`,
|
||||||
|
},
|
||||||
|
body: crateFile,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle yank operation
|
||||||
|
*/
|
||||||
|
private async handleYank(
|
||||||
|
crateName: string,
|
||||||
|
version: string,
|
||||||
|
token: IAuthToken | null
|
||||||
|
): Promise<IResponse> {
|
||||||
|
return this.handleYankOperation(crateName, version, token, true);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle unyank operation
|
||||||
|
*/
|
||||||
|
private async handleUnyank(
|
||||||
|
crateName: string,
|
||||||
|
version: string,
|
||||||
|
token: IAuthToken | null
|
||||||
|
): Promise<IResponse> {
|
||||||
|
return this.handleYankOperation(crateName, version, token, false);
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle yank/unyank operation
|
||||||
|
*/
|
||||||
|
private async handleYankOperation(
|
||||||
|
crateName: string,
|
||||||
|
version: string,
|
||||||
|
token: IAuthToken | null,
|
||||||
|
yank: boolean
|
||||||
|
): Promise<IResponse> {
|
||||||
|
this.logger.log('info', `handle${yank ? 'Yank' : 'Unyank'}`, {
|
||||||
|
crate: crateName,
|
||||||
|
version,
|
||||||
|
hasAuth: !!token
|
||||||
|
});
|
||||||
|
|
||||||
|
// Check authorization
|
||||||
|
if (!token) {
|
||||||
|
return {
|
||||||
|
status: 403,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Authentication required'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check permission
|
||||||
|
const hasPermission = await this.checkPermission(token, crateName, 'write');
|
||||||
|
if (!hasPermission) {
|
||||||
|
return {
|
||||||
|
status: 403,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Insufficient permissions'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Load index
|
||||||
|
const index = await this.storage.getCargoIndex(crateName);
|
||||||
|
if (!index) {
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Crate not found'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Find version
|
||||||
|
const entry = index.find(e => e.vers === version);
|
||||||
|
if (!entry) {
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: this.createError('Version not found'),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Update yank status
|
||||||
|
entry.yanked = yank;
|
||||||
|
|
||||||
|
// Save index (NOTE: do NOT delete .crate file)
|
||||||
|
await this.storage.putCargoIndex(crateName, index);
|
||||||
|
|
||||||
|
this.logger.log('success', `${yank ? 'Yanked' : 'Unyanked'} version`, {
|
||||||
|
crate: crateName,
|
||||||
|
version
|
||||||
|
});
|
||||||
|
|
||||||
|
const response: ICargoYankResponse = { ok: true };
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: response,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Handle search
|
||||||
|
*/
|
||||||
|
private async handleSearch(query: string, perPage: number): Promise<IResponse> {
|
||||||
|
this.logger.log('debug', 'handleSearch', { query, perPage });
|
||||||
|
|
||||||
|
const results: ICargoSearchResult[] = [];
|
||||||
|
|
||||||
|
try {
|
||||||
|
// List all index paths
|
||||||
|
const indexPaths = await this.storage.listObjects('cargo/index/');
|
||||||
|
|
||||||
|
// Extract unique crate names
|
||||||
|
const crateNames = new Set<string>();
|
||||||
|
for (const path of indexPaths) {
|
||||||
|
// Parse path to extract crate name
|
||||||
|
const parts = path.split('/');
|
||||||
|
if (parts.length >= 3) {
|
||||||
|
const name = parts[parts.length - 1];
|
||||||
|
if (name && !name.includes('.')) {
|
||||||
|
crateNames.add(name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
this.logger.log('debug', `handleSearch: found ${crateNames.size} crates`, {
|
||||||
|
totalCrates: crateNames.size
|
||||||
|
});
|
||||||
|
|
||||||
|
// Filter and process matching crates
|
||||||
|
for (const name of crateNames) {
|
||||||
|
if (!query || name.toLowerCase().includes(query.toLowerCase())) {
|
||||||
|
const index = await this.storage.getCargoIndex(name);
|
||||||
|
if (index && index.length > 0) {
|
||||||
|
// Find latest non-yanked version
|
||||||
|
const nonYanked = index.filter(e => !e.yanked);
|
||||||
|
if (nonYanked.length > 0) {
|
||||||
|
// Sort by version (simplified - should use semver)
|
||||||
|
const sorted = [...nonYanked].sort((a, b) => b.vers.localeCompare(a.vers));
|
||||||
|
|
||||||
|
results.push({
|
||||||
|
name: sorted[0].name,
|
||||||
|
max_version: sorted[0].vers,
|
||||||
|
description: '', // Would need to store separately
|
||||||
|
});
|
||||||
|
|
||||||
|
if (results.length >= perPage) break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
this.logger.log('error', 'handleSearch: error', { error: error.message });
|
||||||
|
}
|
||||||
|
|
||||||
|
const response: ICargoSearchResponse = {
|
||||||
|
crates: results,
|
||||||
|
meta: {
|
||||||
|
total: results.length,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 200,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: response,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Validate crate name
|
||||||
|
* Rules: lowercase alphanumeric + _ and -, length 1-64
|
||||||
|
*/
|
||||||
|
private validateCrateName(name: string): boolean {
|
||||||
|
return /^[a-z0-9_-]+$/.test(name) && name.length >= 1 && name.length <= 64;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Create error response
|
||||||
|
*/
|
||||||
|
private createError(detail: string): ICargoError {
|
||||||
|
return {
|
||||||
|
errors: [{ detail }],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
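For orientation, the sparse-index layout parsed by handleIndexRequest above maps crate names to index paths following the standard crates.io convention. A small sketch (illustrative only, not part of this PR):

    // Builds the sparse-index path for a crate name, mirroring the path shapes
    // handled above: /1/{name}, /2/{name}, /3/{c}/{name}, /{p1}/{p2}/{name}.
    function indexPathFor(name: string): string {
      if (name.length === 1) return `1/${name}`;
      if (name.length === 2) return `2/${name}`;
      if (name.length === 3) return `3/${name[0]}/${name}`;
      return `${name.slice(0, 2)}/${name.slice(2, 4)}/${name}`;
    }
    // e.g. indexPathFor('cfg') === '3/c/cfg', indexPathFor('serde') === 'se/rd/serde'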
ts/cargo/index.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
/**
 * Cargo/crates.io Registry module exports
 */

export { CargoRegistry } from './classes.cargoregistry.js';
export * from './interfaces.cargo.js';
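A minimal usage sketch of these exports, assuming you already hold RegistryStorage and AuthManager instances (normally SmartRegistry wires this up; paths and the config.json request below are illustrative):

    import { CargoRegistry } from './index.js';

    // storage: RegistryStorage, authManager: AuthManager (assumed to exist)
    const cargo = new CargoRegistry(storage, authManager, '/cargo', 'http://localhost:5000/cargo');
    await cargo.init();
    const res = await cargo.handleRequest({ method: 'GET', path: '/cargo/config.json', headers: {}, query: {} });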
ts/cargo/interfaces.cargo.ts (new file, 169 lines)
@@ -0,0 +1,169 @@
/**
 * Cargo/crates.io registry type definitions
 * Based on: https://doc.rust-lang.org/cargo/reference/registry-index.html
 */

/**
 * Dependency specification in Cargo index
 */
export interface ICargoDepend {
  /** Dependency package name */
  name: string;
  /** Version requirement (e.g., "^0.6", ">=1.0.0") */
  req: string;
  /** Optional features to enable */
  features: string[];
  /** Whether this dependency is optional */
  optional: boolean;
  /** Whether to include default features */
  default_features: boolean;
  /** Platform-specific target (e.g., "cfg(unix)") */
  target: string | null;
  /** Dependency kind: normal, dev, or build */
  kind: 'normal' | 'dev' | 'build';
  /** Alternative registry URL */
  registry: string | null;
  /** Rename to different package name */
  package: string | null;
}

/**
 * Single version entry in the Cargo index file
 * Each line in the index file is one of these as JSON
 */
export interface ICargoIndexEntry {
  /** Crate name */
  name: string;
  /** Version string */
  vers: string;
  /** Dependencies */
  deps: ICargoDepend[];
  /** SHA256 checksum of the .crate file (hex) */
  cksum: string;
  /** Features (legacy format) */
  features: Record<string, string[]>;
  /** Features (extended format for newer Cargo) */
  features2?: Record<string, string[]>;
  /** Whether this version is yanked (deprecated but not deleted) */
  yanked: boolean;
  /** Optional native library link */
  links?: string | null;
  /** Index format version (2 is current) */
  v?: number;
  /** Minimum Rust version required */
  rust_version?: string;
}

/**
 * Metadata sent during crate publication
 */
export interface ICargoPublishMetadata {
  /** Crate name */
  name: string;
  /** Version string */
  vers: string;
  /** Dependencies */
  deps: ICargoDepend[];
  /** Features */
  features: Record<string, string[]>;
  /** Authors */
  authors: string[];
  /** Short description */
  description?: string;
  /** Documentation URL */
  documentation?: string;
  /** Homepage URL */
  homepage?: string;
  /** README content */
  readme?: string;
  /** README file path */
  readme_file?: string;
  /** Keywords for search */
  keywords?: string[];
  /** Categories */
  categories?: string[];
  /** License identifier (SPDX) */
  license?: string;
  /** License file path */
  license_file?: string;
  /** Repository URL */
  repository?: string;
  /** Badges */
  badges?: Record<string, any>;
  /** Native library link */
  links?: string | null;
  /** Minimum Rust version */
  rust_version?: string;
}

/**
 * Registry configuration (config.json)
 * Required for sparse protocol support
 */
export interface ICargoConfig {
  /** Download URL template */
  dl: string;
  /** API base URL */
  api: string;
  /** Whether authentication is required for downloads */
  'auth-required'?: boolean;
}

/**
 * Search result for a single crate
 */
export interface ICargoSearchResult {
  /** Crate name */
  name: string;
  /** Latest/maximum version */
  max_version: string;
  /** Description */
  description: string;
}

/**
 * Search response structure
 */
export interface ICargoSearchResponse {
  /** Array of matching crates */
  crates: ICargoSearchResult[];
  /** Metadata about results */
  meta: {
    /** Total number of results */
    total: number;
  };
}

/**
 * Error response structure
 */
export interface ICargoError {
  /** Array of error details */
  errors: Array<{
    /** Error message */
    detail: string;
  }>;
}

/**
 * Publish success response
 */
export interface ICargoPublishResponse {
  /** Warnings from validation */
  warnings: {
    /** Invalid categories */
    invalid_categories: string[];
    /** Invalid badges */
    invalid_badges: string[];
    /** Other warnings */
    other: string[];
  };
}

/**
 * Yank/Unyank response
 */
export interface ICargoYankResponse {
  /** Success indicator */
  ok: boolean;
}
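For reference, one line of a sparse index file is a single ICargoIndexEntry serialized as JSON (the registry's handleIndexFile joins entries with newlines). An illustrative line, with placeholder values:

    {"name":"demo-crate","vers":"0.1.0","deps":[],"cksum":"<sha256 hex of the .crate file>","features":{},"yanked":false,"links":null,"v":2}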
@@ -4,10 +4,13 @@ import { BaseRegistry } from './core/classes.baseregistry.js';
 import type { IRegistryConfig, IRequestContext, IResponse } from './core/interfaces.core.js';
 import { OciRegistry } from './oci/classes.ociregistry.js';
 import { NpmRegistry } from './npm/classes.npmregistry.js';
+import { MavenRegistry } from './maven/classes.mavenregistry.js';
+import { CargoRegistry } from './cargo/classes.cargoregistry.js';
+import { ComposerRegistry } from './composer/classes.composerregistry.js';
 
 /**
  * Main registry orchestrator
- * Routes requests to appropriate protocol handlers (OCI or NPM)
+ * Routes requests to appropriate protocol handlers (OCI, NPM, Maven, Cargo, or Composer)
  */
 export class SmartRegistry {
   private storage: RegistryStorage;
@@ -51,6 +54,33 @@ export class SmartRegistry {
       this.registries.set('npm', npmRegistry);
     }
 
+    // Initialize Maven registry if enabled
+    if (this.config.maven?.enabled) {
+      const mavenBasePath = this.config.maven.basePath || '/maven';
+      const registryUrl = `http://localhost:5000${mavenBasePath}`; // TODO: Make configurable
+      const mavenRegistry = new MavenRegistry(this.storage, this.authManager, mavenBasePath, registryUrl);
+      await mavenRegistry.init();
+      this.registries.set('maven', mavenRegistry);
+    }
+
+    // Initialize Cargo registry if enabled
+    if (this.config.cargo?.enabled) {
+      const cargoBasePath = this.config.cargo.basePath || '/cargo';
+      const registryUrl = `http://localhost:5000${cargoBasePath}`; // TODO: Make configurable
+      const cargoRegistry = new CargoRegistry(this.storage, this.authManager, cargoBasePath, registryUrl);
+      await cargoRegistry.init();
+      this.registries.set('cargo', cargoRegistry);
+    }
+
+    // Initialize Composer registry if enabled
+    if (this.config.composer?.enabled) {
+      const composerBasePath = this.config.composer.basePath || '/composer';
+      const registryUrl = `http://localhost:5000${composerBasePath}`; // TODO: Make configurable
+      const composerRegistry = new ComposerRegistry(this.storage, this.authManager, composerBasePath, registryUrl);
+      await composerRegistry.init();
+      this.registries.set('composer', composerRegistry);
+    }
+
     this.initialized = true;
   }
 
@@ -77,6 +107,30 @@ export class SmartRegistry {
       }
     }
 
+    // Route to Maven registry
+    if (this.config.maven?.enabled && path.startsWith(this.config.maven.basePath)) {
+      const mavenRegistry = this.registries.get('maven');
+      if (mavenRegistry) {
+        return mavenRegistry.handleRequest(context);
+      }
+    }
+
+    // Route to Cargo registry
+    if (this.config.cargo?.enabled && path.startsWith(this.config.cargo.basePath)) {
+      const cargoRegistry = this.registries.get('cargo');
+      if (cargoRegistry) {
+        return cargoRegistry.handleRequest(context);
+      }
+    }
+
+    // Route to Composer registry
+    if (this.config.composer?.enabled && path.startsWith(this.config.composer.basePath)) {
+      const composerRegistry = this.registries.get('composer');
+      if (composerRegistry) {
+        return composerRegistry.handleRequest(context);
+      }
+    }
+
     // No matching registry
     return {
       status: 404,
@@ -105,7 +159,7 @@ export class SmartRegistry {
   /**
    * Get a specific registry handler
    */
-  public getRegistry(protocol: 'oci' | 'npm'): BaseRegistry | undefined {
+  public getRegistry(protocol: 'oci' | 'npm' | 'maven' | 'cargo' | 'composer'): BaseRegistry | undefined {
     return this.registries.get(protocol);
   }
 
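A sketch of how the new protocol handlers would be switched on when constructing SmartRegistry, assuming IRegistryConfig exposes per-protocol blocks with at least the `enabled` and `basePath` fields read by the code above (other required configuration such as storage and auth is omitted here):

    // Assumption: the config shape mirrors this.config.maven?.enabled / basePath usage above.
    const registry = new SmartRegistry({
      // ...storage and auth configuration...
      maven: { enabled: true, basePath: '/maven' },
      cargo: { enabled: true, basePath: '/cargo' },
      composer: { enabled: true, basePath: '/composer' },
    } as IRegistryConfig);
    await registry.init(); // assumption: an async init() sets the `initialized` flag seen above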
|||||||
ts/composer/classes.composerregistry.ts (new file, 459 lines)
@@ -0,0 +1,459 @@
/**
 * Composer Registry Implementation
 * Compliant with Composer v2 repository API
 */

import { BaseRegistry } from '../core/classes.baseregistry.js';
import type { RegistryStorage } from '../core/classes.registrystorage.js';
import type { AuthManager } from '../core/classes.authmanager.js';
import type { IRequestContext, IResponse, IAuthToken } from '../core/interfaces.core.js';
import type {
  IComposerPackage,
  IComposerPackageMetadata,
  IComposerRepository,
} from './interfaces.composer.js';
import {
  normalizeVersion,
  validateComposerJson,
  extractComposerJsonFromZip,
  calculateSha1,
  parseVendorPackage,
  generatePackagesJson,
  sortVersions,
} from './helpers.composer.js';

export class ComposerRegistry extends BaseRegistry {
  private storage: RegistryStorage;
  private authManager: AuthManager;
  private basePath: string = '/composer';
  private registryUrl: string;

  constructor(
    storage: RegistryStorage,
    authManager: AuthManager,
    basePath: string = '/composer',
    registryUrl: string = 'http://localhost:5000/composer'
  ) {
    super();
    this.storage = storage;
    this.authManager = authManager;
    this.basePath = basePath;
    this.registryUrl = registryUrl;
  }

  public async init(): Promise<void> {
    // Composer registry initialization
  }

  public getBasePath(): string {
    return this.basePath;
  }

  public async handleRequest(context: IRequestContext): Promise<IResponse> {
    const path = context.path.replace(this.basePath, '');

    // Extract token from Authorization header
    const authHeader = context.headers['authorization'] || context.headers['Authorization'];
    let token: IAuthToken | null = null;

    if (authHeader) {
      if (authHeader.startsWith('Bearer ')) {
        const tokenString = authHeader.replace(/^Bearer\s+/i, '');
        token = await this.authManager.validateToken(tokenString, 'composer');
      } else if (authHeader.startsWith('Basic ')) {
        // Handle HTTP Basic Auth
        const credentials = Buffer.from(authHeader.replace(/^Basic\s+/i, ''), 'base64').toString('utf-8');
        const [username, password] = credentials.split(':');
        const userId = await this.authManager.authenticate({ username, password });
        if (userId) {
          // Create temporary token for this request
          token = {
            type: 'composer',
            userId,
            scopes: ['composer:*:*:read'],
            readonly: true,
          };
        }
      }
    }

    // Root packages.json
    if (path === '/packages.json' || path === '' || path === '/') {
      return this.handlePackagesJson();
    }

    // Package metadata: /p2/{vendor}/{package}.json or /p2/{vendor}/{package}~dev.json
    const metadataMatch = path.match(/^\/p2\/([^\/]+\/[^\/]+?)(~dev)?\.json$/);
    if (metadataMatch) {
      const [, vendorPackage, devSuffix] = metadataMatch;
      const includeDev = !!devSuffix;
      return this.handlePackageMetadata(vendorPackage, includeDev, token);
    }

    // Package list: /packages/list.json?filter=vendor/*
    if (path.startsWith('/packages/list.json')) {
      const filter = context.query['filter'];
      return this.handlePackageList(filter, token);
    }

    // Package ZIP download: /dists/{vendor}/{package}/{reference}.zip
    const distMatch = path.match(/^\/dists\/([^\/]+\/[^\/]+)\/([^\/]+)\.zip$/);
    if (distMatch) {
      const [, vendorPackage, reference] = distMatch;
      return this.handlePackageDownload(vendorPackage, reference, token);
    }

    // Package upload: PUT /packages/{vendor}/{package}
    const uploadMatch = path.match(/^\/packages\/([^\/]+\/[^\/]+)$/);
    if (uploadMatch && context.method === 'PUT') {
      const vendorPackage = uploadMatch[1];
      return this.handlePackageUpload(vendorPackage, context.body, token);
    }

    // Package delete: DELETE /packages/{vendor}/{package}
    if (uploadMatch && context.method === 'DELETE') {
      const vendorPackage = uploadMatch[1];
      return this.handlePackageDelete(vendorPackage, token);
    }

    // Version delete: DELETE /packages/{vendor}/{package}/{version}
    const versionDeleteMatch = path.match(/^\/packages\/([^\/]+\/[^\/]+)\/(.+)$/);
    if (versionDeleteMatch && context.method === 'DELETE') {
      const [, vendorPackage, version] = versionDeleteMatch;
      return this.handleVersionDelete(vendorPackage, version, token);
    }

    return {
      status: 404,
      headers: { 'Content-Type': 'application/json' },
      body: { status: 'error', message: 'Not found' },
    };
  }

  protected async checkPermission(
    token: IAuthToken | null,
    resource: string,
    action: string
  ): Promise<boolean> {
    if (!token) return false;
    return this.authManager.authorize(token, `composer:package:${resource}`, action);
  }

  // ========================================================================
  // REQUEST HANDLERS
  // ========================================================================

  private async handlePackagesJson(): Promise<IResponse> {
    const availablePackages = await this.storage.listComposerPackages();
    const packagesJson = generatePackagesJson(this.registryUrl, availablePackages);

    return {
      status: 200,
      headers: { 'Content-Type': 'application/json' },
      body: packagesJson,
    };
  }

  private async handlePackageMetadata(
    vendorPackage: string,
    includeDev: boolean,
    token: IAuthToken | null
  ): Promise<IResponse> {
    // Read operations are public, no authentication required
    const metadata = await this.storage.getComposerPackageMetadata(vendorPackage);

    if (!metadata) {
      return {
        status: 404,
        headers: { 'Content-Type': 'application/json' },
        body: { status: 'error', message: 'Package not found' },
      };
    }

    // Filter dev versions if needed
    let packages = metadata.packages[vendorPackage] || [];
    if (!includeDev) {
      packages = packages.filter((pkg: IComposerPackage) =>
        !pkg.version.includes('dev') && !pkg.version.includes('alpha') && !pkg.version.includes('beta')
      );
    }

    const response: IComposerPackageMetadata = {
      minified: 'composer/2.0',
      packages: {
        [vendorPackage]: packages,
      },
    };

    return {
      status: 200,
      headers: {
        'Content-Type': 'application/json',
        'Last-Modified': metadata.lastModified || new Date().toUTCString(),
      },
      body: response,
    };
  }

  private async handlePackageList(
    filter: string | undefined,
    token: IAuthToken | null
  ): Promise<IResponse> {
    let packages = await this.storage.listComposerPackages();

    // Apply filter if provided
    if (filter) {
      const regex = new RegExp('^' + filter.replace(/\*/g, '.*') + '$');
      packages = packages.filter(pkg => regex.test(pkg));
    }

    return {
      status: 200,
      headers: { 'Content-Type': 'application/json' },
      body: { packageNames: packages },
    };
  }

  private async handlePackageDownload(
    vendorPackage: string,
    reference: string,
    token: IAuthToken | null
  ): Promise<IResponse> {
    // Read operations are public, no authentication required
    const zipData = await this.storage.getComposerPackageZip(vendorPackage, reference);

    if (!zipData) {
      return {
        status: 404,
        headers: {},
        body: { status: 'error', message: 'Package file not found' },
      };
    }

    return {
      status: 200,
      headers: {
        'Content-Type': 'application/zip',
        'Content-Length': zipData.length.toString(),
        'Content-Disposition': `attachment; filename="${reference}.zip"`,
      },
      body: zipData,
    };
  }

  private async handlePackageUpload(
    vendorPackage: string,
    body: any,
    token: IAuthToken | null
  ): Promise<IResponse> {
    // Check write permission
    if (!await this.checkPermission(token, vendorPackage, 'write')) {
      return {
        status: 401,
        headers: {},
        body: { status: 'error', message: 'Write permission required' },
      };
    }

    if (!body || !Buffer.isBuffer(body)) {
      return {
        status: 400,
        headers: {},
        body: { status: 'error', message: 'ZIP file required' },
      };
    }

    // Extract and validate composer.json from ZIP
    const composerJson = await extractComposerJsonFromZip(body);
    if (!composerJson || !validateComposerJson(composerJson)) {
      return {
        status: 400,
        headers: {},
        body: { status: 'error', message: 'Invalid composer.json in ZIP' },
      };
    }

    // Verify package name matches
    if (composerJson.name !== vendorPackage) {
      return {
        status: 400,
        headers: {},
        body: { status: 'error', message: 'Package name mismatch' },
      };
    }

    const version = composerJson.version;
    if (!version) {
      return {
        status: 400,
        headers: {},
        body: { status: 'error', message: 'Version required in composer.json' },
      };
    }

    // Calculate SHA-1 hash
    const shasum = await calculateSha1(body);

    // Generate reference (use version or commit hash)
    const reference = composerJson.source?.reference || version.replace(/[^a-zA-Z0-9.-]/g, '-');

    // Store ZIP file
    await this.storage.putComposerPackageZip(vendorPackage, reference, body);

    // Get or create metadata
    let metadata = await this.storage.getComposerPackageMetadata(vendorPackage);
    if (!metadata) {
      metadata = {
        packages: {
          [vendorPackage]: [],
        },
        lastModified: new Date().toUTCString(),
      };
    }

    // Build package entry
    const packageEntry: IComposerPackage = {
      ...composerJson,
      version_normalized: normalizeVersion(version),
      dist: {
        type: 'zip',
        url: `${this.registryUrl}/dists/${vendorPackage}/${reference}.zip`,
        reference,
        shasum,
      },
      time: new Date().toISOString(),
    };

    // Add to metadata (check if version already exists)
    const packages = metadata.packages[vendorPackage] || [];
    const existingIndex = packages.findIndex((p: IComposerPackage) => p.version === version);

    if (existingIndex >= 0) {
      return {
        status: 409,
        headers: {},
        body: { status: 'error', message: 'Version already exists' },
      };
    }

    packages.push(packageEntry);

    // Sort by version
    const sortedVersions = sortVersions(packages.map((p: IComposerPackage) => p.version));
    packages.sort((a: IComposerPackage, b: IComposerPackage) => {
      return sortedVersions.indexOf(a.version) - sortedVersions.indexOf(b.version);
    });

    metadata.packages[vendorPackage] = packages;
    metadata.lastModified = new Date().toUTCString();

    // Store updated metadata
    await this.storage.putComposerPackageMetadata(vendorPackage, metadata);

    return {
      status: 201,
      headers: {},
      body: {
        status: 'success',
        message: 'Package uploaded successfully',
        package: vendorPackage,
        version,
      },
    };
  }

  private async handlePackageDelete(
    vendorPackage: string,
    token: IAuthToken | null
  ): Promise<IResponse> {
    // Check delete permission
    if (!await this.checkPermission(token, vendorPackage, 'delete')) {
      return {
        status: 401,
        headers: {},
        body: { status: 'error', message: 'Delete permission required' },
      };
    }

    const metadata = await this.storage.getComposerPackageMetadata(vendorPackage);
    if (!metadata) {
      return {
        status: 404,
        headers: {},
        body: { status: 'error', message: 'Package not found' },
      };
    }

    // Delete all ZIP files
    const packages = metadata.packages[vendorPackage] || [];
    for (const pkg of packages) {
      if (pkg.dist?.reference) {
        await this.storage.deleteComposerPackageZip(vendorPackage, pkg.dist.reference);
      }
    }

    // Delete metadata
    await this.storage.deleteComposerPackageMetadata(vendorPackage);

    return {
      status: 204,
      headers: {},
      body: null,
    };
  }

  private async handleVersionDelete(
    vendorPackage: string,
    version: string,
    token: IAuthToken | null
  ): Promise<IResponse> {
    // Check delete permission
    if (!await this.checkPermission(token, vendorPackage, 'delete')) {
      return {
        status: 401,
        headers: {},
        body: { status: 'error', message: 'Delete permission required' },
      };
    }

    const metadata = await this.storage.getComposerPackageMetadata(vendorPackage);
    if (!metadata) {
      return {
        status: 404,
        headers: {},
        body: { status: 'error', message: 'Package not found' },
      };
    }

    const packages = metadata.packages[vendorPackage] || [];
    const versionIndex = packages.findIndex((p: IComposerPackage) => p.version === version);

    if (versionIndex === -1) {
      return {
        status: 404,
        headers: {},
        body: { status: 'error', message: 'Version not found' },
      };
    }

    // Delete ZIP file
    const pkg = packages[versionIndex];
    if (pkg.dist?.reference) {
      await this.storage.deleteComposerPackageZip(vendorPackage, pkg.dist.reference);
    }

    // Remove from metadata
    packages.splice(versionIndex, 1);
    metadata.packages[vendorPackage] = packages;
    metadata.lastModified = new Date().toUTCString();

    // Save updated metadata
    await this.storage.putComposerPackageMetadata(vendorPackage, metadata);

    return {
      status: 204,
      headers: {},
      body: null,
    };
  }
}
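Under the routes implemented above, a publish followed by a metadata fetch on an already-constructed ComposerRegistry looks roughly like this; the package name, token, and zipBuffer are placeholders, only the paths and status codes come from the class:

// zipBuffer: a Buffer holding a ZIP whose composer.json names the hypothetical package "acme/widgets".
const publishResponse = await composerRegistry.handleRequest({
  method: 'PUT',
  path: '/composer/packages/acme/widgets',
  headers: { authorization: 'Bearer <composer-uuid-token>' },
  query: {},
  body: zipBuffer,
});
// 201 on success, 409 if that version already exists, 400 if composer.json is missing or mismatched.

const metadataResponse = await composerRegistry.handleRequest({
  method: 'GET',
  path: '/composer/p2/acme/widgets.json',
  headers: {},
  query: {},
  body: null,
});
// metadataResponse.body.packages['acme/widgets'] lists the stable (non dev/alpha/beta) versions.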
ts/composer/helpers.composer.ts (new file, 139 lines)
@@ -0,0 +1,139 @@
/**
 * Composer Registry Helper Functions
 */

import type { IComposerPackage } from './interfaces.composer.js';

/**
 * Normalize version string to Composer format
 * Example: "1.0.0" -> "1.0.0.0", "v2.3.1" -> "2.3.1.0"
 */
export function normalizeVersion(version: string): string {
  // Remove 'v' prefix if present
  let normalized = version.replace(/^v/i, '');

  // Handle special versions (dev, alpha, beta, rc)
  if (normalized.includes('dev') || normalized.includes('alpha') || normalized.includes('beta') || normalized.includes('RC')) {
    // For dev versions, just return as-is with .0 appended if needed
    const parts = normalized.split(/[-+]/)[0].split('.');
    while (parts.length < 4) {
      parts.push('0');
    }
    return parts.slice(0, 4).join('.');
  }

  // Split by dots
  const parts = normalized.split('.');

  // Ensure 4 parts (major.minor.patch.build)
  while (parts.length < 4) {
    parts.push('0');
  }

  return parts.slice(0, 4).join('.');
}

/**
 * Validate composer.json structure
 */
export function validateComposerJson(composerJson: any): boolean {
  return !!(
    composerJson &&
    typeof composerJson.name === 'string' &&
    composerJson.name.includes('/') &&
    (composerJson.version || composerJson.require)
  );
}

/**
 * Extract composer.json from ZIP buffer
 */
export async function extractComposerJsonFromZip(zipBuffer: Buffer): Promise<any | null> {
  try {
    const AdmZip = (await import('adm-zip')).default;
    const zip = new AdmZip(zipBuffer);
    const entries = zip.getEntries();

    // Look for composer.json in root or first-level directory
    for (const entry of entries) {
      if (entry.entryName.endsWith('composer.json')) {
        const parts = entry.entryName.split('/');
        if (parts.length <= 2) { // Root or first-level dir
          const content = entry.getData().toString('utf-8');
          return JSON.parse(content);
        }
      }
    }

    return null;
  } catch (error) {
    return null;
  }
}

/**
 * Calculate SHA-1 hash for ZIP file
 */
export async function calculateSha1(data: Buffer): Promise<string> {
  const crypto = await import('crypto');
  return crypto.createHash('sha1').update(data).digest('hex');
}

/**
 * Parse vendor/package format
 */
export function parseVendorPackage(name: string): { vendor: string; package: string } | null {
  const parts = name.split('/');
  if (parts.length !== 2) {
    return null;
  }
  return { vendor: parts[0], package: parts[1] };
}

/**
 * Generate packages.json root repository file
 */
export function generatePackagesJson(
  registryUrl: string,
  availablePackages: string[]
): any {
  return {
    'metadata-url': `${registryUrl}/p2/%package%.json`,
    'available-packages': availablePackages,
  };
}

/**
 * Sort versions in semantic version order
 */
export function sortVersions(versions: string[]): string[] {
  return versions.sort((a, b) => {
    const aParts = a.replace(/^v/i, '').split(/[.-]/).map(part => {
      const num = parseInt(part, 10);
      return isNaN(num) ? part : num;
    });
    const bParts = b.replace(/^v/i, '').split(/[.-]/).map(part => {
      const num = parseInt(part, 10);
      return isNaN(num) ? part : num;
    });

    for (let i = 0; i < Math.max(aParts.length, bParts.length); i++) {
      const aPart = aParts[i] ?? 0;
      const bPart = bParts[i] ?? 0;

      // Compare numbers numerically, strings lexicographically
      if (typeof aPart === 'number' && typeof bPart === 'number') {
        if (aPart !== bPart) {
          return aPart - bPart;
        }
      } else {
        const aStr = String(aPart);
        const bStr = String(bPart);
        if (aStr !== bStr) {
          return aStr.localeCompare(bStr);
        }
      }
    }
    return 0;
  });
}
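For orientation, the helpers behave as follows; the first two normalizeVersion results are taken from its docblock, the rest follow directly from the code and the package/URL names are illustrative:

normalizeVersion('1.0.0');        // '1.0.0.0'
normalizeVersion('v2.3.1');       // '2.3.1.0'
normalizeVersion('1.0.0-beta1');  // '1.0.0.0' (pre-release suffix is dropped before padding)
parseVendorPackage('acme/widgets');          // { vendor: 'acme', package: 'widgets' }
sortVersions(['2.0.0', '1.10.0', '1.2.0']);  // ['1.2.0', '1.10.0', '2.0.0'] (numeric, not lexicographic)
generatePackagesJson('https://registry.example.com/composer', ['acme/widgets']);
// { 'metadata-url': 'https://registry.example.com/composer/p2/%package%.json',
//   'available-packages': ['acme/widgets'] }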
ts/composer/index.ts (new file, 8 lines)
@@ -0,0 +1,8 @@
/**
 * Composer Registry Module
 * Export all public interfaces, classes, and helpers
 */

export { ComposerRegistry } from './classes.composerregistry.js';
export * from './interfaces.composer.js';
export * from './helpers.composer.js';
ts/composer/interfaces.composer.ts (new file, 111 lines)
@@ -0,0 +1,111 @@
/**
 * Composer Registry Type Definitions
 * Compliant with Composer v2 repository API
 */

/**
 * Composer package metadata
 */
export interface IComposerPackage {
  name: string; // vendor/package-name
  version: string; // 1.0.0
  version_normalized: string; // 1.0.0.0
  type?: string; // library, project, metapackage
  description?: string;
  keywords?: string[];
  homepage?: string;
  license?: string[];
  authors?: IComposerAuthor[];
  require?: Record<string, string>;
  'require-dev'?: Record<string, string>;
  suggest?: Record<string, string>;
  provide?: Record<string, string>;
  conflict?: Record<string, string>;
  replace?: Record<string, string>;
  autoload?: IComposerAutoload;
  'autoload-dev'?: IComposerAutoload;
  dist?: IComposerDist;
  source?: IComposerSource;
  time?: string; // ISO 8601 timestamp
  support?: Record<string, string>;
  funding?: IComposerFunding[];
  extra?: Record<string, any>;
}

/**
 * Author information
 */
export interface IComposerAuthor {
  name: string;
  email?: string;
  homepage?: string;
  role?: string;
}

/**
 * PSR-4/PSR-0 autoloading configuration
 */
export interface IComposerAutoload {
  'psr-4'?: Record<string, string | string[]>;
  'psr-0'?: Record<string, string | string[]>;
  classmap?: string[];
  files?: string[];
  'exclude-from-classmap'?: string[];
}

/**
 * Distribution information (ZIP download)
 */
export interface IComposerDist {
  type: 'zip' | 'tar' | 'phar';
  url: string;
  reference?: string; // commit hash or tag
  shasum?: string; // SHA-1 hash
}

/**
 * Source repository information
 */
export interface IComposerSource {
  type: 'git' | 'svn' | 'hg';
  url: string;
  reference: string; // commit hash, branch, or tag
}

/**
 * Funding information
 */
export interface IComposerFunding {
  type: string; // github, patreon, etc.
  url: string;
}

/**
 * Repository metadata (packages.json)
 */
export interface IComposerRepository {
  packages?: Record<string, Record<string, IComposerPackage>>;
  'metadata-url'?: string; // /p2/%package%.json
  'available-packages'?: string[];
  'available-package-patterns'?: string[];
  'providers-url'?: string;
  'notify-batch'?: string;
  minified?: string; // "composer/2.0"
}

/**
 * Package metadata response (/p2/vendor/package.json)
 */
export interface IComposerPackageMetadata {
  packages: Record<string, IComposerPackage[]>;
  minified?: string;
  lastModified?: string;
}

/**
 * Error structure
 */
export interface IComposerError {
  status: string;
  message: string;
}
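To make the shapes concrete, a version entry as produced by the upload handler in classes.composerregistry.ts would look roughly like this; the package name, URL, and hash are illustrative:

const examplePackage: IComposerPackage = {
  name: 'acme/widgets',
  version: '1.2.0',
  version_normalized: '1.2.0.0',
  type: 'library',
  require: { php: '>=8.1' },
  dist: {
    type: 'zip',
    url: 'http://localhost:5000/composer/dists/acme/widgets/1.2.0.zip',
    reference: '1.2.0',
    shasum: '<sha1-of-uploaded-zip>', // placeholder
  },
  time: '2025-11-21T12:00:00.000Z',
};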
@@ -18,6 +18,39 @@ export class AuthManager {
    // In production, this could be Redis or a database
  }

  // ========================================================================
  // UUID TOKEN CREATION (Base method for NPM, Maven, etc.)
  // ========================================================================

  /**
   * Create a UUID-based token with custom scopes (base method)
   * @param userId - User ID
   * @param protocol - Protocol type
   * @param scopes - Permission scopes
   * @param readonly - Whether the token is readonly
   * @returns UUID token string
   */
  private async createUuidToken(
    userId: string,
    protocol: TRegistryProtocol,
    scopes: string[],
    readonly: boolean = false
  ): Promise<string> {
    const token = this.generateUuid();
    const authToken: IAuthToken = {
      type: protocol,
      userId,
      scopes,
      readonly,
      metadata: {
        created: new Date().toISOString(),
      },
    };

    this.tokenStore.set(token, authToken);
    return token;
  }

  // ========================================================================
  // NPM AUTHENTICATION
  // ========================================================================
@@ -33,19 +66,8 @@ export class AuthManager {
      throw new Error('NPM tokens are not enabled');
    }

-    const token = this.generateUuid();
-    const authToken: IAuthToken = {
-      type: 'npm',
-      userId,
-      scopes: readonly ? ['npm:*:*:read'] : ['npm:*:*:*'],
-      readonly,
-      metadata: {
-        created: new Date().toISOString(),
-      },
-    };
-
-    this.tokenStore.set(token, authToken);
-    return token;
+    const scopes = readonly ? ['npm:*:*:read'] : ['npm:*:*:*'];
+    return this.createUuidToken(userId, 'npm', scopes, readonly);
  }

  /**
@@ -201,8 +223,106 @@ export class AuthManager {
    return null;
  }

  // ========================================================================
  // MAVEN AUTHENTICATION
  // ========================================================================

  /**
-   * Validate any token (NPM or OCI)
+   * Create a Maven token
   * @param userId - User ID
   * @param readonly - Whether the token is readonly
   * @returns Maven UUID token
   */
  public async createMavenToken(userId: string, readonly: boolean = false): Promise<string> {
    const scopes = readonly ? ['maven:*:*:read'] : ['maven:*:*:*'];
    return this.createUuidToken(userId, 'maven', scopes, readonly);
  }

  /**
   * Validate a Maven token
   * @param token - Maven UUID token
   * @returns Auth token object or null
   */
  public async validateMavenToken(token: string): Promise<IAuthToken | null> {
    if (!this.isValidUuid(token)) {
      return null;
    }

    const authToken = this.tokenStore.get(token);
    if (!authToken || authToken.type !== 'maven') {
      return null;
    }

    // Check expiration if set
    if (authToken.expiresAt && authToken.expiresAt < new Date()) {
      this.tokenStore.delete(token);
      return null;
    }

    return authToken;
  }

  /**
   * Revoke a Maven token
   * @param token - Maven UUID token
   */
  public async revokeMavenToken(token: string): Promise<void> {
    this.tokenStore.delete(token);
  }

  // ========================================================================
  // COMPOSER TOKEN MANAGEMENT
  // ========================================================================

  /**
   * Create a Composer token
   * @param userId - User ID
   * @param readonly - Whether the token is readonly
   * @returns Composer UUID token
   */
  public async createComposerToken(userId: string, readonly: boolean = false): Promise<string> {
    const scopes = readonly ? ['composer:*:*:read'] : ['composer:*:*:*'];
    return this.createUuidToken(userId, 'composer', scopes, readonly);
  }

  /**
   * Validate a Composer token
   * @param token - Composer UUID token
   * @returns Auth token object or null
   */
  public async validateComposerToken(token: string): Promise<IAuthToken | null> {
    if (!this.isValidUuid(token)) {
      return null;
    }

    const authToken = this.tokenStore.get(token);
    if (!authToken || authToken.type !== 'composer') {
      return null;
    }

    // Check expiration if set
    if (authToken.expiresAt && authToken.expiresAt < new Date()) {
      this.tokenStore.delete(token);
      return null;
    }

    return authToken;
  }

  /**
   * Revoke a Composer token
   * @param token - Composer UUID token
   */
  public async revokeComposerToken(token: string): Promise<void> {
    this.tokenStore.delete(token);
  }

  // ========================================================================
  // UNIFIED AUTHENTICATION
  // ========================================================================

  /**
   * Validate any token (NPM, Maven, Composer, or OCI)
   * @param tokenString - Token string (UUID or JWT)
   * @param protocol - Expected protocol type
   * @returns Auth token object or null
@@ -211,12 +331,25 @@ export class AuthManager {
    tokenString: string,
    protocol?: TRegistryProtocol
  ): Promise<IAuthToken | null> {
-    // Try NPM token first (UUID format)
+    // Try UUID-based tokens (NPM, Maven, Composer)
    if (this.isValidUuid(tokenString)) {
      // Try NPM token
      const npmToken = await this.validateNpmToken(tokenString);
      if (npmToken && (!protocol || protocol === 'npm')) {
        return npmToken;
      }

      // Try Maven token
      const mavenToken = await this.validateMavenToken(tokenString);
      if (mavenToken && (!protocol || protocol === 'maven')) {
        return mavenToken;
      }

      // Try Composer token
      const composerToken = await this.validateComposerToken(tokenString);
      if (composerToken && (!protocol || protocol === 'composer')) {
        return composerToken;
      }
    }

    // Try OCI JWT
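Putting the token plumbing together, a typical flow on an already-constructed AuthManager instance would be the following; the user id is illustrative and the null results follow from the type checks above:

const mavenToken = await authManager.createMavenToken('user-123');             // scopes: ['maven:*:*:*']
const readOnlyToken = await authManager.createComposerToken('user-123', true); // scopes: ['composer:*:*:read']

await authManager.validateToken(mavenToken, 'maven');    // -> IAuthToken with type 'maven'
await authManager.validateToken(mavenToken, 'composer'); // skips the UUID branches; expected to resolve to null
await authManager.revokeMavenToken(mavenToken);
await authManager.validateMavenToken(mavenToken);        // -> null (token removed from the store)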
@@ -267,4 +267,338 @@ export class RegistryStorage implements IStorageBackend {
    const safeName = packageName.replace('@', '').replace('/', '-');
    return `npm/packages/${packageName}/${safeName}-${version}.tgz`;
  }

  // ========================================================================
  // MAVEN STORAGE METHODS
  // ========================================================================

  /**
   * Get Maven artifact
   */
  public async getMavenArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): Promise<Buffer | null> {
    const path = this.getMavenArtifactPath(groupId, artifactId, version, filename);
    return this.getObject(path);
  }

  /**
   * Store Maven artifact
   */
  public async putMavenArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string,
    data: Buffer
  ): Promise<void> {
    const path = this.getMavenArtifactPath(groupId, artifactId, version, filename);
    return this.putObject(path, data);
  }

  /**
   * Check if Maven artifact exists
   */
  public async mavenArtifactExists(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): Promise<boolean> {
    const path = this.getMavenArtifactPath(groupId, artifactId, version, filename);
    return this.objectExists(path);
  }

  /**
   * Delete Maven artifact
   */
  public async deleteMavenArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): Promise<void> {
    const path = this.getMavenArtifactPath(groupId, artifactId, version, filename);
    return this.deleteObject(path);
  }

  /**
   * Get Maven metadata (maven-metadata.xml)
   */
  public async getMavenMetadata(
    groupId: string,
    artifactId: string
  ): Promise<Buffer | null> {
    const path = this.getMavenMetadataPath(groupId, artifactId);
    return this.getObject(path);
  }

  /**
   * Store Maven metadata (maven-metadata.xml)
   */
  public async putMavenMetadata(
    groupId: string,
    artifactId: string,
    data: Buffer
  ): Promise<void> {
    const path = this.getMavenMetadataPath(groupId, artifactId);
    return this.putObject(path, data);
  }

  /**
   * Delete Maven metadata (maven-metadata.xml)
   */
  public async deleteMavenMetadata(
    groupId: string,
    artifactId: string
  ): Promise<void> {
    const path = this.getMavenMetadataPath(groupId, artifactId);
    return this.deleteObject(path);
  }

  /**
   * List Maven versions for an artifact
   * Returns all version directories under the artifact path
   */
  public async listMavenVersions(
    groupId: string,
    artifactId: string
  ): Promise<string[]> {
    const groupPath = groupId.replace(/\./g, '/');
    const prefix = `maven/artifacts/${groupPath}/${artifactId}/`;

    const objects = await this.listObjects(prefix);
    const versions = new Set<string>();

    // Extract version from paths like: maven/artifacts/com/example/my-lib/1.0.0/my-lib-1.0.0.jar
    for (const obj of objects) {
      const relativePath = obj.substring(prefix.length);
      const parts = relativePath.split('/');
      if (parts.length >= 1 && parts[0]) {
        versions.add(parts[0]);
      }
    }

    return Array.from(versions).sort();
  }

  // ========================================================================
  // MAVEN PATH HELPERS
  // ========================================================================

  private getMavenArtifactPath(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): string {
    const groupPath = groupId.replace(/\./g, '/');
    return `maven/artifacts/${groupPath}/${artifactId}/${version}/${filename}`;
  }

  private getMavenMetadataPath(groupId: string, artifactId: string): string {
    const groupPath = groupId.replace(/\./g, '/');
    return `maven/metadata/${groupPath}/${artifactId}/maven-metadata.xml`;
  }

  // ========================================================================
  // CARGO-SPECIFIC HELPERS
  // ========================================================================

  /**
   * Get Cargo config.json
   */
  public async getCargoConfig(): Promise<any | null> {
    const data = await this.getObject('cargo/config.json');
    return data ? JSON.parse(data.toString('utf-8')) : null;
  }

  /**
   * Store Cargo config.json
   */
  public async putCargoConfig(config: any): Promise<void> {
    const data = Buffer.from(JSON.stringify(config, null, 2), 'utf-8');
    return this.putObject('cargo/config.json', data, { 'Content-Type': 'application/json' });
  }

  /**
   * Get Cargo index file (newline-delimited JSON)
   */
  public async getCargoIndex(crateName: string): Promise<any[] | null> {
    const path = this.getCargoIndexPath(crateName);
    const data = await this.getObject(path);
    if (!data) return null;

    // Parse newline-delimited JSON
    const lines = data.toString('utf-8').split('\n').filter(line => line.trim());
    return lines.map(line => JSON.parse(line));
  }

  /**
   * Store Cargo index file
   */
  public async putCargoIndex(crateName: string, entries: any[]): Promise<void> {
    const path = this.getCargoIndexPath(crateName);
    // Convert to newline-delimited JSON
    const data = Buffer.from(entries.map(e => JSON.stringify(e)).join('\n') + '\n', 'utf-8');
    return this.putObject(path, data, { 'Content-Type': 'text/plain' });
  }

  /**
   * Get Cargo .crate file
   */
  public async getCargoCrate(crateName: string, version: string): Promise<Buffer | null> {
    const path = this.getCargoCratePath(crateName, version);
    return this.getObject(path);
  }

  /**
   * Store Cargo .crate file
   */
  public async putCargoCrate(
    crateName: string,
    version: string,
    crateFile: Buffer
  ): Promise<void> {
    const path = this.getCargoCratePath(crateName, version);
    return this.putObject(path, crateFile, { 'Content-Type': 'application/gzip' });
  }

  /**
   * Check if Cargo crate exists
   */
  public async cargoCrateExists(crateName: string, version: string): Promise<boolean> {
    const path = this.getCargoCratePath(crateName, version);
    return this.objectExists(path);
  }

  /**
   * Delete Cargo crate (for cleanup, not for unpublishing)
   */
  public async deleteCargoCrate(crateName: string, version: string): Promise<void> {
    const path = this.getCargoCratePath(crateName, version);
    return this.deleteObject(path);
  }

  // ========================================================================
  // CARGO PATH HELPERS
  // ========================================================================

  private getCargoIndexPath(crateName: string): string {
    const lower = crateName.toLowerCase();
    const len = lower.length;

    if (len === 1) {
      return `cargo/index/1/${lower}`;
    } else if (len === 2) {
      return `cargo/index/2/${lower}`;
    } else if (len === 3) {
      return `cargo/index/3/${lower.charAt(0)}/${lower}`;
    } else {
      // 4+ characters: {first-two}/{second-two}/{name}
      const prefix1 = lower.substring(0, 2);
      const prefix2 = lower.substring(2, 4);
      return `cargo/index/${prefix1}/${prefix2}/${lower}`;
    }
  }

  private getCargoCratePath(crateName: string, version: string): string {
    return `cargo/crates/${crateName}/${crateName}-${version}.crate`;
  }

  // ========================================================================
  // COMPOSER-SPECIFIC HELPERS
  // ========================================================================

  /**
   * Get Composer package metadata
   */
  public async getComposerPackageMetadata(vendorPackage: string): Promise<any | null> {
    const path = this.getComposerMetadataPath(vendorPackage);
    const data = await this.getObject(path);
    return data ? JSON.parse(data.toString('utf-8')) : null;
  }

  /**
   * Store Composer package metadata
   */
  public async putComposerPackageMetadata(vendorPackage: string, metadata: any): Promise<void> {
    const path = this.getComposerMetadataPath(vendorPackage);
    const data = Buffer.from(JSON.stringify(metadata, null, 2), 'utf-8');
    return this.putObject(path, data, { 'Content-Type': 'application/json' });
  }

  /**
   * Get Composer package ZIP
   */
  public async getComposerPackageZip(vendorPackage: string, reference: string): Promise<Buffer | null> {
    const path = this.getComposerZipPath(vendorPackage, reference);
    return this.getObject(path);
  }

  /**
   * Store Composer package ZIP
   */
  public async putComposerPackageZip(vendorPackage: string, reference: string, zipData: Buffer): Promise<void> {
    const path = this.getComposerZipPath(vendorPackage, reference);
    return this.putObject(path, zipData, { 'Content-Type': 'application/zip' });
  }

  /**
   * Check if Composer package metadata exists
   */
  public async composerPackageMetadataExists(vendorPackage: string): Promise<boolean> {
    const path = this.getComposerMetadataPath(vendorPackage);
    return this.objectExists(path);
  }

  /**
   * Delete Composer package metadata
   */
  public async deleteComposerPackageMetadata(vendorPackage: string): Promise<void> {
    const path = this.getComposerMetadataPath(vendorPackage);
    return this.deleteObject(path);
  }

  /**
   * Delete Composer package ZIP
   */
  public async deleteComposerPackageZip(vendorPackage: string, reference: string): Promise<void> {
    const path = this.getComposerZipPath(vendorPackage, reference);
    return this.deleteObject(path);
  }

  /**
   * List all Composer packages
   */
  public async listComposerPackages(): Promise<string[]> {
    const prefix = 'composer/packages/';
    const objects = await this.listObjects(prefix);
    const packages = new Set<string>();

    // Extract vendor/package from paths like: composer/packages/vendor/package/metadata.json
    for (const obj of objects) {
      const match = obj.match(/^composer\/packages\/([^\/]+\/[^\/]+)\/metadata\.json$/);
      if (match) {
        packages.add(match[1]);
      }
    }

    return Array.from(packages).sort();
  }

  // ========================================================================
  // COMPOSER PATH HELPERS
  // ========================================================================

  private getComposerMetadataPath(vendorPackage: string): string {
    return `composer/packages/${vendorPackage}/metadata.json`;
  }

  private getComposerZipPath(vendorPackage: string, reference: string): string {
    return `composer/packages/${vendorPackage}/${reference}.zip`;
  }
}
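The path helpers above pin down the bucket layout. For illustration (the names are examples, and the Cargo sharding follows the crates.io index convention the code encodes):

// Cargo index keys by crate-name length:
//   'a'     -> cargo/index/1/a
//   'io'    -> cargo/index/2/io
//   'log'   -> cargo/index/3/l/log
//   'serde' -> cargo/index/se/rd/serde
// Crate files:  cargo/crates/serde/serde-1.0.0.crate
//
// Maven com.example:my-lib:1.0.0 (my-lib-1.0.0.jar):
//   maven/artifacts/com/example/my-lib/1.0.0/my-lib-1.0.0.jar
//   maven/metadata/com/example/my-lib/maven-metadata.xml
//
// Composer acme/widgets, reference 1.2.0:
//   composer/packages/acme/widgets/metadata.json
//   composer/packages/acme/widgets/1.2.0.zip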
@@ -5,7 +5,7 @@
/**
 * Registry protocol types
 */
-export type TRegistryProtocol = 'oci' | 'npm';
+export type TRegistryProtocol = 'oci' | 'npm' | 'maven' | 'cargo' | 'composer';

/**
 * Unified action types across protocols
@@ -89,6 +89,9 @@ export interface IRegistryConfig {
  auth: IAuthConfig;
  oci?: IProtocolConfig;
  npm?: IProtocolConfig;
  maven?: IProtocolConfig;
  cargo?: IProtocolConfig;
  composer?: IProtocolConfig;
}

/**
ts/index.ts
@@ -1,6 +1,6 @@
/**
 * @push.rocks/smartregistry
- * Composable registry supporting OCI and NPM protocols
+ * Composable registry supporting OCI, NPM, Maven, Cargo, and Composer protocols
 */

// Main orchestrator
@@ -14,3 +14,12 @@ export * from './oci/index.js';
// NPM Registry
export * from './npm/index.js';

// Maven Registry
export * from './maven/index.js';

// Cargo Registry
export * from './cargo/index.js';

// Composer Registry
export * from './composer/index.js';
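With these re-exports in place, consumers can reach the individual registries without deep imports, assuming each protocol module re-exports its registry class the way ts/composer/index.ts does:

import { SmartRegistry, MavenRegistry, ComposerRegistry } from '@push.rocks/smartregistry';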
ts/maven/classes.mavenregistry.ts (new file, 580 lines)
@@ -0,0 +1,580 @@
|
|||||||
|
/**
|
||||||
|
* Maven Registry Implementation
|
||||||
|
* Implements Maven repository protocol for Java artifacts
|
||||||
|
*/
|
||||||
|
|
||||||
|
import { BaseRegistry } from '../core/classes.baseregistry.js';
|
||||||
|
import type { RegistryStorage } from '../core/classes.registrystorage.js';
|
||||||
|
import type { AuthManager } from '../core/classes.authmanager.js';
|
||||||
|
import type { IRequestContext, IResponse, IAuthToken } from '../core/interfaces.core.js';
|
||||||
|
import type { IMavenCoordinate, IMavenMetadata, IChecksums } from './interfaces.maven.js';
|
||||||
|
import {
|
||||||
|
pathToGAV,
|
||||||
|
buildFilename,
|
||||||
|
calculateChecksums,
|
||||||
|
generateMetadataXml,
|
||||||
|
parseMetadataXml,
|
||||||
|
formatMavenTimestamp,
|
||||||
|
isSnapshot,
|
||||||
|
validatePom,
|
||||||
|
extractGAVFromPom,
|
||||||
|
gavToPath,
|
||||||
|
} from './helpers.maven.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Maven Registry class
|
||||||
|
* Handles Maven repository HTTP protocol
|
||||||
|
*/
|
||||||
|
export class MavenRegistry extends BaseRegistry {
|
||||||
|
private storage: RegistryStorage;
|
||||||
|
private authManager: AuthManager;
|
||||||
|
private basePath: string = '/maven';
|
||||||
|
private registryUrl: string;
|
||||||
|
|
||||||
|
constructor(
|
||||||
|
storage: RegistryStorage,
|
||||||
|
authManager: AuthManager,
|
||||||
|
basePath: string,
|
||||||
|
registryUrl: string
|
||||||
|
) {
|
||||||
|
super();
|
||||||
|
this.storage = storage;
|
||||||
|
this.authManager = authManager;
|
||||||
|
this.basePath = basePath;
|
||||||
|
this.registryUrl = registryUrl;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async init(): Promise<void> {
|
||||||
|
// No special initialization needed for Maven
|
||||||
|
}
|
||||||
|
|
||||||
|
public getBasePath(): string {
|
||||||
|
return this.basePath;
|
||||||
|
}
|
||||||
|
|
||||||
|
public async handleRequest(context: IRequestContext): Promise<IResponse> {
|
||||||
|
// Remove base path from URL
|
||||||
|
const path = context.path.replace(this.basePath, '');
|
||||||
|
|
||||||
|
// Extract token from Authorization header
|
||||||
|
const authHeader = context.headers['authorization'] || context.headers['Authorization'];
|
||||||
|
let token: IAuthToken | null = null;
|
||||||
|
|
||||||
|
if (authHeader) {
|
||||||
|
const tokenString = authHeader.replace(/^(Bearer|Basic)\s+/i, '');
|
||||||
|
// For now, try to validate as Maven token (reuse npm token type)
|
||||||
|
token = await this.authManager.validateToken(tokenString, 'maven');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse path to determine request type
|
||||||
|
const coordinate = pathToGAV(path);
|
||||||
|
|
||||||
|
if (!coordinate) {
|
||||||
|
// Not a valid artifact path, could be metadata or root
|
||||||
|
if (path.endsWith('/maven-metadata.xml')) {
|
||||||
|
return this.handleMetadataRequest(context.method, path, token);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 404,
|
||||||
|
headers: { 'Content-Type': 'application/json' },
|
||||||
|
body: { error: 'NOT_FOUND', message: 'Invalid Maven path' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if it's a checksum file
|
||||||
|
if (coordinate.extension === 'md5' || coordinate.extension === 'sha1' ||
|
||||||
|
coordinate.extension === 'sha256' || coordinate.extension === 'sha512') {
|
||||||
|
return this.handleChecksumRequest(context.method, coordinate, token, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Handle artifact requests (JAR, POM, WAR, etc.)
|
||||||
|
return this.handleArtifactRequest(context.method, coordinate, token, context.body);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected async checkPermission(
|
||||||
|
token: IAuthToken | null,
|
||||||
|
resource: string,
|
||||||
|
action: string
|
||||||
|
): Promise<boolean> {
|
||||||
|
if (!token) return false;
|
||||||
|
return this.authManager.authorize(token, `maven:artifact:${resource}`, action);
|
||||||
|
}
|
||||||
|
|
||||||
|
// ========================================================================
|
||||||
|
// REQUEST HANDLERS
|
||||||
|
// ========================================================================
|
||||||
|
|
||||||
|
private async handleArtifactRequest(
|
||||||
|
method: string,
|
||||||
|
coordinate: IMavenCoordinate,
|
||||||
|
token: IAuthToken | null,
|
||||||
|
body?: Buffer | any
|
||||||
|
): Promise<IResponse> {
|
||||||
|
const { groupId, artifactId, version } = coordinate;
|
||||||
|
const filename = buildFilename(coordinate);
|
||||||
|
const resource = `${groupId}:${artifactId}`;
|
||||||
|
|
||||||
|
switch (method) {
|
||||||
|
case 'GET':
|
||||||
|
case 'HEAD':
|
||||||
|
// Maven repositories typically allow anonymous reads
|
||||||
|
return method === 'GET'
|
||||||
|
? this.getArtifact(groupId, artifactId, version, filename)
|
||||||
|
: this.headArtifact(groupId, artifactId, version, filename);
|
||||||
|
|
||||||
|
case 'PUT':
|
||||||
|
// Write permission required
|
||||||
|
if (!await this.checkPermission(token, resource, 'write')) {
|
||||||
|
return {
|
||||||
|
status: 401,
|
||||||
|
headers: {
|
||||||
|
'WWW-Authenticate': `Bearer realm="${this.basePath}",service="maven-registry"`,
|
||||||
|
},
|
||||||
|
body: { error: 'UNAUTHORIZED', message: 'Write permission required' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!body) {
|
||||||
|
return {
|
||||||
|
status: 400,
|
||||||
|
headers: {},
|
||||||
|
body: { error: 'BAD_REQUEST', message: 'Request body required' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.putArtifact(groupId, artifactId, version, filename, coordinate, body);
|
||||||
|
|
||||||
|
case 'DELETE':
|
||||||
|
// Delete permission required
|
||||||
|
if (!await this.checkPermission(token, resource, 'delete')) {
|
||||||
|
return {
|
||||||
|
status: 401,
|
||||||
|
headers: {
|
||||||
|
'WWW-Authenticate': `Bearer realm="${this.basePath}",service="maven-registry"`,
|
||||||
|
},
|
||||||
|
body: { error: 'UNAUTHORIZED', message: 'Delete permission required' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
return this.deleteArtifact(groupId, artifactId, version, filename);
|
||||||
|
|
||||||
|
default:
|
||||||
|
return {
|
||||||
|
status: 405,
|
||||||
|
headers: { 'Allow': 'GET, HEAD, PUT, DELETE' },
|
||||||
|
body: { error: 'METHOD_NOT_ALLOWED', message: 'Method not allowed' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
private async handleChecksumRequest(
|
||||||
|
method: string,
|
||||||
|
coordinate: IMavenCoordinate,
|
||||||
|
token: IAuthToken | null,
|
||||||
|
path: string
|
||||||
|
): Promise<IResponse> {
|
||||||
|
const { groupId, artifactId, version, extension } = coordinate;
|
||||||
|
const resource = `${groupId}:${artifactId}`;
|
||||||
|
|
||||||
|
// Checksums follow the same permissions as their artifacts (public read)
|
||||||
|
if (method === 'GET' || method === 'HEAD') {
|
||||||
|
return this.getChecksum(groupId, artifactId, version, coordinate, path);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 405,
|
||||||
|
headers: { 'Allow': 'GET, HEAD' },
|
||||||
|
body: { error: 'METHOD_NOT_ALLOWED', message: 'Checksums are auto-generated' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
private async handleMetadataRequest(
|
||||||
|
method: string,
|
||||||
|
path: string,
|
||||||
|
token: IAuthToken | null
|
||||||
|
): Promise<IResponse> {
|
||||||
|
// Parse path to extract groupId and artifactId
|
||||||
|
// Path format: /com/example/my-lib/maven-metadata.xml
|
||||||
|
const parts = path.split('/').filter(p => p && p !== 'maven-metadata.xml');
|
||||||
|
|
||||||
|
if (parts.length < 2) {
|
||||||
|
return {
|
||||||
|
status: 400,
|
||||||
|
headers: {},
|
||||||
|
body: { error: 'BAD_REQUEST', message: 'Invalid metadata path' },
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
const artifactId = parts[parts.length - 1];
|
||||||
|
const groupId = parts.slice(0, -1).join('.');
|
||||||
|
const resource = `${groupId}:${artifactId}`;
|
||||||
|
|
||||||
|
if (method === 'GET') {
|
||||||
|
// Metadata is usually public (read permission optional)
|
||||||
|
// Some registries allow anonymous metadata access
|
||||||
|
return this.getMetadata(groupId, artifactId);
|
||||||
|
}
|
||||||
|
|
||||||
|
return {
|
||||||
|
status: 405,
|
||||||
|
headers: { 'Allow': 'GET' },
|
||||||
|
body: { error: 'METHOD_NOT_ALLOWED', message: 'Metadata is auto-generated' },
|
||||||
|
};
|
||||||
|
}

  // ========================================================================
  // ARTIFACT OPERATIONS
  // ========================================================================

  private async getArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): Promise<IResponse> {
    const data = await this.storage.getMavenArtifact(groupId, artifactId, version, filename);

    if (!data) {
      return {
        status: 404,
        headers: {},
        body: { error: 'NOT_FOUND', message: 'Artifact not found' },
      };
    }

    // Determine content type based on extension
    const extension = filename.split('.').pop() || '';
    const contentType = this.getContentType(extension);

    return {
      status: 200,
      headers: {
        'Content-Type': contentType,
        'Content-Length': data.length.toString(),
      },
      body: data,
    };
  }

  private async headArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): Promise<IResponse> {
    const exists = await this.storage.mavenArtifactExists(groupId, artifactId, version, filename);

    if (!exists) {
      return {
        status: 404,
        headers: {},
        body: null,
      };
    }

    // Get file size for Content-Length header
    const data = await this.storage.getMavenArtifact(groupId, artifactId, version, filename);
    const extension = filename.split('.').pop() || '';
    const contentType = this.getContentType(extension);

    return {
      status: 200,
      headers: {
        'Content-Type': contentType,
        'Content-Length': data ? data.length.toString() : '0',
      },
      body: null,
    };
  }

  private async putArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string,
    coordinate: IMavenCoordinate,
    body: Buffer | any
  ): Promise<IResponse> {
    const data = Buffer.isBuffer(body) ? body : Buffer.from(JSON.stringify(body));

    // Validate POM if uploading .pom file
    if (coordinate.extension === 'pom') {
      const pomValid = validatePom(data.toString('utf-8'));
      if (!pomValid) {
        return {
          status: 400,
          headers: {},
          body: { error: 'INVALID_POM', message: 'Invalid POM file' },
        };
      }

      // Verify GAV matches path
      const pomGAV = extractGAVFromPom(data.toString('utf-8'));
      if (pomGAV && (pomGAV.groupId !== groupId || pomGAV.artifactId !== artifactId || pomGAV.version !== version)) {
        return {
          status: 400,
          headers: {},
          body: { error: 'GAV_MISMATCH', message: 'POM coordinates do not match upload path' },
        };
      }
    }

    // Store the artifact
    await this.storage.putMavenArtifact(groupId, artifactId, version, filename, data);

    // Generate and store checksums
    const checksums = await calculateChecksums(data);
    await this.storeChecksums(groupId, artifactId, version, filename, checksums);

    // Update maven-metadata.xml if this is a primary artifact (jar, pom, war)
    if (['jar', 'pom', 'war', 'ear', 'aar'].includes(coordinate.extension)) {
      await this.updateMetadata(groupId, artifactId, version);
    }

    return {
      status: 201,
      headers: {
        'Location': `${this.registryUrl}/${gavToPath(groupId, artifactId, version)}/${filename}`,
      },
      body: { success: true, message: 'Artifact uploaded successfully' },
    };
  }
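
As a rough client-side sketch of the upload flow above (illustrative, not from this diff; the host name, /maven mount point and bearer-token header are assumptions):

import * as fs from 'fs';

// Hypothetical endpoint: <host>/maven/<groupPath>/<artifactId>/<version>/<filename>
const registryBase = 'https://registry.example.com/maven';
const uploadToken = process.env.MAVEN_TOKEN ?? '';
const jarBytes = await fs.promises.readFile('my-lib-1.0.0.jar');

const response = await fetch(`${registryBase}/com/example/my-lib/1.0.0/my-lib-1.0.0.jar`, {
  method: 'PUT',
  headers: { 'Authorization': `Bearer ${uploadToken}` },
  body: jarBytes,
});
// On success the registry answers 201 with a Location header, writes the
// .md5/.sha1/.sha256/.sha512 sidecars and refreshes maven-metadata.xml.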

  private async deleteArtifact(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string
  ): Promise<IResponse> {
    const exists = await this.storage.mavenArtifactExists(groupId, artifactId, version, filename);

    if (!exists) {
      return {
        status: 404,
        headers: {},
        body: { error: 'NOT_FOUND', message: 'Artifact not found' },
      };
    }

    await this.storage.deleteMavenArtifact(groupId, artifactId, version, filename);

    // Also delete checksums
    for (const ext of ['md5', 'sha1', 'sha256', 'sha512']) {
      const checksumFile = `${filename}.${ext}`;
      const checksumExists = await this.storage.mavenArtifactExists(groupId, artifactId, version, checksumFile);
      if (checksumExists) {
        await this.storage.deleteMavenArtifact(groupId, artifactId, version, checksumFile);
      }
    }

    return {
      status: 204,
      headers: {},
      body: null,
    };
  }

  // ========================================================================
  // CHECKSUM OPERATIONS
  // ========================================================================

  private async getChecksum(
    groupId: string,
    artifactId: string,
    version: string,
    coordinate: IMavenCoordinate,
    fullPath: string
  ): Promise<IResponse> {
    // Extract the filename from the full path (last component)
    // The fullPath might be something like /com/example/test/test-artifact/1.0.0/test-artifact-1.0.0.jar.md5
    const pathParts = fullPath.split('/');
    const checksumFilename = pathParts[pathParts.length - 1];

    const data = await this.storage.getMavenArtifact(groupId, artifactId, version, checksumFilename);

    if (!data) {
      return {
        status: 404,
        headers: {},
        body: { error: 'NOT_FOUND', message: 'Checksum not found' },
      };
    }

    return {
      status: 200,
      headers: {
        'Content-Type': 'text/plain',
        'Content-Length': data.length.toString(),
      },
      body: data,
    };
  }

  private async storeChecksums(
    groupId: string,
    artifactId: string,
    version: string,
    filename: string,
    checksums: IChecksums
  ): Promise<void> {
    // Store each checksum as a separate file
    await this.storage.putMavenArtifact(
      groupId,
      artifactId,
      version,
      `${filename}.md5`,
      Buffer.from(checksums.md5, 'utf-8')
    );

    await this.storage.putMavenArtifact(
      groupId,
      artifactId,
      version,
      `${filename}.sha1`,
      Buffer.from(checksums.sha1, 'utf-8')
    );

    if (checksums.sha256) {
      await this.storage.putMavenArtifact(
        groupId,
        artifactId,
        version,
        `${filename}.sha256`,
        Buffer.from(checksums.sha256, 'utf-8')
      );
    }

    if (checksums.sha512) {
      await this.storage.putMavenArtifact(
        groupId,
        artifactId,
        version,
        `${filename}.sha512`,
        Buffer.from(checksums.sha512, 'utf-8')
      );
    }
  }
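
For example (a sketch, not part of the diff), the checksum sidecars written for a single artifact look like this:

// After putArtifact() stores my-lib-1.0.0.jar, storeChecksums() adds four
// sibling files in the same groupId/artifactId/version location:
//   my-lib-1.0.0.jar.md5     (hex MD5 digest)
//   my-lib-1.0.0.jar.sha1    (hex SHA-1 digest)
//   my-lib-1.0.0.jar.sha256  (hex SHA-256 digest)
//   my-lib-1.0.0.jar.sha512  (hex SHA-512 digest)
const sidecars = ['md5', 'sha1', 'sha256', 'sha512'].map(ext => `my-lib-1.0.0.jar.${ext}`);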

  // ========================================================================
  // METADATA OPERATIONS
  // ========================================================================

  private async getMetadata(groupId: string, artifactId: string): Promise<IResponse> {
    const metadataBuffer = await this.storage.getMavenMetadata(groupId, artifactId);

    if (!metadataBuffer) {
      // Generate empty metadata if none exists
      const emptyMetadata: IMavenMetadata = {
        groupId,
        artifactId,
        versioning: {
          versions: [],
          lastUpdated: formatMavenTimestamp(new Date()),
        },
      };

      const xml = generateMetadataXml(emptyMetadata);
      return {
        status: 200,
        headers: {
          'Content-Type': 'application/xml',
          'Content-Length': xml.length.toString(),
        },
        body: Buffer.from(xml, 'utf-8'),
      };
    }

    return {
      status: 200,
      headers: {
        'Content-Type': 'application/xml',
        'Content-Length': metadataBuffer.length.toString(),
      },
      body: metadataBuffer,
    };
  }

  private async updateMetadata(
    groupId: string,
    artifactId: string,
    newVersion: string
  ): Promise<void> {
    // Get existing metadata or create new
    const existingBuffer = await this.storage.getMavenMetadata(groupId, artifactId);
    let metadata: IMavenMetadata;

    if (existingBuffer) {
      const parsed = parseMetadataXml(existingBuffer.toString('utf-8'));
      if (parsed) {
        metadata = parsed;
      } else {
        // Create new if parsing failed
        metadata = {
          groupId,
          artifactId,
          versioning: {
            versions: [],
            lastUpdated: formatMavenTimestamp(new Date()),
          },
        };
      }
    } else {
      metadata = {
        groupId,
        artifactId,
        versioning: {
          versions: [],
          lastUpdated: formatMavenTimestamp(new Date()),
        },
      };
    }

    // Add new version if not already present
    if (!metadata.versioning.versions.includes(newVersion)) {
      metadata.versioning.versions.push(newVersion);
      metadata.versioning.versions.sort(); // Sort versions
    }

    // Update latest and release
    const versions = metadata.versioning.versions;
    metadata.versioning.latest = versions[versions.length - 1];

    // Release is the latest non-SNAPSHOT version
    const releaseVersions = versions.filter(v => !isSnapshot(v));
    if (releaseVersions.length > 0) {
      metadata.versioning.release = releaseVersions[releaseVersions.length - 1];
    }

    // Update timestamp
    metadata.versioning.lastUpdated = formatMavenTimestamp(new Date());

    // Generate and store XML
    const xml = generateMetadataXml(metadata);
    await this.storage.putMavenMetadata(groupId, artifactId, Buffer.from(xml, 'utf-8'));

    // Note: Checksums for maven-metadata.xml are optional and not critical
    // They would need special handling since metadata uses a different storage path
  }
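
To illustrate the latest/release selection above (a sketch, not part of the changeset):

// After uploads of 1.0.0, 1.1.0 and 1.2.0-SNAPSHOT for com.example:my-lib,
// updateMetadata() produces a versioning block shaped like this:
const exampleVersioning: IMavenVersioning = {
  latest: '1.2.0-SNAPSHOT',                       // last entry after sort()
  release: '1.1.0',                               // last non-SNAPSHOT entry
  versions: ['1.0.0', '1.1.0', '1.2.0-SNAPSHOT'],
  lastUpdated: '20251121120000',                  // example yyyyMMddHHmmss value
};
// Note that Array.prototype.sort() orders versions lexicographically, so for
// example '1.10.0' would sort before '1.2.0'.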

  // ========================================================================
  // UTILITY METHODS
  // ========================================================================

  private getContentType(extension: string): string {
    const contentTypes: Record<string, string> = {
      'jar': 'application/java-archive',
      'war': 'application/java-archive',
      'ear': 'application/java-archive',
      'aar': 'application/java-archive',
      'pom': 'application/xml',
      'xml': 'application/xml',
      'md5': 'text/plain',
      'sha1': 'text/plain',
      'sha256': 'text/plain',
      'sha512': 'text/plain',
    };

    return contentTypes[extension] || 'application/octet-stream';
  }
}

346 ts/maven/helpers.maven.ts Normal file
@@ -0,0 +1,346 @@
/**
 * Maven helper utilities
 * Path conversion, XML generation, checksum calculation
 */

import * as plugins from '../plugins.js';
import type {
  IMavenCoordinate,
  IMavenMetadata,
  IChecksums,
  IMavenPom,
} from './interfaces.maven.js';

/**
 * Convert Maven GAV coordinates to storage path
 * Example: com.example:my-lib:1.0.0 → com/example/my-lib/1.0.0
 */
export function gavToPath(
  groupId: string,
  artifactId: string,
  version?: string
): string {
  const groupPath = groupId.replace(/\./g, '/');
  if (version) {
    return `${groupPath}/${artifactId}/${version}`;
  }
  return `${groupPath}/${artifactId}`;
}

/**
 * Parse Maven path to GAV coordinates
 * Example: com/example/my-lib/1.0.0/my-lib-1.0.0.jar → {groupId, artifactId, version, ...}
 */
export function pathToGAV(path: string): IMavenCoordinate | null {
  // Remove leading slash if present
  const cleanPath = path.startsWith('/') ? path.substring(1) : path;

  // Split path into parts
  const parts = cleanPath.split('/');
  if (parts.length < 4) {
    return null; // Not a valid artifact path
  }

  // Last part is filename
  const filename = parts[parts.length - 1];
  const version = parts[parts.length - 2];
  const artifactId = parts[parts.length - 3];
  const groupId = parts.slice(0, -3).join('.');

  // Parse filename to extract classifier and extension
  const parsed = parseFilename(filename, artifactId, version);
  if (!parsed) {
    return null;
  }

  return {
    groupId,
    artifactId,
    version,
    classifier: parsed.classifier,
    extension: parsed.extension,
  };
}
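
A quick round-trip sketch (illustrative) of the two path helpers:

gavToPath('com.example', 'my-lib', '1.0.0');
// -> 'com/example/my-lib/1.0.0'

pathToGAV('/com/example/my-lib/1.0.0/my-lib-1.0.0-sources.jar');
// -> { groupId: 'com.example', artifactId: 'my-lib', version: '1.0.0',
//      classifier: 'sources', extension: 'jar' }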

/**
 * Parse Maven artifact filename
 * Example: my-lib-1.0.0-sources.jar → {classifier: 'sources', extension: 'jar'}
 * Example: my-lib-1.0.0.jar.md5 → {extension: 'md5'}
 */
export function parseFilename(
  filename: string,
  artifactId: string,
  version: string
): { classifier?: string; extension: string } | null {
  // Expected format: {artifactId}-{version}[-{classifier}].{extension}[.checksum]
  const prefix = `${artifactId}-${version}`;

  if (!filename.startsWith(prefix)) {
    return null;
  }

  let remainder = filename.substring(prefix.length);

  // Check if this is a checksum file (double extension like .jar.md5)
  const checksumExtensions = ['md5', 'sha1', 'sha256', 'sha512'];
  const lastDotIndex = remainder.lastIndexOf('.');
  if (lastDotIndex !== -1) {
    const possibleChecksum = remainder.substring(lastDotIndex + 1);
    if (checksumExtensions.includes(possibleChecksum)) {
      // This is a checksum file - just return the checksum extension
      // The base artifact extension doesn't matter for checksum retrieval
      return { extension: possibleChecksum };
    }
  }

  // Regular artifact file parsing
  const dotIndex = remainder.lastIndexOf('.');
  if (dotIndex === -1) {
    return null; // No extension
  }

  const extension = remainder.substring(dotIndex + 1);
  const classifierPart = remainder.substring(0, dotIndex);

  if (classifierPart.length === 0) {
    // No classifier
    return { extension };
  }

  if (classifierPart.startsWith('-')) {
    // Has classifier
    const classifier = classifierPart.substring(1);
    return { classifier, extension };
  }

  return null; // Invalid format
}
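
A few illustrative cases for the filename parser (expected results shown as comments):

parseFilename('my-lib-1.0.0.jar', 'my-lib', '1.0.0');         // { extension: 'jar' }
parseFilename('my-lib-1.0.0-javadoc.jar', 'my-lib', '1.0.0'); // { classifier: 'javadoc', extension: 'jar' }
parseFilename('my-lib-1.0.0.jar.sha1', 'my-lib', '1.0.0');    // { extension: 'sha1' } (checksum file)
parseFilename('other-2.0.0.jar', 'my-lib', '1.0.0');          // null (prefix mismatch)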

/**
 * Build Maven artifact filename
 * Example: {artifactId: 'my-lib', version: '1.0.0', classifier: 'sources', extension: 'jar'}
 * → 'my-lib-1.0.0-sources.jar'
 */
export function buildFilename(coordinate: IMavenCoordinate): string {
  const { artifactId, version, classifier, extension } = coordinate;

  let filename = `${artifactId}-${version}`;
  if (classifier) {
    filename += `-${classifier}`;
  }
  filename += `.${extension}`;

  return filename;
}

/**
 * Calculate checksums for Maven artifact
 * Returns MD5, SHA-1, SHA-256, SHA-512
 */
export async function calculateChecksums(data: Buffer): Promise<IChecksums> {
  const crypto = await import('crypto');

  return {
    md5: crypto.createHash('md5').update(data).digest('hex'),
    sha1: crypto.createHash('sha1').update(data).digest('hex'),
    sha256: crypto.createHash('sha256').update(data).digest('hex'),
    sha512: crypto.createHash('sha512').update(data).digest('hex'),
  };
}

/**
 * Generate maven-metadata.xml from metadata object
 */
export function generateMetadataXml(metadata: IMavenMetadata): string {
  const { groupId, artifactId, versioning } = metadata;
  const { latest, release, versions, lastUpdated, snapshot, snapshotVersions } = versioning;

  let xml = '<?xml version="1.0" encoding="UTF-8"?>\n';
  xml += '<metadata>\n';
  xml += `  <groupId>${escapeXml(groupId)}</groupId>\n`;
  xml += `  <artifactId>${escapeXml(artifactId)}</artifactId>\n`;

  // Add version if SNAPSHOT
  if (snapshot) {
    const snapshotVersion = versions[versions.length - 1]; // Assume last version is the SNAPSHOT
    xml += `  <version>${escapeXml(snapshotVersion)}</version>\n`;
  }

  xml += '  <versioning>\n';

  if (latest) {
    xml += `    <latest>${escapeXml(latest)}</latest>\n`;
  }

  if (release) {
    xml += `    <release>${escapeXml(release)}</release>\n`;
  }

  xml += '    <versions>\n';
  for (const version of versions) {
    xml += `      <version>${escapeXml(version)}</version>\n`;
  }
  xml += '    </versions>\n';

  xml += `    <lastUpdated>${lastUpdated}</lastUpdated>\n`;

  // Add SNAPSHOT info if present
  if (snapshot) {
    xml += '    <snapshot>\n';
    xml += `      <timestamp>${escapeXml(snapshot.timestamp)}</timestamp>\n`;
    xml += `      <buildNumber>${snapshot.buildNumber}</buildNumber>\n`;
    xml += '    </snapshot>\n';
  }

  // Add SNAPSHOT versions if present
  if (snapshotVersions && snapshotVersions.length > 0) {
    xml += '    <snapshotVersions>\n';
    for (const sv of snapshotVersions) {
      xml += '      <snapshotVersion>\n';
      if (sv.classifier) {
        xml += `        <classifier>${escapeXml(sv.classifier)}</classifier>\n`;
      }
      xml += `        <extension>${escapeXml(sv.extension)}</extension>\n`;
      xml += `        <value>${escapeXml(sv.value)}</value>\n`;
      xml += `        <updated>${sv.updated}</updated>\n`;
      xml += '      </snapshotVersion>\n';
    }
    xml += '    </snapshotVersions>\n';
  }

  xml += '  </versioning>\n';
  xml += '</metadata>\n';

  return xml;
}

/**
 * Parse maven-metadata.xml to metadata object
 * Basic XML parsing for Maven metadata
 */
export function parseMetadataXml(xml: string): IMavenMetadata | null {
  try {
    // Simple regex-based parsing (for basic metadata)
    // In production, use a proper XML parser

    const groupIdMatch = xml.match(/<groupId>([^<]+)<\/groupId>/);
    const artifactIdMatch = xml.match(/<artifactId>([^<]+)<\/artifactId>/);
    const latestMatch = xml.match(/<latest>([^<]+)<\/latest>/);
    const releaseMatch = xml.match(/<release>([^<]+)<\/release>/);
    const lastUpdatedMatch = xml.match(/<lastUpdated>([^<]+)<\/lastUpdated>/);

    if (!groupIdMatch || !artifactIdMatch) {
      return null;
    }

    // Parse versions
    const versionsMatch = xml.match(/<versions>([\s\S]*?)<\/versions>/);
    const versions: string[] = [];
    if (versionsMatch) {
      const versionMatches = versionsMatch[1].matchAll(/<version>([^<]+)<\/version>/g);
      for (const match of versionMatches) {
        versions.push(match[1]);
      }
    }

    return {
      groupId: groupIdMatch[1],
      artifactId: artifactIdMatch[1],
      versioning: {
        latest: latestMatch ? latestMatch[1] : undefined,
        release: releaseMatch ? releaseMatch[1] : undefined,
        versions,
        lastUpdated: lastUpdatedMatch ? lastUpdatedMatch[1] : formatMavenTimestamp(new Date()),
      },
    };
  } catch (error) {
    return null;
  }
}
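
A generate-then-parse round trip (sketch) showing which fields survive the regex-based parser:

const xmlExample = generateMetadataXml({
  groupId: 'com.example',
  artifactId: 'my-lib',
  versioning: { latest: '1.1.0', release: '1.1.0', versions: ['1.0.0', '1.1.0'], lastUpdated: '20251121120000' },
});
const parsedExample = parseMetadataXml(xmlExample);
// parsedExample?.versioning.versions -> ['1.0.0', '1.1.0']
// snapshot/snapshotVersions are not recovered by this basic parser.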

/**
 * Escape XML special characters
 */
function escapeXml(str: string): string {
  return str
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&apos;');
}

/**
 * Format timestamp in Maven format: yyyyMMddHHmmss
 */
export function formatMavenTimestamp(date: Date): string {
  const year = date.getUTCFullYear();
  const month = String(date.getUTCMonth() + 1).padStart(2, '0');
  const day = String(date.getUTCDate()).padStart(2, '0');
  const hours = String(date.getUTCHours()).padStart(2, '0');
  const minutes = String(date.getUTCMinutes()).padStart(2, '0');
  const seconds = String(date.getUTCSeconds()).padStart(2, '0');

  return `${year}${month}${day}${hours}${minutes}${seconds}`;
}

/**
 * Format SNAPSHOT timestamp: yyyyMMdd.HHmmss
 */
export function formatSnapshotTimestamp(date: Date): string {
  const year = date.getUTCFullYear();
  const month = String(date.getUTCMonth() + 1).padStart(2, '0');
  const day = String(date.getUTCDate()).padStart(2, '0');
  const hours = String(date.getUTCHours()).padStart(2, '0');
  const minutes = String(date.getUTCMinutes()).padStart(2, '0');
  const seconds = String(date.getUTCSeconds()).padStart(2, '0');

  return `${year}${month}${day}.${hours}${minutes}${seconds}`;
}
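
Example outputs for a fixed UTC instant (illustrative):

const when = new Date(Date.UTC(2025, 10, 21, 12, 30, 5)); // 2025-11-21T12:30:05Z
formatMavenTimestamp(when);    // '20251121123005'
formatSnapshotTimestamp(when); // '20251121.123005'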

/**
 * Check if version is a SNAPSHOT
 */
export function isSnapshot(version: string): boolean {
  return version.endsWith('-SNAPSHOT');
}

/**
 * Validate POM basic structure
 */
export function validatePom(pomXml: string): boolean {
  try {
    // Basic validation - check for required fields
    return (
      pomXml.includes('<groupId>') &&
      pomXml.includes('<artifactId>') &&
      pomXml.includes('<version>') &&
      pomXml.includes('<modelVersion>')
    );
  } catch (error) {
    return false;
  }
}

/**
 * Extract GAV from POM XML
 */
export function extractGAVFromPom(pomXml: string): { groupId: string; artifactId: string; version: string } | null {
  try {
    const groupIdMatch = pomXml.match(/<groupId>([^<]+)<\/groupId>/);
    const artifactIdMatch = pomXml.match(/<artifactId>([^<]+)<\/artifactId>/);
    const versionMatch = pomXml.match(/<version>([^<]+)<\/version>/);

    if (groupIdMatch && artifactIdMatch && versionMatch) {
      return {
        groupId: groupIdMatch[1],
        artifactId: artifactIdMatch[1],
        version: versionMatch[1],
      };
    }

    return null;
  } catch (error) {
    return null;
  }
}
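
A minimal POM that passes both helpers (sketch; note the first-match behaviour of the regexes):

const pomExample = `<?xml version="1.0" encoding="UTF-8"?>
<project>
  <modelVersion>4.0.0</modelVersion>
  <groupId>com.example</groupId>
  <artifactId>my-lib</artifactId>
  <version>1.0.0</version>
</project>`;
validatePom(pomExample);        // true
extractGAVFromPom(pomExample);  // { groupId: 'com.example', artifactId: 'my-lib', version: '1.0.0' }
// The regexes take the first match, so a <parent> block listed before the
// project's own coordinates would win for groupId/version.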

7 ts/maven/index.ts Normal file
@@ -0,0 +1,7 @@
/**
 * Maven Registry module exports
 */

export { MavenRegistry } from './classes.mavenregistry.js';
export * from './interfaces.maven.js';
export * from './helpers.maven.js';

127 ts/maven/interfaces.maven.ts Normal file
@@ -0,0 +1,127 @@
/**
 * Maven registry type definitions
 * Supports Maven repository protocol for Java artifacts
 */

/**
 * Maven coordinate system (GAV + optional classifier)
 * Example: com.example:my-library:1.0.0:sources:jar
 */
export interface IMavenCoordinate {
  groupId: string;     // e.g., "com.example.myapp"
  artifactId: string;  // e.g., "my-library"
  version: string;     // e.g., "1.0.0" or "1.0-SNAPSHOT"
  classifier?: string; // e.g., "sources", "javadoc"
  extension: string;   // e.g., "jar", "war", "pom"
}

/**
 * Maven metadata (maven-metadata.xml) structure
 * Contains version list and latest/release information
 */
export interface IMavenMetadata {
  groupId: string;
  artifactId: string;
  versioning: IMavenVersioning;
}

/**
 * Maven versioning information
 */
export interface IMavenVersioning {
  latest?: string;                            // Latest version (including SNAPSHOTs)
  release?: string;                           // Latest release version (excluding SNAPSHOTs)
  versions: string[];                         // List of all versions
  lastUpdated: string;                        // Format: yyyyMMddHHmmss
  snapshot?: IMavenSnapshot;                  // For SNAPSHOT versions
  snapshotVersions?: IMavenSnapshotVersion[]; // For SNAPSHOT builds
}

/**
 * SNAPSHOT build information
 */
export interface IMavenSnapshot {
  timestamp: string;   // Format: yyyyMMdd.HHmmss
  buildNumber: number; // Incremental build number
}

/**
 * SNAPSHOT version entry
 */
export interface IMavenSnapshotVersion {
  classifier?: string;
  extension: string;
  value: string;   // Timestamped version
  updated: string; // Format: yyyyMMddHHmmss
}

/**
 * Checksums for Maven artifacts
 * Maven requires separate checksum files for each artifact
 */
export interface IChecksums {
  md5: string;     // MD5 hash
  sha1: string;    // SHA-1 hash (required)
  sha256?: string; // SHA-256 hash (optional)
  sha512?: string; // SHA-512 hash (optional)
}

/**
 * Maven artifact file information
 */
export interface IMavenArtifactFile {
  filename: string;             // Full filename with extension
  data: Buffer;                 // File content
  coordinate: IMavenCoordinate; // Parsed GAV coordinates
  checksums?: IChecksums;       // Calculated checksums
}

/**
 * Maven upload request
 * Contains all files for a single version (JAR, POM, sources, etc.)
 */
export interface IMavenUploadRequest {
  groupId: string;
  artifactId: string;
  version: string;
  files: IMavenArtifactFile[];
}

/**
 * Maven protocol configuration
 */
export interface IMavenProtocolConfig {
  enabled: boolean;
  basePath: string; // Default: '/maven'
  features?: {
    snapshots?: boolean;          // Support SNAPSHOT versions (default: true)
    checksums?: boolean;          // Auto-generate checksums (default: true)
    metadata?: boolean;           // Auto-generate maven-metadata.xml (default: true)
    allowedExtensions?: string[]; // Allowed file extensions (default: jar, war, pom, etc.)
  };
}

/**
 * Maven POM (Project Object Model) minimal structure
 * Only essential fields for validation
 */
export interface IMavenPom {
  modelVersion: string; // Always "4.0.0"
  groupId: string;
  artifactId: string;
  version: string;
  packaging?: string;   // jar, war, pom, etc.
  name?: string;
  description?: string;
}

/**
 * Maven repository search result
 */
export interface IMavenSearchResult {
  groupId: string;
  artifactId: string;
  latestVersion: string;
  versions: string[];
  lastUpdated: string;
}
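
For orientation, a configuration object satisfying IMavenProtocolConfig might look like this (values are illustrative defaults, not prescribed by the changeset):

const mavenConfig: IMavenProtocolConfig = {
  enabled: true,
  basePath: '/maven',
  features: {
    snapshots: true,
    checksums: true,
    metadata: true,
    allowedExtensions: ['jar', 'war', 'pom'],
  },
};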

@@ -284,12 +284,15 @@ export class OciRegistry extends BaseRegistry {
   private async getManifest(
     repository: string,
     reference: string,
-    token: IAuthToken | null
+    token: IAuthToken | null,
+    headers?: Record<string, string>
   ): Promise<IResponse> {
     if (!await this.checkPermission(token, repository, 'pull')) {
       return {
         status: 401,
-        headers: {},
+        headers: {
+          'WWW-Authenticate': `Bearer realm="${this.basePath}/v2/token",service="registry",scope="repository:${repository}:pull"`,
+        },
         body: this.createError('DENIED', 'Insufficient permissions'),
       };
     }
@@ -402,11 +405,12 @@ export class OciRegistry extends BaseRegistry {
     // Store manifest by digest
     await this.storage.putOciManifest(repository, digest, manifestData, contentType);

-    // If reference is a tag (not a digest), create tag reference
+    // If reference is a tag (not a digest), update tags mapping
     if (!reference.startsWith('sha256:')) {
-      // Store tag -> digest mapping
-      const tagPath = `oci/repositories/${repository}/tags/${reference}`;
-      await this.storage.putObject(tagPath, Buffer.from(digest, 'utf-8'));
+      const tags = await this.getTagsData(repository);
+      tags[reference] = digest;
+      const tagsPath = `oci/tags/${repository}/tags.json`;
+      await this.storage.putObject(tagsPath, Buffer.from(JSON.stringify(tags), 'utf-8'));
     }

     return {
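
For reference, a sketch of the consolidated tag mapping the new code path writes (digest values are placeholders):

// Stored at `oci/tags/${repository}/tags.json`, one JSON object per repository:
// {
//   "latest": "sha256:<digest>",
//   "v1.2.3": "sha256:<digest>"
// }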