feat(auth): Implement HMAC-SHA256 OCI JWTs; enhance PyPI & RubyGems uploads and normalize responses
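The hunks below touch only the test archive helpers; the HMAC-SHA256 OCI JWT signing named in the title is not part of this excerpt. A minimal sketch of HS256 signing with Node's built-in crypto module, assuming a standalone helper (the helper name, claim fields, and secret handling are illustrative, not code from this commit):

import * as crypto from 'crypto';

// Hypothetical HS256 signer; header/payload shape and secret handling are illustrative only.
function signHs256Jwt(payload: Record<string, unknown>, secret: string): string {
  const encode = (value: unknown) =>
    Buffer.from(JSON.stringify(value)).toString('base64url');
  const header = encode({ alg: 'HS256', typ: 'JWT' });
  const body = encode(payload);
  // HMAC-SHA256 over "<header>.<payload>", base64url-encoded, forms the signature.
  const signature = crypto
    .createHmac('sha256', secret)
    .update(`${header}.${body}`)
    .digest('base64url');
  return `${header}.${body}.${signature}`;
}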
@@ -1,5 +1,6 @@
import * as qenv from '@push.rocks/qenv';
import * as crypto from 'crypto';
import * as smartarchive from '@push.rocks/smartarchive';
import { SmartRegistry } from '../../ts/classes.smartregistry.js';
import type { IRegistryConfig } from '../../ts/core/interfaces.core.js';

@@ -241,7 +242,7 @@ export function calculateMavenChecksums(data: Buffer) {
}

/**
 * Helper to create a Composer package ZIP
 * Helper to create a Composer package ZIP using smartarchive
 */
export async function createComposerZip(
  vendorPackage: string,
@@ -252,8 +253,7 @@ export async function createComposerZip(
    authors?: Array<{ name: string; email?: string }>;
  }
): Promise<Buffer> {
  const AdmZip = (await import('adm-zip')).default;
  const zip = new AdmZip();
  const zipTools = new smartarchive.ZipTools();

  const composerJson = {
    name: vendorPackage,
@@ -272,9 +272,6 @@ export async function createComposerZip(
    },
  };

  // Add composer.json
  zip.addFile('composer.json', Buffer.from(JSON.stringify(composerJson, null, 2), 'utf-8'));

  // Add a test PHP file
  const [vendor, pkg] = vendorPackage.split('/');
  const namespace = `${vendor.charAt(0).toUpperCase() + vendor.slice(1)}\\${pkg.charAt(0).toUpperCase() + pkg.slice(1).replace(/-/g, '')}`;
@@ -290,24 +287,33 @@ class TestClass
}
`;

  zip.addFile('src/TestClass.php', Buffer.from(testPhpContent, 'utf-8'));
  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: 'composer.json',
      content: Buffer.from(JSON.stringify(composerJson, null, 2), 'utf-8'),
    },
    {
      archivePath: 'src/TestClass.php',
      content: Buffer.from(testPhpContent, 'utf-8'),
    },
    {
      archivePath: 'README.md',
      content: Buffer.from(`# ${vendorPackage}\n\nTest package`, 'utf-8'),
    },
  ];

  // Add README
  zip.addFile('README.md', Buffer.from(`# ${vendorPackage}\n\nTest package`, 'utf-8'));

  return zip.toBuffer();
  return zipTools.createZip(entries);
}
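
// Illustrative aside, not taken from the diff above: the ZipTools call that
// createComposerZip now delegates to can be exercised directly (assumes an
// async/ESM context; the entry path and contents are made-up example values).
const exampleZip: Buffer = await new smartarchive.ZipTools().createZip([
  { archivePath: 'hello.txt', content: Buffer.from('hello world', 'utf-8') },
]);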

/**
 * Helper to create a test Python wheel file (minimal ZIP structure)
 * Helper to create a test Python wheel file (minimal ZIP structure) using smartarchive
 */
export async function createPythonWheel(
  packageName: string,
  version: string,
  pyVersion: string = 'py3'
): Promise<Buffer> {
  const AdmZip = (await import('adm-zip')).default;
  const zip = new AdmZip();
  const zipTools = new smartarchive.ZipTools();

  const normalizedName = packageName.replace(/-/g, '_');
  const distInfoDir = `${normalizedName}-${version}.dist-info`;
@@ -331,8 +337,6 @@ Description-Content-Type: text/markdown
Test package for SmartRegistry
`;

  zip.addFile(`${distInfoDir}/METADATA`, Buffer.from(metadata, 'utf-8'));

  // Create WHEEL file
  const wheelContent = `Wheel-Version: 1.0
Generator: test 1.0.0
@@ -340,14 +344,6 @@ Root-Is-Purelib: true
Tag: ${pyVersion}-none-any
`;

  zip.addFile(`${distInfoDir}/WHEEL`, Buffer.from(wheelContent, 'utf-8'));

  // Create RECORD file (empty for test)
  zip.addFile(`${distInfoDir}/RECORD`, Buffer.from('', 'utf-8'));

  // Create top_level.txt
  zip.addFile(`${distInfoDir}/top_level.txt`, Buffer.from(normalizedName, 'utf-8'));

  // Create a simple Python module
  const moduleContent = `"""${packageName} module"""

@@ -357,27 +353,44 @@ def hello():
    return "Hello from ${packageName}!"
`;

  zip.addFile(`${normalizedName}/__init__.py`, Buffer.from(moduleContent, 'utf-8'));
  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `${distInfoDir}/METADATA`,
      content: Buffer.from(metadata, 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/WHEEL`,
      content: Buffer.from(wheelContent, 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/RECORD`,
      content: Buffer.from('', 'utf-8'),
    },
    {
      archivePath: `${distInfoDir}/top_level.txt`,
      content: Buffer.from(normalizedName, 'utf-8'),
    },
    {
      archivePath: `${normalizedName}/__init__.py`,
      content: Buffer.from(moduleContent, 'utf-8'),
    },
  ];

  return zip.toBuffer();
  return zipTools.createZip(entries);
}
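
// Illustrative usage, not taken from the diff above: with the signature shown,
// a test can build a wheel buffer directly (package name and version are
// made-up example values; assumes an async/ESM context).
const exampleWheel: Buffer = await createPythonWheel('demo-package', '1.0.0');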

/**
 * Helper to create a test Python source distribution (sdist)
 * Helper to create a test Python source distribution (sdist) using smartarchive
 */
export async function createPythonSdist(
  packageName: string,
  version: string
): Promise<Buffer> {
  const tar = await import('tar-stream');
  const zlib = await import('zlib');
  const { Readable } = await import('stream');
  const tarTools = new smartarchive.TarTools();

  const normalizedName = packageName.replace(/-/g, '_');
  const dirPrefix = `${packageName}-${version}`;

  const pack = tar.pack();

  // PKG-INFO
  const pkgInfo = `Metadata-Version: 2.1
Name: ${packageName}
@@ -389,8 +402,6 @@ Author-email: test@example.com
License: MIT
`;

  pack.entry({ name: `${dirPrefix}/PKG-INFO` }, pkgInfo);

  // setup.py
  const setupPy = `from setuptools import setup, find_packages

@@ -402,8 +413,6 @@ setup(
)
`;

  pack.entry({ name: `${dirPrefix}/setup.py` }, setupPy);

  // Module file
  const moduleContent = `"""${packageName} module"""

@@ -413,20 +422,22 @@ def hello():
    return "Hello from ${packageName}!"
`;

  pack.entry({ name: `${dirPrefix}/${normalizedName}/__init__.py` }, moduleContent);
  const entries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `${dirPrefix}/PKG-INFO`,
      content: Buffer.from(pkgInfo, 'utf-8'),
    },
    {
      archivePath: `${dirPrefix}/setup.py`,
      content: Buffer.from(setupPy, 'utf-8'),
    },
    {
      archivePath: `${dirPrefix}/${normalizedName}/__init__.py`,
      content: Buffer.from(moduleContent, 'utf-8'),
    },
  ];

  pack.finalize();

  // Convert to gzipped tar
  const chunks: Buffer[] = [];
  const gzip = zlib.createGzip();

  return new Promise((resolve, reject) => {
    pack.pipe(gzip);
    gzip.on('data', (chunk) => chunks.push(chunk));
    gzip.on('end', () => resolve(Buffer.concat(chunks)));
    gzip.on('error', reject);
  });
  return tarTools.packFilesToTarGz(entries);
}
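
// Illustrative usage, not taken from the diff above: the sdist helper now
// returns whatever TarTools.packFilesToTarGz produces for the three entries
// (name and version below are made-up example values; async/ESM context).
const exampleSdist: Buffer = await createPythonSdist('demo-package', '1.0.0');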

/**
@@ -441,17 +452,15 @@ export function calculatePypiHashes(data: Buffer) {
}

/**
 * Helper to create a test RubyGem file (minimal tar.gz structure)
 * Helper to create a test RubyGem file (minimal tar.gz structure) using smartarchive
 */
export async function createRubyGem(
  gemName: string,
  version: string,
  platform: string = 'ruby'
): Promise<Buffer> {
  const tar = await import('tar-stream');
  const zlib = await import('zlib');

  const pack = tar.pack();
  const tarTools = new smartarchive.TarTools();
  const gzipTools = new smartarchive.GzipTools();

  // Create metadata.gz (simplified)
  const metadataYaml = `--- !ruby/object:Gem::Specification
@@ -499,10 +508,9 @@ summary: Test gem for SmartRegistry
test_files: []
`;

  pack.entry({ name: 'metadata.gz' }, zlib.gzipSync(Buffer.from(metadataYaml, 'utf-8')));
  const metadataGz = await gzipTools.compress(Buffer.from(metadataYaml, 'utf-8'));

  // Create data.tar.gz (simplified)
  const dataPack = tar.pack();
  // Create data.tar.gz content
  const libContent = `# ${gemName}

module ${gemName.charAt(0).toUpperCase() + gemName.slice(1).replace(/-/g, '')}
@@ -514,32 +522,28 @@ module ${gemName.charAt(0).toUpperCase() + gemName.slice(1).replace(/-/g, '')}
end
`;

  dataPack.entry({ name: `lib/${gemName}.rb` }, libContent);
  dataPack.finalize();
  const dataEntries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: `lib/${gemName}.rb`,
      content: Buffer.from(libContent, 'utf-8'),
    },
  ];

  const dataChunks: Buffer[] = [];
  const dataGzip = zlib.createGzip();
  dataPack.pipe(dataGzip);
  const dataTarGz = await tarTools.packFilesToTarGz(dataEntries);

  await new Promise((resolve) => {
    dataGzip.on('data', (chunk) => dataChunks.push(chunk));
    dataGzip.on('end', resolve);
  });
  // Create the outer gem (tar.gz containing metadata.gz and data.tar.gz)
  const gemEntries: smartarchive.IArchiveEntry[] = [
    {
      archivePath: 'metadata.gz',
      content: metadataGz,
    },
    {
      archivePath: 'data.tar.gz',
      content: dataTarGz,
    },
  ];

  pack.entry({ name: 'data.tar.gz' }, Buffer.concat(dataChunks));

  pack.finalize();

  // Convert to gzipped tar
  const chunks: Buffer[] = [];
  const gzip = zlib.createGzip();

  return new Promise((resolve, reject) => {
    pack.pipe(gzip);
    gzip.on('data', (chunk) => chunks.push(chunk));
    gzip.on('end', () => resolve(Buffer.concat(chunks)));
    gzip.on('error', reject);
  });
  return tarTools.packFilesToTarGz(gemEntries);
}
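
// Illustrative usage, not taken from the diff above: the gem helper nests
// metadata.gz and data.tar.gz inside an outer tar.gz, mirroring a real .gem
// layout (name and version below are made-up example values; async/ESM context).
const exampleGem: Buffer = await createRubyGem('demo-gem', '1.0.0');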

/**