Add tests for authentication and security features
- Implement unit tests for password handling in `auth_test.ts`, covering bcrypt and legacy password hashes.
- Create a fake database for user management to facilitate testing of the `AdminHandler`.
- Validate JWT-based identity verification against database records.
- Introduce tests for credential encryption and registry management in `security_test.ts`.
- Ensure registry passwords are securely stored and can be decrypted correctly, including legacy support.
- Add utility functions for password hashing and verification in `auth.ts`.
This commit is contained in:
@@ -43,6 +43,14 @@ const IV_LENGTH = 12;
|
||||
// Salt length in bytes — presumably the random salt fed to PBKDF2 key
// derivation; TODO confirm at the (not shown) call sites.
const SALT_LENGTH = 32;

// Iteration count passed to PBKDF2 when deriving the backup encryption key
// (see the crypto.subtle.deriveKey call further down).
const PBKDF2_ITERATIONS = 100000;
|
||||
|
||||
/**
 * Normalized S3/MinIO connection details, resolved from a resource's
 * credential record by getS3ConnectionInfo().
 */
interface IS3ConnectionInfo {
  endpoint: string;
  accessKey: string;
  secretKey: string;
  bucket: string;
  region: string;
}
|
||||
|
||||
export class BackupManager {
  // Back-reference to the owning Onebox instance; used to reach the
  // settings database (e.g. in getBackupPassword).
  private oneboxRef: Onebox;

  // NOTE(review): appears to hold the current container archive; initialized
  // to null — confirm who assigns it, the writer is outside this view.
  public archive: plugins.ContainerArchive | null = null;
|
||||
@@ -519,7 +527,8 @@ export class BackupManager {
|
||||
* Get backup password from settings
|
||||
*/
|
||||
private getBackupPassword(): string | null {
|
||||
return this.oneboxRef.database.getSetting('backup_encryption_password');
|
||||
return this.oneboxRef.database.getSetting('backup_encryption_password')
|
||||
|| this.oneboxRef.database.getSetting('backupPassword');
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -860,47 +869,48 @@ export class BackupManager {
|
||||
const bucketDir = `${dataDir}/${resource.resourceName}`;
|
||||
await Deno.mkdir(bucketDir, { recursive: true });
|
||||
|
||||
const endpoint = credentials.endpoint || credentials.S3_ENDPOINT;
|
||||
const accessKey = credentials.accessKey || credentials.S3_ACCESS_KEY;
|
||||
const secretKey = credentials.secretKey || credentials.S3_SECRET_KEY;
|
||||
const bucket = credentials.bucket || credentials.S3_BUCKET;
|
||||
const s3Info = this.getS3ConnectionInfo(credentials);
|
||||
const s3Client = this.createS3Client(s3Info);
|
||||
let objectCount = 0;
|
||||
let continuationToken: string | undefined;
|
||||
|
||||
if (!endpoint || !accessKey || !secretKey || !bucket) {
|
||||
throw new Error('MinIO credentials incomplete');
|
||||
}
|
||||
do {
|
||||
const response = await s3Client.send(
|
||||
new plugins.awsS3.ListObjectsV2Command({
|
||||
Bucket: s3Info.bucket,
|
||||
ContinuationToken: continuationToken,
|
||||
}),
|
||||
);
|
||||
|
||||
const s3Client = new plugins.smartstorage.SmartStorage({
|
||||
endpoint,
|
||||
accessKey,
|
||||
secretKey,
|
||||
bucket,
|
||||
});
|
||||
for (const object of response.Contents || []) {
|
||||
const objectKey = object.Key;
|
||||
if (!objectKey) continue;
|
||||
|
||||
await s3Client.start();
|
||||
const objectResponse = await s3Client.send(
|
||||
new plugins.awsS3.GetObjectCommand({
|
||||
Bucket: s3Info.bucket,
|
||||
Key: objectKey,
|
||||
}),
|
||||
);
|
||||
|
||||
const objects = await s3Client.listObjects();
|
||||
if (!objectResponse.Body) continue;
|
||||
|
||||
for (const obj of objects) {
|
||||
const objectKey = obj.Key;
|
||||
if (!objectKey) continue;
|
||||
|
||||
const objectData = await s3Client.getObject(objectKey);
|
||||
if (objectData) {
|
||||
const objectPath = `${bucketDir}/${objectKey}`;
|
||||
const parentDir = plugins.path.dirname(objectPath);
|
||||
await Deno.mkdir(parentDir, { recursive: true });
|
||||
await Deno.writeFile(objectPath, objectData);
|
||||
await Deno.writeFile(objectPath, await objectResponse.Body.transformToByteArray());
|
||||
objectCount++;
|
||||
}
|
||||
}
|
||||
|
||||
await s3Client.stop();
|
||||
continuationToken = response.IsTruncated ? response.NextContinuationToken : undefined;
|
||||
} while (continuationToken);
|
||||
|
||||
await Deno.writeTextFile(
|
||||
`${bucketDir}/_metadata.json`,
|
||||
JSON.stringify({ bucket, objectCount: objects.length }, null, 2)
|
||||
JSON.stringify({ bucket: s3Info.bucket, objectCount }, null, 2)
|
||||
);
|
||||
|
||||
logger.success(`MinIO bucket exported: ${resource.resourceName} (${objects.length} objects)`);
|
||||
logger.success(`MinIO bucket exported: ${resource.resourceName} (${objectCount} objects)`);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -1279,40 +1289,26 @@ export class BackupManager {
|
||||
|
||||
const bucketDir = `${dataDir}/${backupResourceName}`;
|
||||
|
||||
const endpoint = credentials.endpoint || credentials.S3_ENDPOINT;
|
||||
const accessKey = credentials.accessKey || credentials.S3_ACCESS_KEY;
|
||||
const secretKey = credentials.secretKey || credentials.S3_SECRET_KEY;
|
||||
const bucket = credentials.bucket || credentials.S3_BUCKET;
|
||||
|
||||
if (!endpoint || !accessKey || !secretKey || !bucket) {
|
||||
throw new Error('MinIO credentials incomplete');
|
||||
}
|
||||
|
||||
const s3Client = new plugins.smartstorage.SmartStorage({
|
||||
endpoint,
|
||||
accessKey,
|
||||
secretKey,
|
||||
bucket,
|
||||
});
|
||||
|
||||
await s3Client.start();
|
||||
const s3Info = this.getS3ConnectionInfo(credentials);
|
||||
const s3Client = this.createS3Client(s3Info);
|
||||
|
||||
let uploadedCount = 0;
|
||||
|
||||
for await (const entry of Deno.readDir(bucketDir)) {
|
||||
if (entry.name === '_metadata.json') continue;
|
||||
for await (const filePath of this.walkFiles(bucketDir)) {
|
||||
if (plugins.path.basename(filePath) === '_metadata.json') continue;
|
||||
|
||||
const filePath = `${bucketDir}/${entry.name}`;
|
||||
|
||||
if (entry.isFile) {
|
||||
const fileData = await Deno.readFile(filePath);
|
||||
await s3Client.putObject(entry.name, fileData);
|
||||
uploadedCount++;
|
||||
}
|
||||
const fileData = await Deno.readFile(filePath);
|
||||
const objectKey = plugins.path.relative(bucketDir, filePath).replaceAll('\\', '/');
|
||||
await s3Client.send(
|
||||
new plugins.awsS3.PutObjectCommand({
|
||||
Bucket: s3Info.bucket,
|
||||
Key: objectKey,
|
||||
Body: fileData,
|
||||
}),
|
||||
);
|
||||
uploadedCount++;
|
||||
}
|
||||
|
||||
await s3Client.stop();
|
||||
|
||||
logger.success(`MinIO bucket imported: ${resource.resourceName} (${uploadedCount} objects)`);
|
||||
}
|
||||
|
||||
@@ -1585,7 +1581,7 @@ export class BackupManager {
|
||||
return await crypto.subtle.deriveKey(
|
||||
{
|
||||
name: 'PBKDF2',
|
||||
salt,
|
||||
salt: this.toArrayBuffer(salt),
|
||||
iterations: PBKDF2_ITERATIONS,
|
||||
hash: 'SHA-256',
|
||||
},
|
||||
@@ -1600,8 +1596,54 @@ export class BackupManager {
|
||||
* Compute SHA-256 checksum
|
||||
*/
|
||||
private async computeChecksum(data: Uint8Array): Promise<string> {
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', data);
|
||||
const hashBuffer = await crypto.subtle.digest('SHA-256', this.toArrayBuffer(data));
|
||||
const hashArray = new Uint8Array(hashBuffer);
|
||||
return 'sha256:' + Array.from(hashArray).map((b) => b.toString(16).padStart(2, '0')).join('');
|
||||
}
|
||||
|
||||
/**
 * Resolve S3/MinIO connection details from a resource credential record.
 *
 * Accepts both camelCase keys and legacy `S3_*` environment-style keys;
 * the region defaults to `us-east-1` when neither key is present.
 *
 * @throws Error when endpoint, access key, secret key, or bucket is missing.
 */
private getS3ConnectionInfo(credentials: Record<string, string>): IS3ConnectionInfo {
  const endpoint = credentials.endpoint || credentials.S3_ENDPOINT;
  const accessKey = credentials.accessKey || credentials.S3_ACCESS_KEY;
  const secretKey = credentials.secretKey || credentials.S3_SECRET_KEY;
  const bucket = credentials.bucket || credentials.S3_BUCKET;

  // Guard clause: all four core fields are mandatory.
  if (!(endpoint && accessKey && secretKey && bucket)) {
    throw new Error('MinIO credentials incomplete');
  }

  const region = credentials.region || credentials.AWS_REGION || 'us-east-1';
  return { endpoint, accessKey, secretKey, bucket, region };
}
|
||||
|
||||
/**
 * Build an AWS SDK S3 client from resolved connection details.
 * Path-style addressing is forced — MinIO-style endpoints typically
 * require the bucket name in the path rather than the hostname.
 */
private createS3Client(s3Info: IS3ConnectionInfo) {
  const { endpoint, region, accessKey, secretKey } = s3Info;
  return new plugins.awsS3.S3Client({
    endpoint,
    region,
    forcePathStyle: true,
    credentials: { accessKeyId: accessKey, secretAccessKey: secretKey },
  });
}
|
||||
|
||||
private async *walkFiles(directory: string): AsyncGenerator<string> {
|
||||
for await (const entry of Deno.readDir(directory)) {
|
||||
const entryPath = plugins.path.join(directory, entry.name);
|
||||
if (entry.isDirectory) {
|
||||
yield* this.walkFiles(entryPath);
|
||||
} else if (entry.isFile) {
|
||||
yield entryPath;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Copy a Uint8Array's contents into a fresh, standalone ArrayBuffer.
 * Copying guarantees the result covers exactly the view's bytes rather
 * than a slice of some larger shared backing buffer.
 */
private toArrayBuffer(data: Uint8Array): ArrayBuffer {
  const copy = new Uint8Array(data.length);
  copy.set(data);
  return copy.buffer as ArrayBuffer;
}
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user