feat(tests): fully implemented test suite
This commit is contained in:
416
test/suite/einvoice_corpus-validation/test.corp-10.regression.ts
Normal file
416
test/suite/einvoice_corpus-validation/test.corp-10.regression.ts
Normal file
@ -0,0 +1,416 @@
|
||||
import { tap, expect } from '@git.zone/tstest/tapbundle';
|
||||
import { EInvoice } from '../../../ts/index.js';
|
||||
import { InvoiceFormat, ValidationLevel } from '../../../ts/interfaces/common.js';
|
||||
import { CorpusLoader, PerformanceTracker } from '../../helpers/test-utils.js';
|
||||
import * as path from 'path';
|
||||
import * as fs from 'fs/promises';
|
||||
import * as crypto from 'crypto';
|
||||
|
||||
/**
|
||||
* Test ID: CORP-10
|
||||
* Test Description: Regression Testing
|
||||
* Priority: High
|
||||
*
|
||||
* This test ensures that processing results remain consistent across versions
|
||||
* by comparing current results with baseline snapshots.
|
||||
*/
|
||||
|
||||
tap.test('CORP-10: Regression Testing - should maintain consistent processing results', async (t) => {
|
||||
const baselinePath = path.join(process.cwd(), '.nogit', 'regression-baseline.json');
|
||||
const currentResultsPath = path.join(process.cwd(), '.nogit', 'regression-current.json');
|
||||
|
||||
// Load or create baseline
|
||||
let baseline: RegressionBaseline | null = null;
|
||||
try {
|
||||
const baselineData = await fs.readFile(baselinePath, 'utf-8');
|
||||
baseline = JSON.parse(baselineData);
|
||||
console.log(`Loaded baseline from ${baseline?.date}`);
|
||||
} catch (e) {
|
||||
console.log('No baseline found, will create one');
|
||||
}
|
||||
|
||||
// Select representative test files
|
||||
const testSets = [
|
||||
{ category: 'XML_RECHNUNG_UBL', files: 2 },
|
||||
{ category: 'XML_RECHNUNG_CII', files: 2 },
|
||||
{ category: 'ZUGFERD_V2_CORRECT', files: 2 },
|
||||
{ category: 'PEPPOL', files: 1 }
|
||||
];
|
||||
|
||||
const currentResults: RegressionResults = {
|
||||
date: new Date().toISOString(),
|
||||
version: process.env.npm_package_version || 'unknown',
|
||||
files: new Map(),
|
||||
aggregates: {
|
||||
totalFiles: 0,
|
||||
parseSuccesses: 0,
|
||||
validationSuccesses: 0,
|
||||
avgParseTime: 0,
|
||||
fieldCounts: new Map()
|
||||
}
|
||||
};
|
||||
|
||||
const regressions: RegressionIssue[] = [];
|
||||
|
||||
// Process test files
|
||||
for (const testSet of testSets) {
|
||||
try {
|
||||
const files = await CorpusLoader.loadCategory(testSet.category);
|
||||
const selectedFiles = files.slice(0, testSet.files);
|
||||
|
||||
for (const file of selectedFiles) {
|
||||
currentResults.aggregates.totalFiles++;
|
||||
|
||||
const fileResult: FileResult = {
|
||||
path: file.path,
|
||||
size: file.size,
|
||||
hash: '',
|
||||
parseSuccess: false,
|
||||
validationSuccess: false,
|
||||
parseTime: 0,
|
||||
extractedData: {}
|
||||
};
|
||||
|
||||
try {
|
||||
// Calculate file hash
|
||||
const fileBuffer = await CorpusLoader.loadFile(file.path);
|
||||
fileResult.hash = crypto.createHash('md5').update(fileBuffer).digest('hex');
|
||||
|
||||
// Parse and measure time
|
||||
const startTime = Date.now();
|
||||
const invoice = new EInvoice();
|
||||
const xmlString = fileBuffer.toString('utf-8');
|
||||
|
||||
await invoice.fromXmlString(xmlString);
|
||||
fileResult.parseTime = Date.now() - startTime;
|
||||
fileResult.parseSuccess = true;
|
||||
currentResults.aggregates.parseSuccesses++;
|
||||
|
||||
// Extract key data for comparison
|
||||
fileResult.extractedData = {
|
||||
format: invoice.metadata?.format,
|
||||
id: invoice.id,
|
||||
issueDate: invoice.issueDate?.toISOString(),
|
||||
currency: invoice.currency,
|
||||
sellerName: invoice.from?.name,
|
||||
sellerVAT: invoice.from?.vatNumber,
|
||||
buyerName: invoice.to?.name,
|
||||
itemCount: invoice.items?.length || 0,
|
||||
totalNet: invoice.totalNet,
|
||||
totalGross: invoice.totalGross,
|
||||
taxBreakdown: invoice.taxBreakdown?.map(t => ({
|
||||
rate: t.taxPercent,
|
||||
amount: t.taxAmount
|
||||
}))
|
||||
};
|
||||
|
||||
// Count fields
|
||||
const fieldCount = countFields(fileResult.extractedData);
|
||||
currentResults.aggregates.fieldCounts.set(file.path, fieldCount);
|
||||
|
||||
// Validate
|
||||
try {
|
||||
const validationResult = await invoice.validate(ValidationLevel.EXTENDED);
|
||||
fileResult.validationSuccess = validationResult.valid;
|
||||
if (validationResult.valid) {
|
||||
currentResults.aggregates.validationSuccesses++;
|
||||
}
|
||||
fileResult.validationErrors = validationResult.errors?.map(e => e.code || e.message);
|
||||
} catch (valError: any) {
|
||||
fileResult.validationSuccess = false;
|
||||
fileResult.validationErrors = [valError.message];
|
||||
}
|
||||
|
||||
} catch (parseError: any) {
|
||||
fileResult.parseSuccess = false;
|
||||
fileResult.parseError = parseError.message;
|
||||
}
|
||||
|
||||
currentResults.files.set(file.path, fileResult);
|
||||
|
||||
// Compare with baseline if available
|
||||
if (baseline) {
|
||||
const baselineFile = baseline.files.get(file.path);
|
||||
if (baselineFile) {
|
||||
const regression = compareResults(file.path, baselineFile, fileResult);
|
||||
if (regression) {
|
||||
regressions.push(regression);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
console.log(`Error processing ${testSet.category}: ${e}`);
|
||||
}
|
||||
}
|
||||
|
||||
// Calculate aggregates
|
||||
const parseTimes = Array.from(currentResults.files.values())
|
||||
.filter(f => f.parseSuccess)
|
||||
.map(f => f.parseTime);
|
||||
currentResults.aggregates.avgParseTime = parseTimes.length > 0 ?
|
||||
parseTimes.reduce((a, b) => a + b, 0) / parseTimes.length : 0;
|
||||
|
||||
// Report results
|
||||
console.log('\n=== REGRESSION TEST RESULTS ===\n');
|
||||
console.log(`Total files tested: ${currentResults.aggregates.totalFiles}`);
|
||||
console.log(`Parse successes: ${currentResults.aggregates.parseSuccesses}`);
|
||||
console.log(`Validation successes: ${currentResults.aggregates.validationSuccesses}`);
|
||||
console.log(`Average parse time: ${currentResults.aggregates.avgParseTime.toFixed(2)}ms`);
|
||||
|
||||
if (baseline) {
|
||||
console.log('\nCOMPARISON WITH BASELINE:');
|
||||
|
||||
// Compare aggregates
|
||||
const parseRateDiff = (currentResults.aggregates.parseSuccesses / currentResults.aggregates.totalFiles) -
|
||||
(baseline.aggregates.parseSuccesses / baseline.aggregates.totalFiles);
|
||||
const validationRateDiff = (currentResults.aggregates.validationSuccesses / currentResults.aggregates.totalFiles) -
|
||||
(baseline.aggregates.validationSuccesses / baseline.aggregates.totalFiles);
|
||||
const parseTimeDiff = currentResults.aggregates.avgParseTime - baseline.aggregates.avgParseTime;
|
||||
|
||||
console.log(` Parse rate change: ${(parseRateDiff * 100).toFixed(2)}%`);
|
||||
console.log(` Validation rate change: ${(validationRateDiff * 100).toFixed(2)}%`);
|
||||
console.log(` Parse time change: ${parseTimeDiff > 0 ? '+' : ''}${parseTimeDiff.toFixed(2)}ms`);
|
||||
|
||||
// Performance regression check
|
||||
if (parseTimeDiff > baseline.aggregates.avgParseTime * 0.2) {
|
||||
regressions.push({
|
||||
type: 'performance',
|
||||
file: 'aggregate',
|
||||
message: `Performance regression: average parse time increased by ${((parseTimeDiff / baseline.aggregates.avgParseTime) * 100).toFixed(1)}%`
|
||||
});
|
||||
}
|
||||
|
||||
if (regressions.length > 0) {
|
||||
console.log('\nREGRESSIONS DETECTED:');
|
||||
regressions.forEach(r => {
|
||||
console.log(` [${r.type}] ${r.file}: ${r.message}`);
|
||||
if (r.details) {
|
||||
console.log(` Details: ${r.details}`);
|
||||
}
|
||||
});
|
||||
} else {
|
||||
console.log('\n✓ No regressions detected');
|
||||
}
|
||||
}
|
||||
|
||||
// Save current results
|
||||
try {
|
||||
await fs.mkdir(path.dirname(currentResultsPath), { recursive: true });
|
||||
|
||||
// Convert Map to object for JSON serialization
|
||||
const resultsForSave = {
|
||||
...currentResults,
|
||||
files: Object.fromEntries(currentResults.files),
|
||||
aggregates: {
|
||||
...currentResults.aggregates,
|
||||
fieldCounts: Object.fromEntries(currentResults.aggregates.fieldCounts)
|
||||
}
|
||||
};
|
||||
|
||||
await fs.writeFile(currentResultsPath, JSON.stringify(resultsForSave, null, 2));
|
||||
console.log(`\nCurrent results saved to: ${currentResultsPath}`);
|
||||
|
||||
if (!baseline) {
|
||||
// Create baseline if it doesn't exist
|
||||
await fs.writeFile(baselinePath, JSON.stringify(resultsForSave, null, 2));
|
||||
console.log(`Baseline created at: ${baselinePath}`);
|
||||
}
|
||||
} catch (e) {
|
||||
console.error('Error saving results:', e);
|
||||
}
|
||||
|
||||
// Test specific regression scenarios
|
||||
t.test('Field extraction consistency', async (st) => {
|
||||
const criticalFields = ['id', 'currency', 'sellerVAT', 'totalNet'];
|
||||
let fieldConsistency = true;
|
||||
|
||||
currentResults.files.forEach((result, filePath) => {
|
||||
if (result.parseSuccess && baseline) {
|
||||
const baselineResult = baseline.files.get(filePath);
|
||||
if (baselineResult?.parseSuccess) {
|
||||
for (const field of criticalFields) {
|
||||
const current = result.extractedData[field];
|
||||
const base = baselineResult.extractedData[field];
|
||||
|
||||
if (current !== base && !(current === undefined && base === null)) {
|
||||
st.fail(`Field ${field} changed in ${path.basename(filePath)}: ${base} -> ${current}`);
|
||||
fieldConsistency = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (fieldConsistency) {
|
||||
st.pass('✓ Critical fields remain consistent');
|
||||
}
|
||||
});
|
||||
|
||||
t.test('Validation stability', async (st) => {
|
||||
let validationStable = true;
|
||||
|
||||
currentResults.files.forEach((result, filePath) => {
|
||||
if (baseline) {
|
||||
const baselineResult = baseline.files.get(filePath);
|
||||
if (baselineResult) {
|
||||
if (result.validationSuccess !== baselineResult.validationSuccess) {
|
||||
st.fail(`Validation result changed for ${path.basename(filePath)}: ${baselineResult.validationSuccess} -> ${result.validationSuccess}`);
|
||||
validationStable = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (validationStable) {
|
||||
st.pass('✓ Validation results remain stable');
|
||||
}
|
||||
});
|
||||
|
||||
t.test('Performance benchmarks', async (st) => {
|
||||
// Test that parsing doesn't exceed thresholds
|
||||
const performanceThresholds = {
|
||||
small: 50, // < 50KB files should parse in < 50ms
|
||||
medium: 100, // < 200KB files should parse in < 100ms
|
||||
large: 500 // > 200KB files should parse in < 500ms
|
||||
};
|
||||
|
||||
let performanceOk = true;
|
||||
|
||||
currentResults.files.forEach((result, filePath) => {
|
||||
if (result.parseSuccess) {
|
||||
const threshold = result.size < 50 * 1024 ? performanceThresholds.small :
|
||||
result.size < 200 * 1024 ? performanceThresholds.medium :
|
||||
performanceThresholds.large;
|
||||
|
||||
if (result.parseTime > threshold) {
|
||||
st.comment(`Performance warning: ${path.basename(filePath)} (${(result.size/1024).toFixed(0)}KB) took ${result.parseTime}ms (threshold: ${threshold}ms)`);
|
||||
performanceOk = false;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (performanceOk) {
|
||||
st.pass('✓ All files parsed within performance thresholds');
|
||||
}
|
||||
});
|
||||
|
||||
// Assertions
|
||||
expect(regressions.length).toBeLessThan(3); // Allow maximum 2 regressions
|
||||
expect(currentResults.aggregates.parseSuccesses).toBeGreaterThan(currentResults.aggregates.totalFiles * 0.9);
|
||||
|
||||
if (baseline) {
|
||||
// Parse rate should not decrease by more than 5%
|
||||
const currentParseRate = currentResults.aggregates.parseSuccesses / currentResults.aggregates.totalFiles;
|
||||
const baselineParseRate = baseline.aggregates.parseSuccesses / baseline.aggregates.totalFiles;
|
||||
expect(currentParseRate).toBeGreaterThan(baselineParseRate * 0.95);
|
||||
}
|
||||
});
|
||||
|
||||
// Type definitions
|
||||
/**
 * Snapshot of a previous regression run, used as the comparison baseline.
 * Persisted as JSON under `.nogit/regression-baseline.json`; the Map members
 * are serialized as plain objects (via Object.fromEntries) when saved.
 */
interface RegressionBaseline {
  // ISO-8601 timestamp of when the snapshot was taken.
  date: string;
  // Package version (npm_package_version) active when the snapshot was made.
  version: string;
  // Per-file processing results, keyed by corpus file path.
  files: Map<string, FileResult>;
  // Aggregated statistics across all files in the run.
  aggregates: {
    totalFiles: number;
    parseSuccesses: number;
    validationSuccesses: number;
    // Average parse time in milliseconds over successfully parsed files.
    avgParseTime: number;
    // Number of extracted fields per file path (see countFields).
    fieldCounts: Map<string, number>;
  };
}
|
||||
|
||||
// The current run's results; structurally identical to the stored baseline.
interface RegressionResults extends RegressionBaseline {}
|
||||
|
||||
/** Processing outcome for a single corpus file. */
interface FileResult {
  // Corpus-relative path of the file.
  path: string;
  // File size in bytes.
  size: number;
  // MD5 content hash, used only to detect that the input file itself changed.
  hash: string;
  // Whether EInvoice.fromXmlString succeeded.
  parseSuccess: boolean;
  // Error message when parsing failed.
  parseError?: string;
  // Wall-clock parse duration in milliseconds (0 when parsing failed).
  parseTime: number;
  // Whether EXTENDED-level validation reported the invoice as valid.
  validationSuccess: boolean;
  // Validation error codes/messages, when any were reported.
  validationErrors?: string[];
  // Key invoice fields extracted for cross-version comparison.
  extractedData: any;
}
|
||||
|
||||
/** A single detected regression relative to the baseline. */
interface RegressionIssue {
  // Category of regression (parse failure, validation flip, data drift, slowdown).
  type: 'parse' | 'validation' | 'data' | 'performance';
  // Base name of the affected file, or 'aggregate' for run-wide issues.
  file: string;
  // Human-readable summary of what regressed.
  message: string;
  // Optional extra detail (error text, old -> new values, timings).
  details?: string;
}
|
||||
|
||||
// Helper function to compare results
|
||||
function compareResults(filePath: string, baseline: FileResult, current: FileResult): RegressionIssue | null {
|
||||
// Check parse regression
|
||||
if (baseline.parseSuccess && !current.parseSuccess) {
|
||||
return {
|
||||
type: 'parse',
|
||||
file: path.basename(filePath),
|
||||
message: 'File no longer parses successfully',
|
||||
details: current.parseError
|
||||
};
|
||||
}
|
||||
|
||||
// Check validation regression
|
||||
if (baseline.validationSuccess && !current.validationSuccess) {
|
||||
return {
|
||||
type: 'validation',
|
||||
file: path.basename(filePath),
|
||||
message: 'File no longer validates successfully',
|
||||
details: current.validationErrors?.join(', ')
|
||||
};
|
||||
}
|
||||
|
||||
// Check data consistency (only for successfully parsed files)
|
||||
if (baseline.parseSuccess && current.parseSuccess) {
|
||||
const criticalFields = ['id', 'currency', 'totalNet', 'itemCount'];
|
||||
for (const field of criticalFields) {
|
||||
const baseValue = baseline.extractedData[field];
|
||||
const currValue = current.extractedData[field];
|
||||
|
||||
if (baseValue !== currValue && !(baseValue === null && currValue === undefined)) {
|
||||
return {
|
||||
type: 'data',
|
||||
file: path.basename(filePath),
|
||||
message: `Field '${field}' value changed`,
|
||||
details: `${baseValue} -> ${currValue}`
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check performance regression (>50% increase)
|
||||
if (baseline.parseSuccess && current.parseSuccess) {
|
||||
if (current.parseTime > baseline.parseTime * 1.5) {
|
||||
return {
|
||||
type: 'performance',
|
||||
file: path.basename(filePath),
|
||||
message: 'Significant performance degradation',
|
||||
details: `${baseline.parseTime}ms -> ${current.parseTime}ms`
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Helper function to count fields
|
||||
function countFields(obj: any, depth = 0): number {
|
||||
if (depth > 5) return 0; // Prevent infinite recursion
|
||||
|
||||
let count = 0;
|
||||
for (const key in obj) {
|
||||
if (obj[key] !== null && obj[key] !== undefined) {
|
||||
count++;
|
||||
if (typeof obj[key] === 'object' && !Array.isArray(obj[key])) {
|
||||
count += countFields(obj[key], depth + 1);
|
||||
}
|
||||
}
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
// Kick off execution of all tap tests registered above.
tap.start();
|
Reference in New Issue
Block a user