commit 960bbc2208
parent 756964aabd
2025-05-29 13:35:36 +00:00
15 changed files with 2373 additions and 3396 deletions


@@ -6,25 +6,19 @@
import { tap } from '@git.zone/tstest/tapbundle';
import * as plugins from '../../plugins.js';
import { EInvoice } from '../../../ts/index.js';
import { FormatDetector } from '../../../ts/formats/utils/format.detector.js';
import { CorpusLoader } from '../../suite/corpus.loader.js';
import { PerformanceTracker } from '../../suite/performance.tracker.js';
const performanceTracker = new PerformanceTracker('PERF-07: Concurrent Processing');
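// Each measureAsync() call below times the wrapped operation under its label; the collected timings are reported via getSummary() at the end of the test.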
tap.test('PERF-07: Concurrent Processing - should handle concurrent operations efficiently', async () => {
// Test 1: Concurrent format detection
await performanceTracker.measureAsync(
'concurrent-format-detection',
async () => {
// Create test data with different formats
const testData = [
...Array(25).fill(null).map((_, i) => ({
@@ -42,7 +36,10 @@
];
// Test different concurrency levels
const levels = [1, 4, 8, 16, 32];
console.log('\nConcurrent Format Detection:');
console.log('Concurrency | Duration | Throughput | Accuracy');
console.log('------------|----------|------------|----------');
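// Sweep the concurrency levels: each pass detects the format of every test document in batches of that size and reports duration, throughput, and detection accuracy.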
for (const concurrency of levels) {
const startTime = Date.now();
@@ -50,16 +47,10 @@
let correct = 0;
// Process in batches
for (let i = 0; i < testData.length; i += concurrency) {
const batch = testData.slice(i, i + concurrency);
const promises = batch.map(async (item) => {
const format = await FormatDetector.detectFormat(item.content);
completed++;
// Verify correctness
@@ -77,203 +68,134 @@
const duration = Date.now() - startTime;
const throughput = (completed / (duration / 1000));
const accuracy = ((correct / completed) * 100).toFixed(2);
console.log(`${String(concurrency).padEnd(11)} | ${String(duration + 'ms').padEnd(8)} | ${throughput.toFixed(2).padEnd(10)}/s | ${accuracy}%`);
}
}
);
// Test 2: Concurrent validation
await performanceTracker.measureAsync(
'concurrent-validation',
async () => {
console.log('\nConcurrent Validation:');
// Create test invoice XMLs
const createInvoiceXml = (id: number, itemCount: number) => {
const items = Array.from({ length: itemCount }, (_, i) => `
<cac:InvoiceLine>
<cbc:ID>${i + 1}</cbc:ID>
<cbc:InvoicedQuantity unitCode="EA">1</cbc:InvoicedQuantity>
<cbc:LineExtensionAmount currencyID="USD">100.00</cbc:LineExtensionAmount>
<cac:Item>
<cbc:Description>Item ${i + 1}</cbc:Description>
</cac:Item>
</cac:InvoiceLine>`).join('');
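// Wrap the generated lines in a minimal UBL invoice envelope whose totals match the line count.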
return `<?xml version="1.0" encoding="UTF-8"?>
<Invoice xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2"
xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2"
xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2">
<cbc:ID>INV-${id}</cbc:ID>
<cbc:IssueDate>2024-02-20</cbc:IssueDate>
<cac:AccountingSupplierParty>
<cac:Party>
<cac:PartyName>
<cbc:Name>Test Seller</cbc:Name>
</cac:PartyName>
</cac:Party>
</cac:AccountingSupplierParty>
<cac:AccountingCustomerParty>
<cac:Party>
<cac:PartyName>
<cbc:Name>Test Buyer</cbc:Name>
</cac:PartyName>
</cac:Party>
</cac:AccountingCustomerParty>
<cac:LegalMonetaryTotal>
<cbc:TaxExclusiveAmount currencyID="USD">${(itemCount * 100).toFixed(2)}</cbc:TaxExclusiveAmount>
<cbc:PayableAmount currencyID="USD">${(itemCount * 100).toFixed(2)}</cbc:PayableAmount>
</cac:LegalMonetaryTotal>${items}
</Invoice>`;
};
// Test scenarios
const scenarios = [
{ name: 'Small invoices (5 items)', count: 30, itemCount: 5 },
{ name: 'Medium invoices (20 items)', count: 20, itemCount: 20 },
{ name: 'Large invoices (50 items)', count: 10, itemCount: 50 }
];
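// Each scenario validates its generated invoices with a fixed concurrency of 8; larger line counts exercise heavier per-document parsing and validation.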
for (const scenario of scenarios) {
console.log(`\n${scenario.name}:`);
const invoices = Array.from({ length: scenario.count }, (_, i) =>
createInvoiceXml(i, scenario.itemCount)
);
// Validate with a fixed concurrency level
const concurrency = 8;
const startTime = Date.now();
let validCount = 0;
// Process concurrently
for (let i = 0; i < invoices.length; i += concurrency) {
const batch = invoices.slice(i, i + concurrency);
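// Validate the batch concurrently; a document that fails to parse or validate counts as invalid instead of aborting the batch.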
const results = await Promise.all(
batch.map(async (invoiceXml) => {
try {
const einvoice = await EInvoice.fromXml(invoiceXml);
const result = await einvoice.validate();
return result.isValid;
} catch {
return false;
}
})
);
validCount += results.filter(v => v).length;
}
const duration = Date.now() - startTime;
const throughput = (scenario.count / (duration / 1000)).toFixed(2);
const validationRate = ((validCount / scenario.count) * 100).toFixed(2);
console.log(` - Processed: ${scenario.count} invoices`);
console.log(` - Duration: ${duration}ms`);
console.log(` - Throughput: ${throughput} invoices/sec`);
console.log(` - Validation rate: ${validationRate}%`);
}
}
);
// Test 3: Concurrent file processing
await performanceTracker.measureAsync(
'concurrent-file-processing',
async () => {
console.log('\nConcurrent File Processing:');
const testDataset = await CorpusLoader.createTestDataset({
formats: ['UBL', 'CII'],
maxFiles: 50,
validOnly: true
});
const files = testDataset.map(f => f.path).filter(p => p.endsWith('.xml'));
console.log(`Processing ${files.length} files from corpus...`);
// Test different concurrency strategies
const strategies = [
{ name: 'Sequential', concurrency: 1 },
{ name: 'Conservative', concurrency: 4 },
{ name: 'Moderate', concurrency: 8 },
{ name: 'Aggressive', concurrency: 16 }
];
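// Run the same corpus sample under each strategy, capping in-flight work at the strategy's concurrency via the activePromises set below.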
for (const strategy of strategies) {
const startTime = Date.now();
let processed = 0;
let errors = 0;
// Process files with specified concurrency
const queue = [...files];
const activePromises = new Set<Promise<void>>();
while (queue.length > 0 || activePromises.size > 0) {
// Start new tasks up to concurrency limit
@@ -282,14 +204,18 @@
const promise = (async () => {
try {
const content = await plugins.fs.readFile(file, 'utf-8');
const format = await FormatDetector.detectFormat(content);
if (format && format !== 'unknown' && format !== 'pdf' && format !== 'xml') {
try {
const invoice = await EInvoice.fromXml(content);
await invoice.validate();
processed++;
} catch {
// Skip unparseable files
}
}
} catch {
errors++;
}
})();
@@ -305,359 +231,130 @@
}
const duration = Date.now() - startTime;
const throughput = (processed / (duration / 1000)).toFixed(2);
console.log(`\n${strategy.name} (concurrency: ${strategy.concurrency}):`);
console.log(` - Duration: ${duration}ms`);
console.log(` - Processed: ${processed} files`);
console.log(` - Throughput: ${throughput} files/sec`);
console.log(` - Errors: ${errors}`);
}
}
);
// Test 4: Mixed operations
await performanceTracker.measureAsync(
'mixed-operations',
async () => {
console.log('\nMixed Operations Concurrency:');
// Define operations
const operations = [
{
name: 'detect',
fn: async () => {
const xml = `<?xml version="1.0"?><Invoice xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2"><ID>TEST</ID></Invoice>`;
return await FormatDetector.detectFormat(xml);
}
},
{
name: 'parse',
fn: async () => {
const xml = `<?xml version="1.0"?><Invoice xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2"><ID>TEST</ID><IssueDate>2024-01-01</IssueDate></Invoice>`;
const invoice = await EInvoice.fromXml(xml);
return invoice.getFormat();
}
},
{
name: 'validate',
fn: async () => {
const xml = `<?xml version="1.0"?><Invoice xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2">
<cbc:ID>TEST</cbc:ID>
<cbc:IssueDate>2024-02-20</cbc:IssueDate>
<cac:AccountingSupplierParty><cac:Party><cac:PartyName><cbc:Name>Seller</cbc:Name></cac:PartyName></cac:Party></cac:AccountingSupplierParty>
<cac:AccountingCustomerParty><cac:Party><cac:PartyName><cbc:Name>Buyer</cbc:Name></cac:PartyName></cac:Party></cac:AccountingCustomerParty>
</Invoice>`;
const invoice = await EInvoice.fromXml(xml);
return await invoice.validate();
}
}
];
// Test mixed workload
const totalOperations = 150;
const operationMix = Array.from({ length: totalOperations }, (_, i) => ({
operation: operations[i % operations.length],
id: i
}));
// Shuffle to simulate real-world mix
for (let i = operationMix.length - 1; i > 0; i--) {
const j = Math.floor(Math.random() * (i + 1));
[operationMix[i], operationMix[j]] = [operationMix[j], operationMix[i]];
}
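// Execute the shuffled mix in fixed-size concurrent batches, counting successful completions per operation type.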
const concurrency = 10;
const startTime = Date.now();
const operationCounts = new Map(operations.map(op => [op.name, 0]));
// Process operations
for (let i = 0; i < operationMix.length; i += concurrency) {
const batch = operationMix.slice(i, i + concurrency);
await Promise.all(batch.map(async ({ operation }) => {
try {
await operation.fn();
operationCounts.set(operation.name, operationCounts.get(operation.name)! + 1);
} catch {
// Ignore errors
}
}));
}
const totalDuration = Date.now() - startTime;
const throughput = (totalOperations / (totalDuration / 1000)).toFixed(2);
console.log(` Total operations: ${totalOperations}`);
console.log(` Duration: ${totalDuration}ms`);
console.log(` Throughput: ${throughput} ops/sec`);
console.log(` Operation breakdown:`);
operationCounts.forEach((count, name) => {
console.log(` - ${name}: ${count} operations`);
});
}
);
// Test 5: Resource contention
await performanceTracker.measureAsync(
'resource-contention',
async () => {
console.log('\nResource Contention Test:');
const xml = `<?xml version="1.0"?><Invoice xmlns="urn:oasis:names:specification:ubl:schema:xsd:Invoice-2" xmlns:cac="urn:oasis:names:specification:ubl:schema:xsd:CommonAggregateComponents-2" xmlns:cbc="urn:oasis:names:specification:ubl:schema:xsd:CommonBasicComponents-2">
<cbc:ID>CONTENTION-TEST</cbc:ID>
<cbc:IssueDate>2024-02-20</cbc:IssueDate>
<cac:AccountingSupplierParty><cac:Party><cac:PartyName><cbc:Name>Seller</cbc:Name></cac:PartyName></cac:Party></cac:AccountingSupplierParty>
<cac:AccountingCustomerParty><cac:Party><cac:PartyName><cbc:Name>Buyer</cbc:Name></cac:PartyName></cac:Party></cac:AccountingCustomerParty>
</Invoice>`;
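// Validate the same small document at increasing concurrency levels to surface contention in parsing and validation.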
const concurrencyLevels = [1, 10, 50, 100];
console.log('Concurrency | Duration | Throughput');
console.log('------------|----------|------------');
for (const level of concurrencyLevels) {
const start = Date.now();
const promises = Array(level).fill(null).map(async () => {
const invoice = await EInvoice.fromXml(xml);
return invoice.validate();
});
await Promise.all(promises);
const duration = Date.now() - start;
const throughput = (level / (duration / 1000)).toFixed(2);
console.log(`${String(level).padEnd(11)} | ${String(duration + 'ms').padEnd(8)} | ${throughput} ops/sec`);
}
}
);
// Overall summary
console.log('\n=== PERF-07: Overall Performance Summary ===');
console.log(performanceTracker.getSummary());
});
tap.start();