import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as einvoice from '../../../ts/index.js';
import * as plugins from '../../plugins.js';
import { PerformanceTracker } from '../../helpers/performance.tracker.js';

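// ERR-04 exercises how invoice processing should degrade when remote
// validation services fail: timeouts, socket-level errors, HTTP error
// responses, plus the retry, circuit-breaker, and fallback patterns that
// keep the pipeline resilient. All network failures are simulated locally.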
tap.test('ERR-04: Network/API Errors - Handle remote validation and service failures', async (t) => {
  const performanceTracker = new PerformanceTracker('ERR-04');

  await t.test('Network timeout errors', async () => {
    performanceTracker.startOperation('network-timeouts');

    const timeoutScenarios = [
      {
        name: 'Validation API timeout',
        endpoint: 'https://validator.example.com/validate',
        timeout: 5000,
        expectedError: /timeout|timed out|request timeout/i
      },
      {
        name: 'Schema download timeout',
        endpoint: 'https://schemas.example.com/en16931.xsd',
        timeout: 3000,
        expectedError: /timeout|failed to download|connection timeout/i
      },
      {
        name: 'Code list fetch timeout',
        endpoint: 'https://codelists.example.com/currencies.xml',
        timeout: 2000,
        expectedError: /timeout|unavailable|failed to fetch/i
      }
    ];

    for (const scenario of timeoutScenarios) {
      const startTime = performance.now();

      try {
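        // Note: the timeout below is fabricated so the suite stays offline
        // and fast. In production code a request timeout would typically be
        // enforced via AbortSignal.timeout() or an AbortController passed to
        // fetch() (assumes Node 17.3+ or another fetch-capable runtime).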
        // Simulate a network timeout
        const timeoutPromise = new Promise((_, reject) => {
          setTimeout(() => {
            reject(new Error(`Network timeout: Failed to connect to ${scenario.endpoint} after ${scenario.timeout}ms`));
          }, 100); // simulate a quick timeout to keep the test fast
        });

        await timeoutPromise;
        expect(false).toBeTrue(); // Should not reach here
      } catch (error) {
        expect(error).toBeTruthy();
        expect(error.message).toMatch(scenario.expectedError);
        console.log(`✓ ${scenario.name}: ${error.message}`);
      }

      performanceTracker.recordMetric('timeout-handling', performance.now() - startTime);
    }

    performanceTracker.endOperation('network-timeouts');
  });

  await t.test('Connection failure errors', async () => {
    performanceTracker.startOperation('connection-failures');

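    // ENOTFOUND, ECONNREFUSED and ENETUNREACH are standard Node.js/libuv
    // error codes exposed on the `code` property of socket errors;
    // CERT_INVALID is a stand-in here for the family of TLS certificate
    // verification errors.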
    const connectionErrors = [
      {
        name: 'DNS resolution failure',
        error: 'ENOTFOUND',
        message: 'getaddrinfo ENOTFOUND validator.invalid-domain.com',
        expectedError: /enotfound|dns|cannot resolve/i
      },
      {
        name: 'Connection refused',
        error: 'ECONNREFUSED',
        message: 'connect ECONNREFUSED 127.0.0.1:8080',
        expectedError: /econnrefused|connection refused|cannot connect/i
      },
      {
        name: 'Network unreachable',
        error: 'ENETUNREACH',
        message: 'connect ENETUNREACH 192.168.1.100:443',
        expectedError: /enetunreach|network unreachable|no route/i
      },
      {
        name: 'SSL/TLS error',
        error: 'CERT_INVALID',
        message: 'SSL certificate verification failed',
        expectedError: /ssl|tls|certificate/i
      }
    ];

    for (const connError of connectionErrors) {
      const startTime = performance.now();

      try {
        // Simulate a connection error carrying a Node-style error code
        const error = new Error(connError.message);
        (error as any).code = connError.error;
        throw error;
      } catch (error) {
        expect(error).toBeTruthy();
        expect(error.message).toMatch(connError.expectedError);
        console.log(`✓ ${connError.name}: ${error.message}`);
      }

      performanceTracker.recordMetric('connection-error-handling', performance.now() - startTime);
    }

    performanceTracker.endOperation('connection-failures');
  });

  await t.test('HTTP error responses', async () => {
    performanceTracker.startOperation('http-errors');

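    // Each case mimics the shape of a fetch()-style Response object
    // (ok / status / statusText / async json()) so the status-to-error
    // mapping can be exercised without real network traffic.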
    const httpErrors = [
      {
        status: 400,
        statusText: 'Bad Request',
        body: { error: 'Invalid invoice format' },
        expectedError: /bad request|invalid.*format|400/i
      },
      {
        status: 401,
        statusText: 'Unauthorized',
        body: { error: 'API key required' },
        expectedError: /unauthorized|api key|401/i
      },
      {
        status: 403,
        statusText: 'Forbidden',
        body: { error: 'Rate limit exceeded' },
        expectedError: /forbidden|rate limit|403/i
      },
      {
        status: 404,
        statusText: 'Not Found',
        body: { error: 'Validation endpoint not found' },
        expectedError: /not found|404|endpoint/i
      },
      {
        status: 500,
        statusText: 'Internal Server Error',
        body: { error: 'Validation service error' },
        expectedError: /server error|500|service error/i
      },
      {
        status: 503,
        statusText: 'Service Unavailable',
        body: { error: 'Service temporarily unavailable' },
        expectedError: /unavailable|503|maintenance/i
      }
    ];

    for (const httpError of httpErrors) {
      const startTime = performance.now();

      try {
        // Simulate an HTTP error response
        const response = {
          ok: false,
          status: httpError.status,
          statusText: httpError.statusText,
          json: async () => httpError.body
        };

        if (!response.ok) {
          const body = await response.json();
          throw new Error(`HTTP ${response.status}: ${body.error || response.statusText}`);
        }
      } catch (error) {
        expect(error).toBeTruthy();
        expect(error.message).toMatch(httpError.expectedError);
        console.log(`✓ HTTP ${httpError.status}: ${error.message}`);
      }

      performanceTracker.recordMetric('http-error-handling', performance.now() - startTime);
    }

    performanceTracker.endOperation('http-errors');
  });

  await t.test('Retry mechanisms', async () => {
    performanceTracker.startOperation('retry-mechanisms');

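    /**
     * Minimal retry helper: runs the operation up to maxAttempts times,
     * doubling the wait between attempts (100ms, then 200ms). Note that
     * `attempts` is instance state and never resets, so each
     * RetryableOperation instance is intended for a single logical operation.
     */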
    class RetryableOperation {
      private attempts = 0;
      private maxAttempts = 3;
      private backoffMs = 100;

      async executeWithRetry(operation: () => Promise<any>): Promise<any> {
        while (this.attempts < this.maxAttempts) {
          this.attempts++;

          try {
            return await operation();
          } catch (error) {
            if (this.attempts >= this.maxAttempts) {
              throw new Error(`Operation failed after ${this.attempts} attempts: ${error.message}`);
            }

            // Exponential backoff: 100ms, 200ms, 400ms, ...
            const delay = this.backoffMs * Math.pow(2, this.attempts - 1);
            console.log(`  Retry ${this.attempts}/${this.maxAttempts} after ${delay}ms...`);
            await new Promise(resolve => setTimeout(resolve, delay));
          }
        }
      }
    }

    const retryScenarios = [
      {
        name: 'Successful after 2 retries',
        failCount: 2,
        shouldSucceed: true
      },
      {
        name: 'Failed after max retries',
        failCount: 5,
        shouldSucceed: false
      },
      {
        name: 'Immediate success',
        failCount: 0,
        shouldSucceed: true
      }
    ];

    for (const scenario of retryScenarios) {
      const startTime = performance.now();
      let attemptCount = 0;

      const operation = async () => {
        attemptCount++;
        if (attemptCount <= scenario.failCount) {
          throw new Error('Temporary network error');
        }
        return { success: true, data: 'Validation result' };
      };

      const retryable = new RetryableOperation();

      try {
        const result = await retryable.executeWithRetry(operation);
        expect(scenario.shouldSucceed).toBeTrue();
        expect(result.success).toBeTrue();
        console.log(`✓ ${scenario.name}: Success after ${attemptCount} attempts`);
      } catch (error) {
        expect(scenario.shouldSucceed).toBeFalse();
        console.log(`✓ ${scenario.name}: ${error.message}`);
      }

      performanceTracker.recordMetric('retry-execution', performance.now() - startTime);
    }

    performanceTracker.endOperation('retry-mechanisms');
  });

  await t.test('Circuit breaker pattern', async () => {
    performanceTracker.startOperation('circuit-breaker');

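    /**
     * Textbook circuit breaker: starts `closed` (calls pass through), trips
     * to `open` after `threshold` consecutive failures (calls fail fast
     * without touching the service), and after `timeout` ms admits a single
     * probe in `half-open` state; success closes the circuit again, another
     * failure re-opens it.
     */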
    class CircuitBreaker {
      private failures = 0;
      private lastFailureTime = 0;
      private state: 'closed' | 'open' | 'half-open' = 'closed';
      private readonly threshold = 3;
      private readonly timeout = 1000; // 1 second

      async execute(operation: () => Promise<any>): Promise<any> {
        if (this.state === 'open') {
          if (Date.now() - this.lastFailureTime > this.timeout) {
            this.state = 'half-open';
            console.log('  Circuit breaker: half-open (testing)');
          } else {
            throw new Error('Circuit breaker is OPEN - service unavailable');
          }
        }

        try {
          const result = await operation();
          if (this.state === 'half-open') {
            this.state = 'closed';
            this.failures = 0;
            console.log('  Circuit breaker: closed (recovered)');
          }
          return result;
        } catch (error) {
          this.failures++;
          this.lastFailureTime = Date.now();

          if (this.failures >= this.threshold) {
            this.state = 'open';
            console.log('  Circuit breaker: OPEN (threshold reached)');
          }

          throw error;
        }
      }
    }

    const breaker = new CircuitBreaker();
    let callCount = 0;

    // Simulate multiple failures
    for (let i = 0; i < 5; i++) {
      const startTime = performance.now();

      try {
        await breaker.execute(async () => {
          callCount++;
          throw new Error('Service unavailable');
        });
      } catch (error) {
        console.log(`  Attempt ${i + 1}: ${error.message}`);
        expect(error.message).toBeTruthy();
      }

      performanceTracker.recordMetric('circuit-breaker-call', performance.now() - startTime);
    }

    // With a threshold of 3, only the first 3 attempts reach the service;
    // attempts 4 and 5 must be rejected by the open circuit.
    expect(callCount).toEqual(3);

    // Wait past the breaker timeout so it transitions to half-open, then verify recovery
    await new Promise(resolve => setTimeout(resolve, 1100));

    try {
      await breaker.execute(async () => {
        return { success: true };
      });
      console.log('✓ Circuit breaker recovered after timeout');
    } catch (error) {
      console.log(`✗ Circuit breaker still failing: ${error.message}`);
    }

    performanceTracker.endOperation('circuit-breaker');
  });

  await t.test('Fallback strategies', async () => {
    performanceTracker.startOperation('fallback-strategies');

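    // Three degradation levels are exercised: serve stale-but-local data
    // (cache), narrow the scope (basic validation only), or reroute (backup
    // service). Each fallback result describes how it differs from the
    // primary path so callers can surface the degradation.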
    const fallbackStrategies: Array<{
      name: string;
      primary: () => Promise<any>;
      fallback: () => Promise<any>;
    }> = [
      {
        name: 'Local cache fallback',
        primary: async () => { throw new Error('Remote validation failed'); },
        fallback: async () => {
          console.log('  Using cached validation rules...');
          return { valid: true, source: 'cache', warning: 'Using cached rules - may be outdated' };
        }
      },
      {
        name: 'Degraded validation',
        primary: async () => { throw new Error('Full validation service unavailable'); },
        fallback: async () => {
          console.log('  Performing basic validation only...');
          return { valid: true, level: 'basic', warning: 'Only basic validation performed' };
        }
      },
      {
        name: 'Alternative service',
        primary: async () => { throw new Error('Primary validator down'); },
        fallback: async () => {
          console.log('  Switching to backup validator...');
          return { valid: true, source: 'backup', latency: 'higher' };
        }
      }
    ];

    for (const strategy of fallbackStrategies) {
      const startTime = performance.now();

      try {
        await strategy.primary();
      } catch (primaryError) {
        console.log(`  Primary failed: ${primaryError.message}`);

        try {
          const result = await strategy.fallback();
          console.log(`✓ ${strategy.name}: Fallback successful`);
          if (result.warning) {
            console.log(`  ⚠️ ${result.warning}`);
          }
        } catch (fallbackError) {
          console.log(`✗ ${strategy.name}: Fallback also failed`);
        }
      }

      performanceTracker.recordMetric('fallback-execution', performance.now() - startTime);
    }

    performanceTracker.endOperation('fallback-strategies');
  });

  await t.test('Network error recovery patterns', async () => {
    performanceTracker.startOperation('recovery-patterns');

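    // Jitter spreads retries from many clients so a recovering service is
    // not hit in lockstep. With jitter = 0.3 the exponential delays below
    // vary by up to ±15% around the base schedule.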
    const recoveryPatterns: Array<{
      name: string;
      baseDelay?: number;
      maxDelay?: number;
      jitter?: number;
      increment?: number;
      initialTimeout?: number;
      timeoutMultiplier?: number;
      maxTimeout?: number;
    }> = [
      {
        name: 'Exponential backoff with jitter',
        baseDelay: 100,
        maxDelay: 2000,
        jitter: 0.3
      },
      {
        name: 'Linear backoff',
        increment: 200,
        maxDelay: 1000
      },
      {
        name: 'Adaptive timeout',
        initialTimeout: 1000,
        timeoutMultiplier: 1.5,
        maxTimeout: 10000
      }
    ];

    for (const pattern of recoveryPatterns) {
      console.log(`\nTesting ${pattern.name}:`);

      for (let attempt = 1; attempt <= 3; attempt++) {
        if (pattern.name.includes('Exponential')) {
          const delay = Math.min(
            pattern.baseDelay! * Math.pow(2, attempt - 1),
            pattern.maxDelay!
          );
          const jitteredDelay = delay * (1 + (Math.random() - 0.5) * pattern.jitter!);
          console.log(`  Attempt ${attempt}: ${Math.round(jitteredDelay)}ms delay`);
        } else if (pattern.name.includes('Linear')) {
          const delay = Math.min(pattern.increment! * attempt, pattern.maxDelay!);
          console.log(`  Attempt ${attempt}: ${delay}ms delay`);
        } else if (pattern.name.includes('Adaptive')) {
          const timeout = Math.min(
            pattern.initialTimeout! * Math.pow(pattern.timeoutMultiplier!, attempt - 1),
            pattern.maxTimeout!
          );
          console.log(`  Attempt ${attempt}: ${Math.round(timeout)}ms timeout`);
        }
      }
    }

    performanceTracker.endOperation('recovery-patterns');
  });

  // Performance summary
  console.log('\n' + performanceTracker.getSummary());

  // Network error handling best practices
  console.log('\nNetwork Error Handling Best Practices:');
  console.log('1. Implement retry logic with exponential backoff');
  console.log('2. Use circuit breakers to prevent cascading failures');
  console.log('3. Provide fallback mechanisms for critical operations');
  console.log('4. Set appropriate timeouts for all network operations');
  console.log('5. Log detailed error information including retry attempts');
  console.log('6. Implement health checks for external services');
  console.log('7. Use connection pooling to improve reliability');
});

tap.start();