Compare commits

2 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 3be2f0b855 |  |
|  | c38f895a72 |  |

changelog.md (+11)
@@ -1,5 +1,16 @@
 # Changelog
 
+## 2025-11-07 - 3.5.0 - feat(stocks)
+Add provider fetch limits, intraday incremental fetch, cache deduplication, and provider safety/warning improvements
+
+- Add maxRecords and defaultIntradayLimit to IProviderConfig to control maximum records per request and default intraday limits.
+- CoinGecko provider: enforce maxRecords when processing historical data, warn when large historical/intraday results are returned without explicit limits, preserve priority mappings when rebuilding the coin cache, and improve cache load logging.
+- Marketstack provider: make the safety maxRecords configurable, apply a configurable default intraday limit, warn when no explicit limit is provided, and ensure effective limits are applied to returned results.
+- StockPriceService: always attempt an incremental fetch for intraday requests without a date, fetching only data newer than the last cached timestamp and falling back to a full fetch when necessary.
+- StockPriceService: deduplicate price arrays by timestamp before caching and after merges to avoid duplicate timestamps and reduce cache bloat.
+- Introduce StockDataService for unified access to prices and fundamentals with automatic enrichment (market cap, P/E, price-to-book) and caching improvements.
+- Various cache/TTL improvements and safer default behaviors for intraday, historical and live data to improve performance and memory usage.
+
 ## 2025-11-06 - 3.4.0 - feat(stocks)
 Introduce unified stock data service, new providers, improved caching and German business data tooling
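The snippet below is a minimal, hypothetical sketch of how the 3.5.0 behavior is exercised from the consumer side. It only uses the API surface that appears in the tests added in this compare (StockPriceService options ttl/maxEntries, register(), getData()); the provider instance and its exact IProviderConfig shape (including the new maxRecords and defaultIntradayLimit fields) are assumptions that vary per provider.

```typescript
import * as opendata from '@fin.cx/opendata';

// Assumption: `someProvider` is any registered IStockProvider (e.g. a Marketstack
// or CoinGecko provider); its construction and config (maxRecords,
// defaultIntradayLimit) are provider-specific and not shown in this diff.
declare const someProvider: opendata.IStockProvider;

const stockService = new opendata.StockPriceService({
  ttl: 60_000,      // default cache TTL in ms
  maxEntries: 1000, // cap on cached entries
});
stockService.register(someProvider);

// Intraday requests without an explicit `date` now go through the incremental
// path: repeated requests only fetch records newer than the latest cached
// timestamp, and results are deduplicated by timestamp before caching.
const prices = await stockService.getData({
  type: 'intraday',
  ticker: 'AAPL',
  interval: '1min',
  limit: 1000,
});
```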
@@ -1,6 +1,6 @@
 {
   "name": "@fin.cx/opendata",
-  "version": "3.4.0",
+  "version": "3.5.0",
   "private": false,
   "description": "A comprehensive TypeScript library for accessing business data and real-time financial information. Features include German company data management with MongoDB integration, JSONL bulk processing, automated Handelsregister interactions, and real-time stock market data from multiple providers.",
   "main": "dist_ts/index.js",
test/test.cache-inspection.node+bun+deno.ts (new file, 395 lines)
@@ -0,0 +1,395 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as opendata from '../ts/index.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test to inspect actual cache contents and verify data integrity
|
||||||
|
*/
|
||||||
|
|
||||||
|
class MockProvider implements opendata.IStockProvider {
|
||||||
|
name = 'MockProvider';
|
||||||
|
priority = 100;
|
||||||
|
requiresAuth = false;
|
||||||
|
|
||||||
|
public callLog: Array<{ type: string; ticker: string; timestamp: Date }> = [];
|
||||||
|
|
||||||
|
async fetchData(request: opendata.IStockDataRequest): Promise<opendata.IStockPrice | opendata.IStockPrice[]> {
|
||||||
|
this.callLog.push({
|
||||||
|
type: request.type,
|
||||||
|
ticker: request.type === 'batch' ? request.tickers.join(',') : (request as any).ticker,
|
||||||
|
timestamp: new Date()
|
||||||
|
});
|
||||||
|
|
||||||
|
if (request.type === 'intraday') {
|
||||||
|
const count = request.limit || 10;
|
||||||
|
const prices: opendata.IStockPrice[] = [];
|
||||||
|
const baseTime = request.date || new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
for (let i = 0; i < count; i++) {
|
||||||
|
prices.push({
|
||||||
|
ticker: request.ticker,
|
||||||
|
price: 100 + i,
|
||||||
|
currency: 'USD',
|
||||||
|
timestamp: new Date(baseTime.getTime() + i * 60 * 1000),
|
||||||
|
fetchedAt: new Date(),
|
||||||
|
provider: this.name,
|
||||||
|
dataType: 'intraday',
|
||||||
|
marketState: 'REGULAR',
|
||||||
|
open: 100,
|
||||||
|
high: 101,
|
||||||
|
low: 99,
|
||||||
|
volume: 1000000,
|
||||||
|
change: 0,
|
||||||
|
changePercent: 0,
|
||||||
|
previousClose: 100
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return prices;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Default single price
|
||||||
|
return {
|
||||||
|
ticker: (request as any).ticker,
|
||||||
|
price: 150,
|
||||||
|
currency: 'USD',
|
||||||
|
timestamp: new Date(),
|
||||||
|
fetchedAt: new Date(),
|
||||||
|
provider: this.name,
|
||||||
|
dataType: 'eod',
|
||||||
|
marketState: 'CLOSED',
|
||||||
|
open: 149,
|
||||||
|
high: 151,
|
||||||
|
low: 148,
|
||||||
|
volume: 5000000,
|
||||||
|
change: 1,
|
||||||
|
changePercent: 0.67,
|
||||||
|
previousClose: 149
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let stockService: opendata.StockPriceService;
|
||||||
|
let mockProvider: MockProvider;
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Setup', async () => {
|
||||||
|
stockService = new opendata.StockPriceService({
|
||||||
|
ttl: 60000,
|
||||||
|
maxEntries: 100
|
||||||
|
});
|
||||||
|
|
||||||
|
mockProvider = new MockProvider();
|
||||||
|
stockService.register(mockProvider);
|
||||||
|
|
||||||
|
console.log('✓ Service and provider initialized');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Verify Cache Key Generation', async () => {
|
||||||
|
await tap.test('should generate unique cache keys for different requests', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.callLog = [];
|
||||||
|
|
||||||
|
// Fetch with different parameters
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 20 });
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '5min', limit: 10 });
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'MSFT', interval: '1min', limit: 10 });
|
||||||
|
|
||||||
|
// Should have made 4 provider calls (all different cache keys)
|
||||||
|
expect(mockProvider.callLog.length).toEqual(4);
|
||||||
|
|
||||||
|
console.log('✓ Cache keys are unique for different parameters');
|
||||||
|
console.log(` Total provider calls: ${mockProvider.callLog.length}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should reuse cache for identical requests', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.callLog = [];
|
||||||
|
|
||||||
|
// Same request 3 times
|
||||||
|
const result1 = await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
|
||||||
|
const result2 = await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
|
||||||
|
const result3 = await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
|
||||||
|
|
||||||
|
// Should have made only 1 provider call
|
||||||
|
expect(mockProvider.callLog.length).toEqual(1);
|
||||||
|
|
||||||
|
// All results should be identical (same reference from cache)
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual((result2 as opendata.IStockPrice[]).length);
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual((result3 as opendata.IStockPrice[]).length);
|
||||||
|
|
||||||
|
// Verify timestamps match (exact same cached data)
|
||||||
|
const ts1 = (result1 as opendata.IStockPrice[])[0].timestamp.getTime();
|
||||||
|
const ts2 = (result2 as opendata.IStockPrice[])[0].timestamp.getTime();
|
||||||
|
const ts3 = (result3 as opendata.IStockPrice[])[0].timestamp.getTime();
|
||||||
|
|
||||||
|
expect(ts1).toEqual(ts2);
|
||||||
|
expect(ts2).toEqual(ts3);
|
||||||
|
|
||||||
|
console.log('✓ Cache reused for identical requests');
|
||||||
|
console.log(` 3 requests → 1 provider call`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Verify Data Structure', async () => {
|
||||||
|
await tap.test('should cache complete IStockPrice objects', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TSLA',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 5
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result).toBeArray();
|
||||||
|
const prices = result as opendata.IStockPrice[];
|
||||||
|
|
||||||
|
// Verify structure of cached data
|
||||||
|
for (const price of prices) {
|
||||||
|
expect(price).toHaveProperty('ticker');
|
||||||
|
expect(price).toHaveProperty('price');
|
||||||
|
expect(price).toHaveProperty('currency');
|
||||||
|
expect(price).toHaveProperty('timestamp');
|
||||||
|
expect(price).toHaveProperty('fetchedAt');
|
||||||
|
expect(price).toHaveProperty('provider');
|
||||||
|
expect(price).toHaveProperty('dataType');
|
||||||
|
expect(price).toHaveProperty('marketState');
|
||||||
|
expect(price).toHaveProperty('open');
|
||||||
|
expect(price).toHaveProperty('high');
|
||||||
|
expect(price).toHaveProperty('low');
|
||||||
|
expect(price).toHaveProperty('volume');
|
||||||
|
|
||||||
|
// Verify types
|
||||||
|
expect(typeof price.ticker).toEqual('string');
|
||||||
|
expect(typeof price.price).toEqual('number');
|
||||||
|
expect(price.timestamp).toBeInstanceOf(Date);
|
||||||
|
expect(price.fetchedAt).toBeInstanceOf(Date);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('✓ Cached data has complete IStockPrice structure');
|
||||||
|
console.log(` Sample: ${prices[0].ticker} @ $${prices[0].price} (${prices[0].timestamp.toISOString()})`);
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should preserve array order in cache', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 10
|
||||||
|
});
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 10
|
||||||
|
});
|
||||||
|
|
||||||
|
const prices1 = result1 as opendata.IStockPrice[];
|
||||||
|
const prices2 = result2 as opendata.IStockPrice[];
|
||||||
|
|
||||||
|
// Verify order is preserved
|
||||||
|
for (let i = 0; i < prices1.length; i++) {
|
||||||
|
expect(prices1[i].timestamp.getTime()).toEqual(prices2[i].timestamp.getTime());
|
||||||
|
expect(prices1[i].price).toEqual(prices2[i].price);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('✓ Array order preserved in cache');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Verify TTL Behavior', async () => {
|
||||||
|
await tap.test('should respect cache TTL for intraday data', async (testArg) => {
|
||||||
|
// Create service with very short TTL for testing
|
||||||
|
const shortTTLService = new opendata.StockPriceService({
|
||||||
|
ttl: 100, // 100ms
|
||||||
|
maxEntries: 100
|
||||||
|
});
|
||||||
|
|
||||||
|
const testProvider = new MockProvider();
|
||||||
|
shortTTLService.register(testProvider);
|
||||||
|
|
||||||
|
// First fetch
|
||||||
|
await shortTTLService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TEST',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 5
|
||||||
|
});
|
||||||
|
|
||||||
|
const callCount1 = testProvider.callLog.length;
|
||||||
|
|
||||||
|
// Immediate second fetch - should hit cache
|
||||||
|
await shortTTLService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TEST',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 5
|
||||||
|
});
|
||||||
|
|
||||||
|
const callCount2 = testProvider.callLog.length;
|
||||||
|
expect(callCount2).toEqual(callCount1); // No new call
|
||||||
|
|
||||||
|
// Wait for TTL to expire
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 150));
|
||||||
|
|
||||||
|
// Third fetch - should hit provider (cache expired)
|
||||||
|
await shortTTLService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TEST',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 5
|
||||||
|
});
|
||||||
|
|
||||||
|
const callCount3 = testProvider.callLog.length;
|
||||||
|
expect(callCount3).toBeGreaterThan(callCount2); // New call made
|
||||||
|
|
||||||
|
console.log('✓ Cache TTL working correctly');
|
||||||
|
console.log(` Before expiry: ${callCount2 - callCount1} new calls`);
|
||||||
|
console.log(` After expiry: ${callCount3 - callCount2} new calls`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Memory Efficiency', async () => {
|
||||||
|
await tap.test('should store deduplicated data in cache', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.callLog = [];
|
||||||
|
|
||||||
|
// Fetch data
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 100
|
||||||
|
});
|
||||||
|
|
||||||
|
const prices = result1 as opendata.IStockPrice[];
|
||||||
|
|
||||||
|
// Verify no duplicate timestamps in cached data
|
||||||
|
const timestamps = prices.map(p => p.timestamp.getTime());
|
||||||
|
const uniqueTimestamps = new Set(timestamps);
|
||||||
|
|
||||||
|
expect(uniqueTimestamps.size).toEqual(timestamps.length);
|
||||||
|
|
||||||
|
console.log('✓ No duplicate timestamps in cached data');
|
||||||
|
console.log(` Records: ${prices.length}`);
|
||||||
|
console.log(` Unique timestamps: ${uniqueTimestamps.size}`);
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should estimate memory usage', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
// Fetch various sizes
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 100 });
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'MSFT', interval: '1min', limit: 100 });
|
||||||
|
await stockService.getData({ type: 'intraday', ticker: 'GOOGL', interval: '5min', limit: 50 });
|
||||||
|
|
||||||
|
// Estimate memory (rough calculation)
|
||||||
|
// Each IStockPrice is approximately 300-400 bytes
|
||||||
|
const totalRecords = 100 + 100 + 50;
|
||||||
|
const estimatedBytes = totalRecords * 350; // Average 350 bytes per record
|
||||||
|
const estimatedKB = (estimatedBytes / 1024).toFixed(2);
|
||||||
|
|
||||||
|
console.log('✓ Cache memory estimation:');
|
||||||
|
console.log(` Total records cached: ${totalRecords}`);
|
||||||
|
console.log(` Estimated memory: ~${estimatedKB} KB`);
|
||||||
|
console.log(` Average per record: ~350 bytes`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Edge Cases', async () => {
|
||||||
|
await tap.test('should handle empty results', async () => {
|
||||||
|
const emptyProvider = new MockProvider();
|
||||||
|
emptyProvider.fetchData = async () => [];
|
||||||
|
|
||||||
|
const emptyService = new opendata.StockPriceService();
|
||||||
|
emptyService.register(emptyProvider);
|
||||||
|
|
||||||
|
const result = await emptyService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'EMPTY',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result).toBeArray();
|
||||||
|
expect((result as opendata.IStockPrice[]).length).toEqual(0);
|
||||||
|
|
||||||
|
// Second fetch should still hit cache (even though empty)
|
||||||
|
const result2 = await emptyService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'EMPTY',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result2).toBeArray();
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(0);
|
||||||
|
|
||||||
|
console.log('✓ Empty results cached correctly');
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should handle single record', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'SINGLE',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result).toBeArray();
|
||||||
|
expect((result as opendata.IStockPrice[]).length).toEqual(1);
|
||||||
|
|
||||||
|
console.log('✓ Single record cached correctly');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Cache Inspection - Verify fetchedAt Timestamps', async () => {
|
||||||
|
await tap.test('should preserve fetchedAt in cached data', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
const beforeFetch = Date.now();
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 5
|
||||||
|
});
|
||||||
|
const afterFetch = Date.now();
|
||||||
|
|
||||||
|
const prices = result as opendata.IStockPrice[];
|
||||||
|
|
||||||
|
for (const price of prices) {
|
||||||
|
const fetchedTime = price.fetchedAt.getTime();
|
||||||
|
expect(fetchedTime).toBeGreaterThanOrEqual(beforeFetch);
|
||||||
|
expect(fetchedTime).toBeLessThanOrEqual(afterFetch);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Fetch again - fetchedAt should be the same (from cache)
|
||||||
|
await new Promise(resolve => setTimeout(resolve, 50)); // Small delay
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 5
|
||||||
|
});
|
||||||
|
|
||||||
|
const prices2 = result2 as opendata.IStockPrice[];
|
||||||
|
|
||||||
|
// Verify fetchedAt matches (same cached data)
|
||||||
|
for (let i = 0; i < prices.length; i++) {
|
||||||
|
expect(prices2[i].fetchedAt.getTime()).toEqual(prices[i].fetchedAt.getTime());
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('✓ fetchedAt timestamps preserved in cache');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
test/test.incremental-cache.node+bun+deno.ts (new file, 582 lines)
@@ -0,0 +1,582 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as opendata from '../ts/index.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Mock provider for testing incremental cache behavior
|
||||||
|
* Allows precise control over what data is returned to test cache logic
|
||||||
|
*/
|
||||||
|
class MockIntradayProvider implements opendata.IStockProvider {
|
||||||
|
name = 'MockIntraday';
|
||||||
|
priority = 100;
|
||||||
|
requiresAuth = false;
|
||||||
|
|
||||||
|
// Track fetch calls for testing
|
||||||
|
public fetchCallCount = 0;
|
||||||
|
public lastRequest: opendata.IStockDataRequest | null = null;
|
||||||
|
|
||||||
|
// Mock data to return
|
||||||
|
private mockData: opendata.IStockPrice[] = [];
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Set the mock data that will be returned on next fetch
|
||||||
|
*/
|
||||||
|
public setMockData(data: opendata.IStockPrice[]): void {
|
||||||
|
this.mockData = data;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Reset fetch tracking
|
||||||
|
*/
|
||||||
|
public resetTracking(): void {
|
||||||
|
this.fetchCallCount = 0;
|
||||||
|
this.lastRequest = null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async fetchData(request: opendata.IStockDataRequest): Promise<opendata.IStockPrice | opendata.IStockPrice[]> {
|
||||||
|
this.fetchCallCount++;
|
||||||
|
this.lastRequest = request;
|
||||||
|
|
||||||
|
// For intraday requests, return filtered data based on date
|
||||||
|
if (request.type === 'intraday') {
|
||||||
|
let filteredData = [...this.mockData];
|
||||||
|
|
||||||
|
// Filter by date if specified (simulate incremental fetch)
|
||||||
|
if (request.date) {
|
||||||
|
filteredData = filteredData.filter(p => p.timestamp > request.date!);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Apply limit
|
||||||
|
if (request.limit) {
|
||||||
|
filteredData = filteredData.slice(-request.limit);
|
||||||
|
}
|
||||||
|
|
||||||
|
return filteredData;
|
||||||
|
}
|
||||||
|
|
||||||
|
// For other requests, return first item or empty array
|
||||||
|
if (this.mockData.length > 0) {
|
||||||
|
return this.mockData[0];
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('No mock data available');
|
||||||
|
}
|
||||||
|
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Helper to generate mock intraday prices
|
||||||
|
*/
|
||||||
|
function generateMockIntradayPrices(
|
||||||
|
ticker: string,
|
||||||
|
count: number,
|
||||||
|
startTime: Date,
|
||||||
|
intervalMinutes: number = 1
|
||||||
|
): opendata.IStockPrice[] {
|
||||||
|
const prices: opendata.IStockPrice[] = [];
|
||||||
|
let basePrice = 100;
|
||||||
|
|
||||||
|
for (let i = 0; i < count; i++) {
|
||||||
|
const timestamp = new Date(startTime.getTime() + i * intervalMinutes * 60 * 1000);
|
||||||
|
basePrice += (Math.random() - 0.5) * 2; // Random walk
|
||||||
|
|
||||||
|
prices.push({
|
||||||
|
ticker,
|
||||||
|
price: basePrice,
|
||||||
|
currency: 'USD',
|
||||||
|
timestamp,
|
||||||
|
fetchedAt: new Date(),
|
||||||
|
provider: 'MockIntraday',
|
||||||
|
dataType: 'intraday',
|
||||||
|
marketState: 'REGULAR',
|
||||||
|
open: basePrice - 0.5,
|
||||||
|
high: basePrice + 1,
|
||||||
|
low: basePrice - 1,
|
||||||
|
volume: 1000000,
|
||||||
|
change: 0,
|
||||||
|
changePercent: 0,
|
||||||
|
previousClose: basePrice
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return prices;
|
||||||
|
}
|
||||||
|
|
||||||
|
let stockService: opendata.StockPriceService;
|
||||||
|
let mockProvider: MockIntradayProvider;
|
||||||
|
|
||||||
|
tap.test('Incremental Cache Setup', async () => {
|
||||||
|
await tap.test('should create StockPriceService and MockProvider', async () => {
|
||||||
|
stockService = new opendata.StockPriceService({
|
||||||
|
ttl: 60000, // 1 minute default (will be overridden by smart TTL)
|
||||||
|
maxEntries: 1000
|
||||||
|
});
|
||||||
|
expect(stockService).toBeInstanceOf(opendata.StockPriceService);
|
||||||
|
|
||||||
|
mockProvider = new MockIntradayProvider();
|
||||||
|
stockService.register(mockProvider);
|
||||||
|
|
||||||
|
const providers = stockService.getEnabledProviders();
|
||||||
|
expect(providers).toContainEqual(mockProvider);
|
||||||
|
|
||||||
|
console.log('✓ Test setup complete');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Basic Behavior', async () => {
|
||||||
|
await tap.test('should cache intraday data on first fetch', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
const mockData = generateMockIntradayPrices('AAPL', 10, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
// First fetch - should hit provider
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 10
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result1).toBeArray();
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1);
|
||||||
|
|
||||||
|
console.log('✓ First fetch cached 10 records');
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should serve from cache on second identical request', async () => {
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
// Second fetch - should hit cache (no provider call)
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 10
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result2).toBeArray();
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(0); // Should NOT call provider
|
||||||
|
|
||||||
|
console.log('✓ Second fetch served from cache (0 provider calls)');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Incremental Fetch', async () => {
|
||||||
|
await tap.test('should only fetch NEW data on refresh', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
// First fetch: 10 records from 9:30-9:39
|
||||||
|
const mockData1 = generateMockIntradayPrices('MSFT', 10, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData1);
|
||||||
|
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'MSFT',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1);
|
||||||
|
|
||||||
|
const latestTimestamp1 = (result1 as opendata.IStockPrice[])[9].timestamp;
|
||||||
|
console.log(`✓ First fetch: 10 records, latest timestamp: ${latestTimestamp1.toISOString()}`);
|
||||||
|
|
||||||
|
// Simulate 5 minutes passing - 5 new records available
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
const mockData2 = generateMockIntradayPrices('MSFT', 15, startTime, 1); // 15 total (10 old + 5 new)
|
||||||
|
mockProvider.setMockData(mockData2);
|
||||||
|
|
||||||
|
// Second fetch - should detect cache and only fetch NEW data
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'MSFT',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(15);
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1); // Should call provider
|
||||||
|
|
||||||
|
// Verify the request had a date filter (incremental fetch)
|
||||||
|
expect(mockProvider.lastRequest).not.toEqual(null);
|
||||||
|
expect(mockProvider.lastRequest!.type).toEqual('intraday');
|
||||||
|
expect((mockProvider.lastRequest as opendata.IStockIntradayRequest).date).not.toEqual(undefined);
|
||||||
|
|
||||||
|
const requestDate = (mockProvider.lastRequest as opendata.IStockIntradayRequest).date;
|
||||||
|
console.log(`✓ Incremental fetch requested data since: ${requestDate!.toISOString()}`);
|
||||||
|
console.log(`✓ Total records after merge: ${(result2 as opendata.IStockPrice[]).length}`);
|
||||||
|
console.log('✓ Only fetched NEW data (incremental fetch working)');
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should return cached data when no new records available', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
const mockData = generateMockIntradayPrices('GOOGL', 10, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
// First fetch
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'GOOGL',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
|
||||||
|
// Second fetch - same data (no new records)
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
mockProvider.setMockData(mockData); // Same data
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'GOOGL',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1); // Incremental fetch attempted
|
||||||
|
|
||||||
|
console.log('✓ No new records - returned cached data');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Deduplication', async () => {
|
||||||
|
await tap.test('should deduplicate by timestamp in merged data', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
// First fetch: 10 records
|
||||||
|
const mockData1 = generateMockIntradayPrices('TSLA', 10, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData1);
|
||||||
|
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TSLA',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
|
||||||
|
// Second fetch: Return overlapping data (last 5 old + 5 new)
|
||||||
|
// This simulates provider returning some duplicate timestamps
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
const mockData2 = generateMockIntradayPrices('TSLA', 15, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData2);
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TSLA',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should have 15 unique timestamps (deduplication worked)
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(15);
|
||||||
|
|
||||||
|
// Verify timestamps are unique
|
||||||
|
const timestamps = (result2 as opendata.IStockPrice[]).map(p => p.timestamp.getTime());
|
||||||
|
const uniqueTimestamps = new Set(timestamps);
|
||||||
|
expect(uniqueTimestamps.size).toEqual(15);
|
||||||
|
|
||||||
|
console.log('✓ Deduplication working - 15 unique timestamps');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Limit Handling', async () => {
|
||||||
|
await tap.test('should respect limit parameter in merged results', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
// First fetch with limit 100
|
||||||
|
const mockData1 = generateMockIntradayPrices('AMZN', 100, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData1);
|
||||||
|
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AMZN',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 100
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(100);
|
||||||
|
|
||||||
|
// Second fetch: 10 new records available
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
const mockData2 = generateMockIntradayPrices('AMZN', 110, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData2);
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AMZN',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 100 // Same limit
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should still return 100 (most recent 100 after merge)
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(100);
|
||||||
|
|
||||||
|
// Verify we got the most RECENT 100 (should include new data)
|
||||||
|
const lastTimestamp = (result2 as opendata.IStockPrice[])[99].timestamp;
|
||||||
|
const expectedLastTimestamp = mockData2[109].timestamp;
|
||||||
|
expect(lastTimestamp.getTime()).toEqual(expectedLastTimestamp.getTime());
|
||||||
|
|
||||||
|
console.log('✓ Limit respected - returned most recent 100 records');
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should handle different limits without cache collision', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
const mockData = generateMockIntradayPrices('NVDA', 1000, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
// Fetch with limit 100
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'NVDA',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 100
|
||||||
|
});
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(100);
|
||||||
|
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
// Fetch with limit 500 (should NOT use cached limit:100 data)
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'NVDA',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 500
|
||||||
|
});
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(500);
|
||||||
|
|
||||||
|
// Should have made a new provider call (different cache key)
|
||||||
|
expect(mockProvider.fetchCallCount).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
console.log('✓ Different limits use different cache keys');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Dashboard Polling Scenario', async () => {
|
||||||
|
await tap.test('should efficiently handle repeated polling requests', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
let currentDataSize = 100;
|
||||||
|
|
||||||
|
// Initial fetch: 100 records
|
||||||
|
let mockData = generateMockIntradayPrices('AAPL', currentDataSize, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(100);
|
||||||
|
const initialFetchCount = mockProvider.fetchCallCount;
|
||||||
|
|
||||||
|
console.log(`✓ Initial fetch: ${(result1 as opendata.IStockPrice[]).length} records (${initialFetchCount} API calls)`);
|
||||||
|
|
||||||
|
// Simulate 5 dashboard refreshes (1 new record each time)
|
||||||
|
let totalNewRecords = 0;
|
||||||
|
for (let i = 0; i < 5; i++) {
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
currentDataSize += 1; // 1 new record
|
||||||
|
totalNewRecords += 1;
|
||||||
|
|
||||||
|
mockData = generateMockIntradayPrices('AAPL', currentDataSize, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect((result as opendata.IStockPrice[]).length).toEqual(currentDataSize);
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1); // Incremental fetch
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`✓ Dashboard polling: 5 refreshes with ${totalNewRecords} new records`);
|
||||||
|
console.log('✓ Each refresh only fetched NEW data (incremental cache working)');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Memory Impact', async () => {
|
||||||
|
await tap.test('should demonstrate memory savings from deduplication', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
// Create data with intentional duplicates
|
||||||
|
const baseData = generateMockIntradayPrices('MSFT', 1000, startTime, 1);
|
||||||
|
const duplicatedData = [...baseData, ...baseData.slice(-100)]; // Duplicate last 100
|
||||||
|
|
||||||
|
expect(duplicatedData.length).toEqual(1100); // Before deduplication
|
||||||
|
|
||||||
|
mockProvider.setMockData(duplicatedData);
|
||||||
|
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'MSFT',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should have 1000 unique records (100 duplicates removed)
|
||||||
|
expect((result as opendata.IStockPrice[]).length).toEqual(1000);
|
||||||
|
|
||||||
|
console.log('✓ Deduplication removed 100 duplicate timestamps');
|
||||||
|
console.log(`✓ Memory saved: ~${Math.round((100 / 1100) * 100)}%`);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Fallback Behavior', async () => {
|
||||||
|
await tap.test('should not use incremental fetch for requests with date filter', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
const mockData = generateMockIntradayPrices('GOOGL', 100, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
// First fetch without date
|
||||||
|
await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'GOOGL',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
// Second fetch WITH date filter - should NOT use incremental cache
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'GOOGL',
|
||||||
|
interval: '1min',
|
||||||
|
date: new Date('2025-01-07T10:00:00.000Z') // Explicit date filter
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should have made normal fetch (not incremental)
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1);
|
||||||
|
expect((mockProvider.lastRequest as opendata.IStockIntradayRequest).date).not.toEqual(undefined);
|
||||||
|
|
||||||
|
console.log('✓ Incremental cache skipped for requests with explicit date filter');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Performance Benchmark', async () => {
|
||||||
|
await tap.test('should demonstrate API call reduction', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
// Initial dataset: 1000 records
|
||||||
|
let mockData = generateMockIntradayPrices('BENCHMARK', 1000, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
// Initial fetch
|
||||||
|
await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'BENCHMARK',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockProvider.fetchCallCount).toEqual(1);
|
||||||
|
console.log('✓ Initial fetch: 1000 records');
|
||||||
|
|
||||||
|
let totalProviderCalls = 1;
|
||||||
|
let totalNewRecords = 0;
|
||||||
|
|
||||||
|
// Simulate 10 refreshes (5 new records each)
|
||||||
|
for (let i = 0; i < 10; i++) {
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
// Add 5 new records
|
||||||
|
const newCount = 5;
|
||||||
|
mockData = generateMockIntradayPrices('BENCHMARK', 1000 + totalNewRecords + newCount, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData);
|
||||||
|
|
||||||
|
await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'BENCHMARK',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
totalProviderCalls += mockProvider.fetchCallCount;
|
||||||
|
totalNewRecords += newCount;
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('\n📊 Performance Benchmark:');
|
||||||
|
console.log(` Total refreshes: 10`);
|
||||||
|
console.log(` New records fetched: ${totalNewRecords}`);
|
||||||
|
console.log(` Total provider calls: ${totalProviderCalls}`);
|
||||||
|
console.log(` Without incremental cache: ${11} calls (1 initial + 10 full refreshes)`);
|
||||||
|
console.log(` With incremental cache: ${totalProviderCalls} calls (1 initial + 10 incremental)`);
|
||||||
|
console.log(` Data transfer reduction: ~${Math.round((1 - (totalNewRecords / (10 * 1000))) * 100)}%`);
|
||||||
|
console.log(' (Only fetched NEW data instead of refetching all 1000 records each time)');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Incremental Cache - Timestamp Ordering', async () => {
|
||||||
|
await tap.test('should maintain timestamp order after merge', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
|
||||||
|
const startTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
// First fetch
|
||||||
|
const mockData1 = generateMockIntradayPrices('TSLA', 10, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData1);
|
||||||
|
|
||||||
|
await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TSLA',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Second fetch with new data
|
||||||
|
mockProvider.resetTracking();
|
||||||
|
const mockData2 = generateMockIntradayPrices('TSLA', 15, startTime, 1);
|
||||||
|
mockProvider.setMockData(mockData2);
|
||||||
|
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'TSLA',
|
||||||
|
interval: '1min'
|
||||||
|
});
|
||||||
|
|
||||||
|
// Verify ascending timestamp order
|
||||||
|
const timestamps = (result as opendata.IStockPrice[]).map(p => p.timestamp.getTime());
|
||||||
|
for (let i = 1; i < timestamps.length; i++) {
|
||||||
|
expect(timestamps[i]).toBeGreaterThan(timestamps[i - 1]);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log('✓ Timestamps correctly ordered (ascending)');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
test/test.stale-data-fix.node+bun+deno.ts (new file, 365 lines)
@@ -0,0 +1,365 @@
|
|||||||
|
import { expect, tap } from '@git.zone/tstest/tapbundle';
|
||||||
|
import * as opendata from '../ts/index.js';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Test to verify we NEVER return stale intraday data
|
||||||
|
* Even when cache hasn't expired, we should check for new data
|
||||||
|
*/
|
||||||
|
|
||||||
|
class MockIntradayProvider implements opendata.IStockProvider {
|
||||||
|
name = 'MockIntradayProvider';
|
||||||
|
priority = 100;
|
||||||
|
requiresAuth = false;
|
||||||
|
|
||||||
|
public fetchCount = 0;
|
||||||
|
public lastRequestDate: Date | undefined;
|
||||||
|
public currentDataCount = 10; // Start with 10 records; public so the tests below can reset it directly
|
||||||
|
private baseTime = new Date('2025-01-07T09:30:00.000Z');
|
||||||
|
|
||||||
|
async fetchData(request: opendata.IStockDataRequest): Promise<opendata.IStockPrice | opendata.IStockPrice[]> {
|
||||||
|
this.fetchCount++;
|
||||||
|
|
||||||
|
if (request.type === 'intraday') {
|
||||||
|
this.lastRequestDate = request.date;
|
||||||
|
|
||||||
|
const startTime = request.date || this.baseTime;
|
||||||
|
const prices: opendata.IStockPrice[] = [];
|
||||||
|
|
||||||
|
// Simulate provider returning data AFTER the requested date
|
||||||
|
for (let i = 0; i < this.currentDataCount; i++) {
|
||||||
|
const timestamp = new Date(startTime.getTime() + i * 60 * 1000);
|
||||||
|
|
||||||
|
// Only return data AFTER request date if date filter is present
|
||||||
|
if (request.date && timestamp <= request.date) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
prices.push({
|
||||||
|
ticker: request.ticker,
|
||||||
|
price: 100 + i,
|
||||||
|
currency: 'USD',
|
||||||
|
timestamp,
|
||||||
|
fetchedAt: new Date(),
|
||||||
|
provider: this.name,
|
||||||
|
dataType: 'intraday',
|
||||||
|
marketState: 'REGULAR',
|
||||||
|
open: 100,
|
||||||
|
high: 101,
|
||||||
|
low: 99,
|
||||||
|
volume: 1000000,
|
||||||
|
change: 0,
|
||||||
|
changePercent: 0,
|
||||||
|
previousClose: 100
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
return prices;
|
||||||
|
}
|
||||||
|
|
||||||
|
throw new Error('Only intraday supported in this mock');
|
||||||
|
}
|
||||||
|
|
||||||
|
async isAvailable(): Promise<boolean> {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
public addNewRecords(count: number): void {
|
||||||
|
this.currentDataCount += count;
|
||||||
|
}
|
||||||
|
|
||||||
|
public advanceTime(minutes: number): void {
|
||||||
|
this.baseTime = new Date(this.baseTime.getTime() + minutes * 60 * 1000);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let stockService: opendata.StockPriceService;
|
||||||
|
let mockProvider: MockIntradayProvider;
|
||||||
|
|
||||||
|
tap.test('Stale Data Fix - Setup', async () => {
|
||||||
|
// Use LONG TTL so cache doesn't expire during test
|
||||||
|
stockService = new opendata.StockPriceService({
|
||||||
|
ttl: 300000, // 5 minutes
|
||||||
|
maxEntries: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
mockProvider = new MockIntradayProvider();
|
||||||
|
stockService.register(mockProvider);
|
||||||
|
|
||||||
|
console.log('✓ Service initialized with 5-minute cache TTL');
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Stale Data Fix - Check for New Data Even When Cache Valid', async () => {
|
||||||
|
await tap.test('should return cached data if less than 1 minute old (freshness check)', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.fetchCount = 0;
|
||||||
|
mockProvider.currentDataCount = 10;
|
||||||
|
|
||||||
|
console.log('\n📊 Scenario: Request twice within 1 minute\n');
|
||||||
|
|
||||||
|
// First request - fetch 10 records
|
||||||
|
console.log('⏰ First request (initial fetch)');
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result1).toBeArray();
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
expect(mockProvider.fetchCount).toEqual(1);
|
||||||
|
|
||||||
|
const latestTimestamp1 = (result1 as opendata.IStockPrice[])[9].timestamp;
|
||||||
|
console.log(` ✓ Fetched 10 records, latest: ${latestTimestamp1.toISOString()}`);
|
||||||
|
|
||||||
|
// Second request immediately - should return cache (data < 1min old)
|
||||||
|
console.log('\n⏰ Second request (< 1 minute later)');
|
||||||
|
mockProvider.fetchCount = 0;
|
||||||
|
mockProvider.addNewRecords(10); // New data available, but won't fetch yet
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'AAPL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should return cached data (freshness check prevents fetch)
|
||||||
|
expect((result2 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
expect(mockProvider.fetchCount).toEqual(0); // No provider call
|
||||||
|
|
||||||
|
console.log(` ✓ Returned cached 10 records (no provider call)`);
|
||||||
|
console.log(` ✓ Freshness check: Data < 1min old, no fetch needed`);
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should fetch NEW data when cache is > 1 minute old', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.fetchCount = 0;
|
||||||
|
mockProvider.currentDataCount = 10;
|
||||||
|
|
||||||
|
console.log('\n📊 Scenario: Request after 2 minutes (data > 1min old)\n');
|
||||||
|
|
||||||
|
// First request - fetch 10 records at 9:30am
|
||||||
|
console.log('⏰ 9:30:00 - First request (initial fetch)');
|
||||||
|
const result1 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'MSFT',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result1).toBeArray();
|
||||||
|
expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
|
||||||
|
const latestTimestamp1 = (result1 as opendata.IStockPrice[])[9].timestamp;
|
||||||
|
console.log(` ✓ Fetched 10 records, latest: ${latestTimestamp1.toISOString()}`);
|
||||||
|
|
||||||
|
// Advance time by 2 minutes - now data is > 1 minute old
|
||||||
|
console.log('\n⏰ 9:32:00 - Second request (2 minutes later, data > 1min old)');
|
||||||
|
console.log(' 📝 Advancing provider time by 2 minutes...');
|
||||||
|
|
||||||
|
mockProvider.fetchCount = 0;
|
||||||
|
mockProvider.advanceTime(2); // Advance 2 minutes
|
||||||
|
mockProvider.addNewRecords(10); // Now provider has 20 records total
|
||||||
|
|
||||||
|
const result2 = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'MSFT',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(result2).toBeArray();
|
||||||
|
const prices2 = result2 as opendata.IStockPrice[];
|
||||||
|
|
||||||
|
// Should have 20 records (10 cached + 10 new)
|
||||||
|
expect(prices2.length).toEqual(20);
|
||||||
|
|
||||||
|
// Should have made a provider call (data was stale)
|
||||||
|
expect(mockProvider.fetchCount).toBeGreaterThan(0);
|
||||||
|
|
||||||
|
const latestTimestamp2 = prices2[prices2.length - 1].timestamp;
|
||||||
|
console.log(` ✓ Now have ${prices2.length} records, latest: ${latestTimestamp2.toISOString()}`);
|
||||||
|
console.log(` ✓ Provider calls: ${mockProvider.fetchCount} (fetched new data)`);
|
||||||
|
console.log(` ✓ Data was > 1min old, incremental fetch triggered!`);
|
||||||
|
|
||||||
|
// Verify we got NEW data
|
||||||
|
expect(latestTimestamp2.getTime()).toBeGreaterThan(latestTimestamp1.getTime());
|
||||||
|
|
||||||
|
console.log('\n✅ SUCCESS: Fetched new data when cache was stale!');
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('should handle polling with > 1 minute intervals efficiently', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
mockProvider.fetchCount = 0;
|
||||||
|
mockProvider.currentDataCount = 100;
|
||||||
|
|
||||||
|
console.log('\n📊 Scenario: Dashboard polling every 2 minutes\n');
|
||||||
|
|
||||||
|
// Initial request at 9:30am
|
||||||
|
console.log('⏰ 9:30:00 - Request 1 (initial fetch)');
|
||||||
|
await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'GOOGL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(mockProvider.fetchCount).toEqual(1);
|
||||||
|
console.log(` ✓ Fetched 100 records (provider calls: 1)`);
|
||||||
|
|
||||||
|
let totalProviderCalls = 1;
|
||||||
|
let totalNewRecords = 0;
|
||||||
|
|
||||||
|
// Simulate 3 polling refreshes (2 minutes apart, 5 new records each)
|
||||||
|
for (let i = 2; i <= 4; i++) {
|
||||||
|
mockProvider.fetchCount = 0;
|
||||||
|
mockProvider.advanceTime(2); // Advance 2 minutes (triggers freshness check)
|
||||||
|
mockProvider.addNewRecords(5);
|
||||||
|
totalNewRecords += 5;
|
||||||
|
|
||||||
|
const minutes = (i - 1) * 2;
|
||||||
|
console.log(`\n⏰ 9:${30 + minutes}:00 - Request ${i} (${minutes} minutes later, +5 new records)`);
|
||||||
|
|
||||||
|
const result = await stockService.getData({
|
||||||
|
type: 'intraday',
|
||||||
|
ticker: 'GOOGL',
|
||||||
|
interval: '1min',
|
||||||
|
limit: 1000
|
||||||
|
});
|
||||||
|
|
||||||
|
const expectedTotal = 100 + totalNewRecords;
|
||||||
|
expect((result as opendata.IStockPrice[]).length).toEqual(expectedTotal);
|
||||||
|
|
||||||
|
// Should have made exactly 1 provider call (incremental fetch)
|
||||||
|
expect(mockProvider.fetchCount).toEqual(1);
|
||||||
|
totalProviderCalls++;
|
||||||
|
|
||||||
|
console.log(` ✓ Now have ${expectedTotal} records (incremental fetch: 1 call)`);
|
||||||
|
}
|
||||||
|
|
||||||
|
console.log(`\n📊 Summary:`);
|
||||||
|
console.log(` Total requests: 4`);
|
||||||
|
console.log(` Total provider calls: ${totalProviderCalls}`);
|
||||||
|
console.log(` New records fetched: ${totalNewRecords}`);
|
||||||
|
console.log(` Without incremental cache: Would fetch 100 records × 3 refreshes = 300 records`);
|
||||||
|
console.log(` With incremental cache: Only fetched ${totalNewRecords} new records`);
|
||||||
|
console.log(` Data transfer reduction: ${Math.round((1 - (totalNewRecords / 300)) * 100)}%`);
|
||||||
|
console.log('\n✅ SUCCESS: Only fetched NEW data on each refresh!');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
tap.test('Stale Data Fix - Verify No Regression for Other Request Types', async () => {
|
||||||
|
await tap.test('historical requests should still use simple cache', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
// Mock provider that counts calls
|
||||||
|
let historicalCallCount = 0;
|
||||||
|
const historicalProvider: opendata.IStockProvider = {
|
||||||
|
name: 'HistoricalMock',
|
||||||
|
priority: 100,
|
||||||
|
requiresAuth: false,
|
||||||
|
async fetchData() {
|
||||||
|
historicalCallCount++;
|
||||||
|
return [{
|
||||||
|
ticker: 'TEST',
|
||||||
|
price: 100,
|
||||||
|
currency: 'USD',
|
||||||
|
timestamp: new Date('2025-01-01'),
|
||||||
|
fetchedAt: new Date(),
|
||||||
|
provider: 'HistoricalMock',
|
||||||
|
dataType: 'eod',
|
||||||
|
marketState: 'CLOSED',
|
||||||
|
open: 99,
|
||||||
|
high: 101,
|
||||||
|
low: 98,
|
||||||
|
volume: 1000000,
|
||||||
|
change: 1,
|
||||||
|
changePercent: 1,
|
||||||
|
previousClose: 99
|
||||||
|
}];
|
||||||
|
},
|
||||||
|
async isAvailable() { return true; }
|
||||||
|
};
|
||||||
|
|
||||||
|
const testService = new opendata.StockPriceService({ ttl: 60000 });
|
||||||
|
testService.register(historicalProvider);
|
||||||
|
|
||||||
|
// First request
|
||||||
|
await testService.getData({
|
||||||
|
type: 'historical',
|
||||||
|
ticker: 'TEST',
|
||||||
|
from: new Date('2025-01-01'),
|
||||||
|
to: new Date('2025-01-31')
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(historicalCallCount).toEqual(1);
|
||||||
|
|
||||||
|
// Second request - should use cache (not incremental fetch)
|
||||||
|
await testService.getData({
|
||||||
|
type: 'historical',
|
||||||
|
ticker: 'TEST',
|
||||||
|
from: new Date('2025-01-01'),
|
||||||
|
to: new Date('2025-01-31')
|
||||||
|
});
|
||||||
|
|
||||||
|
// Should still be 1 (used cache)
|
||||||
|
expect(historicalCallCount).toEqual(1);
|
||||||
|
|
||||||
|
console.log('✓ Historical requests use simple cache (no incremental fetch)');
|
||||||
|
});
|
||||||
|
|
||||||
|
await tap.test('current price requests should still use simple cache', async () => {
|
||||||
|
stockService.clearCache();
|
||||||
|
|
||||||
|
let currentCallCount = 0;
|
||||||
|
const currentProvider: opendata.IStockProvider = {
|
||||||
|
name: 'CurrentMock',
|
||||||
|
priority: 100,
|
||||||
|
requiresAuth: false,
|
||||||
|
async fetchData() {
|
||||||
|
currentCallCount++;
|
||||||
|
return {
|
||||||
|
ticker: 'TEST',
|
||||||
|
price: 150,
|
||||||
|
currency: 'USD',
|
||||||
|
timestamp: new Date(),
|
||||||
|
fetchedAt: new Date(),
|
||||||
|
provider: 'CurrentMock',
|
||||||
|
dataType: 'eod',
|
||||||
|
marketState: 'CLOSED',
|
||||||
|
open: 149,
|
||||||
|
high: 151,
|
||||||
|
low: 148,
|
||||||
|
volume: 5000000,
|
||||||
|
change: 1,
|
||||||
|
changePercent: 0.67,
|
||||||
|
previousClose: 149
|
||||||
|
};
|
||||||
|
},
|
||||||
|
async isAvailable() { return true; }
|
||||||
|
};
|
||||||
|
|
||||||
|
const testService = new opendata.StockPriceService({ ttl: 60000 });
|
||||||
|
testService.register(currentProvider);
|
||||||
|
|
||||||
|
// First request
|
||||||
|
await testService.getData({
|
||||||
|
type: 'current',
|
||||||
|
ticker: 'TEST'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(currentCallCount).toEqual(1);
|
||||||
|
|
||||||
|
// Second request - should use cache
|
||||||
|
await testService.getData({
|
||||||
|
type: 'current',
|
||||||
|
ticker: 'TEST'
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(currentCallCount).toEqual(1);
|
||||||
|
|
||||||
|
console.log('✓ Current price requests use simple cache');
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
export default tap.start();
|
||||||
@@ -3,6 +3,6 @@
    */
 export const commitinfo = {
   name: '@fin.cx/opendata',
-  version: '3.4.0',
+  version: '3.5.0',
   description: 'A comprehensive TypeScript library for accessing business data and real-time financial information. Features include German company data management with MongoDB integration, JSONL bulk processing, automated Handelsregister interactions, and real-time stock market data from multiple providers.'
 }

@@ -156,11 +156,22 @@ export class StockPriceService implements IProviderRegistry {
    */
   public async getData(request: IStockDataRequest): Promise<IStockPrice | IStockPrice[]> {
     const cacheKey = this.getDataCacheKey(request);
-    const cached = this.getFromCache(cacheKey);
 
-    if (cached) {
-      console.log(`Cache hit for ${this.getRequestDescription(request)}`);
-      return cached;
+    // For intraday requests without date filter, ALWAYS try incremental fetch
+    // This ensures we check for new data even if cache hasn't expired
+    if (request.type === 'intraday' && !request.date) {
+      const incrementalResult = await this.tryIncrementalFetch(request, cacheKey);
+      if (incrementalResult) {
+        return incrementalResult;
+      }
+      // If incremental fetch returns null, continue to normal fetch below
+    } else {
+      // For other request types (historical, current, batch), use simple cache
+      const cached = this.getFromCache(cacheKey);
+      if (cached) {
+        console.log(`Cache hit for ${this.getRequestDescription(request)}`);
+        return cached;
+      }
     }
 
     const providers = this.getEnabledProviders();

@@ -204,6 +215,137 @@ export class StockPriceService implements IProviderRegistry {
     );
   }
 
+  /**
+   * Try incremental fetch: Only fetch NEW data since last cached timestamp
+   * Returns merged result if successful, null if incremental fetch not applicable
+   */
+  private async tryIncrementalFetch(
+    request: IStockDataRequest,
+    cacheKey: string
+  ): Promise<IStockPrice[] | null> {
+    // Only applicable for intraday requests without date filter
+    if (request.type !== 'intraday' || request.date) {
+      return null;
+    }
+
+    // Check if we have similar cached data (same ticker, interval, but any limit/date)
+    const baseKey = `intraday:${request.ticker}:${request.interval}:latest`;
+    let cachedData: IStockPrice[] | null = null;
+    let matchedKey: string | null = null;
+
+    // Find any cached intraday data for this ticker+interval
+    for (const [key, entry] of this.cache.entries()) {
+      if (key.startsWith(baseKey)) {
+        const age = Date.now() - entry.timestamp.getTime();
+        if (entry.ttl !== Infinity && age > entry.ttl) {
+          continue; // Expired
+        }
+        cachedData = Array.isArray(entry.price) ? entry.price as IStockPrice[] : null;
+        matchedKey = key;
+        break;
+      }
+    }
+
+    if (!cachedData || cachedData.length === 0) {
+      return null; // No cached data to build on
+    }
+
+    // Find latest timestamp in cached data
+    const latestCached = cachedData.reduce((latest, price) => {
+      return price.timestamp > latest ? price.timestamp : latest;
+    }, new Date(0));
+
+    // Freshness check: If latest data is less than 1 minute old, just return cache
+    const dataAge = Date.now() - latestCached.getTime();
+    const freshnessThreshold = 60 * 1000; // 1 minute
+
+    if (dataAge < freshnessThreshold) {
+      console.log(`🔄 Incremental cache: Latest data is ${Math.round(dataAge / 1000)}s old (< 1min), returning cached data`);
+      return cachedData;
+    }
+
+    console.log(`🔄 Incremental cache: Found ${cachedData.length} cached records, latest: ${latestCached.toISOString()} (${Math.round(dataAge / 1000)}s old)`);
+
+    // Fetch only NEW data since latest cached timestamp
+    // Create a modified request with date filter
+    const modifiedRequest: IStockIntradayRequest = {
+      ...request,
+      date: latestCached // Fetch from this date forward
+    };
+
+    const providers = this.getEnabledProviders();
+    for (const provider of providers) {
+      const entry = this.providers.get(provider.name)!;
+
+      try {
+        const newData = await this.fetchWithRetry(
+          () => provider.fetchData(modifiedRequest),
+          entry.config
+        ) as IStockPrice[];
+
+        entry.successCount++;
+
+        // Filter out data at or before latest cached timestamp (avoid duplicates)
+        const filteredNew = newData.filter(p => p.timestamp > latestCached);
+
+        if (filteredNew.length === 0) {
+          console.log(`🔄 Incremental cache: No new data since ${latestCached.toISOString()}, using cache`);
+          return cachedData;
+        }
+
+        console.log(`🔄 Incremental cache: Fetched ${filteredNew.length} new records since ${latestCached.toISOString()}`);
+
+        // Merge cached + new data
+        const merged = [...cachedData, ...filteredNew];
+
+        // Sort by timestamp (ascending)
+        merged.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
+
+        // Deduplicate by timestamp (keep latest)
+        const deduped = this.deduplicateByTimestamp(merged);
+
+        // Apply limit if specified in original request
+        const effectiveLimit = request.limit || deduped.length;
+        const result = deduped.slice(-effectiveLimit); // Take most recent N
+
+        // Update cache with merged result
+        const ttl = this.getRequestTTL(request, result);
+        this.addToCache(cacheKey, result, ttl);
+
+        console.log(`🔄 Incremental cache: Returning ${result.length} total records (${cachedData.length} cached + ${filteredNew.length} new)`);
+        return result;
+
+      } catch (error) {
+        entry.errorCount++;
+        entry.lastError = error as Error;
+        entry.lastErrorTime = new Date();
+        console.warn(`Incremental fetch failed for ${provider.name}, falling back to full fetch`);
+        continue; // Try next provider or fall back to normal fetch
+      }
+    }
+
+    return null; // Incremental fetch failed, fall back to normal fetch
+  }
+
+  /**
+   * Deduplicate array of prices by timestamp, keeping the latest data for each timestamp
+   */
+  private deduplicateByTimestamp(prices: IStockPrice[]): IStockPrice[] {
+    const seen = new Map<number, IStockPrice>();
+
+    for (const price of prices) {
+      const ts = price.timestamp.getTime();
+      const existing = seen.get(ts);
+
+      // Keep the entry with the latest fetchedAt (most recent data)
+      if (!existing || price.fetchedAt > existing.fetchedAt) {
+        seen.set(ts, price);
+      }
+    }
+
+    return Array.from(seen.values());
+  }
+
   /**
    * Get TTL based on request type and result
    */
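
Below is a minimal sketch of how the incremental path behaves from a caller's perspective. It assumes a provider is already registered and that `'1min'` is an accepted interval value; the request fields (`type`, `ticker`, `interval`, `limit`) mirror the service code above.

```typescript
import * as opendata from '@fin.cx/opendata';

const service = new opendata.StockPriceService({ ttl: 60000 });
// service.register(someIntradayProvider); // any IStockProvider that serves intraday data

// First call: nothing is cached yet, so tryIncrementalFetch returns null and a full fetch runs.
const initial = await service.getData({
  type: 'intraday',
  ticker: 'BTC',
  interval: '1min', // assumed interval value
  limit: 500,
});

// Later call without a date: the cached series is found via the `intraday:BTC:1min:latest` key prefix.
// If the newest cached bar is older than the 1-minute freshness threshold, only bars after that
// timestamp are fetched, merged, deduplicated, and written back to the cache.
const refreshed = await service.getData({
  type: 'intraday',
  ticker: 'BTC',
  interval: '1min',
  limit: 500,
});
```

Unlike historical and current requests, the second intraday call re-checks the provider even when the cache TTL has not expired; only bars newer than the latest cached timestamp are fetched and merged.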

@@ -328,7 +470,8 @@ export class StockPriceService implements IProviderRegistry {
         return `historical:${request.ticker}:${fromStr}:${toStr}${request.exchange ? `:${request.exchange}` : ''}`;
       case 'intraday':
         const dateStr = request.date ? request.date.toISOString().split('T')[0] : 'latest';
-        return `intraday:${request.ticker}:${request.interval}:${dateStr}${request.exchange ? `:${request.exchange}` : ''}`;
+        const limitStr = request.limit ? `:limit${request.limit}` : '';
+        return `intraday:${request.ticker}:${request.interval}:${dateStr}${limitStr}${request.exchange ? `:${request.exchange}` : ''}`;
       case 'batch':
         const tickers = request.tickers.sort().join(',');
         return `batch:${tickers}${request.exchange ? `:${request.exchange}` : ''}`;
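
With the limit now encoded in the key, a request's cache entry no longer collides with a differently limited request for the same ticker and interval. A small sketch of the resulting key shapes (hypothetical ticker and interval values; the optional exchange suffix is omitted):

```typescript
// Mirrors the key construction in the `case 'intraday'` branch above.
const intradayKey = (ticker: string, interval: string, date?: string, limit?: number) =>
  `intraday:${ticker}:${interval}:${date ?? 'latest'}${limit ? `:limit${limit}` : ''}`;

console.log(intradayKey('BTC', '1min'));                 // intraday:BTC:1min:latest
console.log(intradayKey('BTC', '1min', undefined, 500)); // intraday:BTC:1min:latest:limit500
console.log(intradayKey('BTC', '1min', '2025-01-15'));   // intraday:BTC:1min:2025-01-15
```

Because tryIncrementalFetch matches entries with `key.startsWith('intraday:<ticker>:<interval>:latest')`, entries cached under any limit still qualify for incremental reuse.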

@@ -355,6 +498,15 @@ export class StockPriceService implements IProviderRegistry {
   }
 
   private addToCache(key: string, price: IStockPrice | IStockPrice[], ttl?: number): void {
+    // Deduplicate array entries by timestamp before caching
+    if (Array.isArray(price)) {
+      const beforeCount = price.length;
+      price = this.deduplicateByTimestamp(price);
+      if (price.length < beforeCount) {
+        console.log(`Deduplicated ${beforeCount - price.length} duplicate timestamps in cache entry for ${key}`);
+      }
+    }
+
     // Enforce max entries limit
     if (this.cache.size >= this.cacheConfig.maxEntries) {
       // Remove oldest entry
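
The dedup rule referenced here keeps exactly one record per timestamp, preferring whichever copy was fetched most recently. A standalone sketch with a simplified record type (not the library's `IStockPrice` interface):

```typescript
interface Bar {
  timestamp: Date;
  fetchedAt: Date;
  price: number;
}

// Same rule as deduplicateByTimestamp: one entry per timestamp, latest fetchedAt wins.
function dedupeByTimestamp(bars: Bar[]): Bar[] {
  const seen = new Map<number, Bar>();
  for (const bar of bars) {
    const ts = bar.timestamp.getTime();
    const existing = seen.get(ts);
    if (!existing || bar.fetchedAt > existing.fetchedAt) {
      seen.set(ts, bar);
    }
  }
  return Array.from(seen.values());
}

const t = new Date('2025-01-15T10:00:00Z');
const deduped = dedupeByTimestamp([
  { timestamp: t, fetchedAt: new Date('2025-01-15T10:00:05Z'), price: 100 },
  { timestamp: t, fetchedAt: new Date('2025-01-15T10:01:05Z'), price: 101 }, // kept: newer fetch
]);
console.log(deduped.length, deduped[0].price); // 1 101
```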

@@ -24,6 +24,8 @@ export interface IProviderConfig {
   timeout?: number;
   retryAttempts?: number;
   retryDelay?: number;
+  maxRecords?: number; // Maximum records to fetch per request (default: 10000)
+  defaultIntradayLimit?: number; // Default limit for intraday requests without explicit limit (default: 1000)
 }
 
 export interface IProviderRegistry {
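
The two new fields extend the existing per-provider options. A hypothetical configuration object follows; how a config instance is passed to a concrete provider is not shown in this hunk, so the import path and wiring are assumptions:

```typescript
import type { IProviderConfig } from '@fin.cx/opendata'; // assumed export location

const providerConfig: IProviderConfig = {
  timeout: 10000,            // existing option
  retryAttempts: 2,          // existing option
  retryDelay: 500,           // existing option
  maxRecords: 5000,          // new: hard cap on records fetched per request (default 10000)
  defaultIntradayLimit: 500, // new: applied when an intraday request has no explicit limit (default 1000)
};
```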

@@ -378,8 +378,18 @@ export class CoinGeckoProvider implements IStockProvider {
     const marketCapData = responseData.market_caps || [];
     const volumeData = responseData.total_volumes || [];
 
-    // Process each data point
-    for (let i = 0; i < priceData.length; i++) {
+    // Warn if processing large amount of historical data
+    const maxRecords = this.config?.maxRecords || 10000;
+    if (priceData.length > maxRecords) {
+      this.logger.warn(
+        `Historical request for ${request.ticker} returned ${priceData.length} records, ` +
+        `which exceeds maxRecords limit of ${maxRecords}. Processing first ${maxRecords} only.`
+      );
+    }
+
+    // Process each data point (up to maxRecords)
+    const recordsToProcess = Math.min(priceData.length, maxRecords);
+    for (let i = 0; i < recordsToProcess; i++) {
       const [timestamp, price] = priceData[i];
       const date = new Date(timestamp);
 

@@ -480,8 +490,19 @@ export class CoinGeckoProvider implements IStockProvider {
     const marketCapData = responseData.market_caps || [];
     const volumeData = responseData.total_volumes || [];
 
-    // Apply limit if specified
-    const limit = request.limit || priceData.length;
+    // Apply default limit if user didn't specify one (performance optimization)
+    const effectiveLimit = request.limit || this.config?.defaultIntradayLimit || 1000;
+
+    // Warn if fetching large amount of data without explicit limit
+    if (!request.limit && priceData.length > effectiveLimit) {
+      this.logger.warn(
+        `Intraday request for ${request.ticker} returned ${priceData.length} records but no limit specified. ` +
+        `Applying default limit of ${effectiveLimit}. Consider adding a limit to the request for better performance.`
+      );
+    }
+
+    // Apply limit (take most recent data)
+    const limit = Math.min(effectiveLimit, priceData.length);
     const dataToProcess = priceData.slice(-limit);
 
     for (let i = 0; i < dataToProcess.length; i++) {

@@ -624,21 +645,34 @@ export class CoinGeckoProvider implements IStockProvider {
 
       const coinList = await response.json() as ICoinListItem[];
 
+      // Clear cache before rebuilding to prevent memory leak
+      // Keep only entries that are in priorityTickerMap
+      const priorityEntries = new Map<string, string>();
+      for (const [key, value] of this.priorityTickerMap) {
+        priorityEntries.set(key, value);
+      }
+      this.coinMapCache.clear();
+
+      // Restore priority mappings
+      for (const [key, value] of priorityEntries) {
+        this.coinMapCache.set(key, value);
+      }
+
       // Build mapping: symbol -> id
       for (const coin of coinList) {
         const symbol = coin.symbol.toLowerCase();
         const id = coin.id.toLowerCase();
 
-        // Don't overwrite priority mappings or existing cache entries
-        if (!this.priorityTickerMap.has(symbol) && !this.coinMapCache.has(symbol)) {
+        // Don't overwrite priority mappings
+        if (!this.priorityTickerMap.has(symbol)) {
           this.coinMapCache.set(symbol, id);
         }
-        // Always cache the ID mapping
+        // Always cache the ID mapping (id -> id for when users pass CoinGecko IDs directly)
         this.coinMapCache.set(id, id);
       }
 
       this.coinListLoadedAt = new Date();
-      this.logger.info(`Loaded ${coinList.length} coins from CoinGecko`);
+      this.logger.info(`Loaded ${coinList.length} coins from CoinGecko (cache: ${this.coinMapCache.size} entries)`);
     } catch (error) {
       this.logger.error('Failed to load coin list from CoinGecko:', error);
       // Don't throw - we can still work with direct IDs

@@ -187,7 +187,7 @@ export class MarketstackProvider implements IStockProvider {
     const allPrices: IStockPrice[] = [];
     let offset = request.offset || 0;
     const limit = request.limit || 1000; // Max per page
-    const maxRecords = 10000; // Safety limit
+    const maxRecords = this.config?.maxRecords || 10000; // Safety limit (configurable)
 
     while (true) {
       let url = `${this.baseUrl}/eod?access_key=${this.apiKey}`;

@@ -259,7 +259,18 @@ export class MarketstackProvider implements IStockProvider {
     const allPrices: IStockPrice[] = [];
     let offset = 0;
     const limit = 1000; // Max per page for intraday
-    const maxRecords = 10000; // Safety limit
+    const maxRecords = this.config?.maxRecords || 10000; // Safety limit (configurable)
+
+    // Apply default limit if user didn't specify one (performance optimization)
+    const effectiveLimit = request.limit || this.config?.defaultIntradayLimit || 1000;
+
+    // Warn if fetching large amount of data without explicit limit
+    if (!request.limit && effectiveLimit > 1000) {
+      this.logger.warn(
+        `Intraday request for ${request.ticker} without explicit limit will fetch up to ${effectiveLimit} records. ` +
+        `Consider adding a limit to the request for better performance.`
+      );
+    }
 
     // Format symbol for intraday endpoint (replace . with -)
     const formattedSymbol = this.formatSymbolForIntraday(request.ticker);

@@ -310,17 +321,17 @@ export class MarketstackProvider implements IStockProvider {
       const pagination = responseData.pagination;
       const hasMore = pagination && offset + limit < pagination.total;
 
-      // Honor limit from request if specified, or safety limit
-      if (!hasMore || (request.limit && allPrices.length >= request.limit) || allPrices.length >= maxRecords) {
+      // Honor effective limit or safety maxRecords
+      if (!hasMore || allPrices.length >= effectiveLimit || allPrices.length >= maxRecords) {
         break;
       }
 
       offset += limit;
     }
 
-    // Apply limit if specified
-    if (request.limit && allPrices.length > request.limit) {
-      return allPrices.slice(0, request.limit);
+    // Apply effective limit
+    if (allPrices.length > effectiveLimit) {
+      return allPrices.slice(0, effectiveLimit);
     }
 
     return allPrices;