Compare commits

...

6 Commits

Author SHA1 Message Date
jkunz 858d46366f v3.6.0
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 1s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2026-04-17 11:51:02 +00:00
jkunz 73801f785a feat(laws,opendata): add local law storage and migrate OpenData persistence to smartdb-backed local storage 2026-04-17 11:51:02 +00:00
jkunz 79e74a34ed 3.5.1
Default (tags) / security (push) Failing after 0s
Default (tags) / test (push) Failing after 0s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-11-07 12:52:56 +00:00
jkunz adf0afe3c6 update readme 2025-11-07 12:52:45 +00:00
jkunz 3be2f0b855 v3.5.0
Default (tags) / security (push) Failing after 22s
Default (tags) / test (push) Failing after 13s
Default (tags) / release (push) Has been skipped
Default (tags) / metadata (push) Has been skipped
2025-11-07 08:05:59 +00:00
jkunz c38f895a72 feat(stocks): Add provider fetch limits, intraday incremental fetch, cache deduplication, and provider safety/warning improvements 2025-11-07 08:05:59 +00:00
41 changed files with 10087 additions and 7300 deletions
-1
View File
@@ -1 +0,0 @@
/cache
-67
View File
@@ -1,67 +0,0 @@
# language of the project (csharp, python, rust, java, typescript, go, cpp, or ruby)
# * For C, use cpp
# * For JavaScript, use typescript
# Special requirements:
# * csharp: Requires the presence of a .sln file in the project folder.
language: typescript
# whether to use the project's gitignore file to ignore files
# Added on 2025-04-07
ignore_all_files_in_gitignore: true
# list of additional paths to ignore
# same syntax as gitignore, so you can use * and **
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths: []
# whether the project is in read-only mode
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
# Added on 2025-04-18
read_only: false
# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
# Below is the complete list of tools for convenience.
# To make sure you have the latest list of tools, and to view their descriptions,
# execute `uv run scripts/print_tool_overview.py`.
#
# * `activate_project`: Activates a project by name.
# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
# * `create_text_file`: Creates/overwrites a file in the project directory.
# * `delete_lines`: Deletes a range of lines within a file.
# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
# * `execute_shell_command`: Executes a shell command.
# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
# * `initial_instructions`: Gets the initial instructions for the current project.
# Should only be used in settings where the system prompt cannot be set,
# e.g. in clients you have no control over, like Claude Desktop.
# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
# * `insert_at_line`: Inserts content at a given line in a file.
# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
# * `list_memories`: Lists memories in Serena's project-specific memory store.
# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
# * `read_file`: Reads a file within the project directory.
# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
# * `remove_project`: Removes a project from the Serena configuration.
# * `replace_lines`: Replaces a range of lines within a file with new content.
# * `replace_symbol_body`: Replaces the full definition of a symbol.
# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
# * `search_for_pattern`: Performs a search for a pattern in the project.
# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
# * `switch_modes`: Activates modes by providing a list of their names
# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
excluded_tools: []
# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""
project_name: "opendata"
+12 -6
View File
@@ -1,5 +1,5 @@
{
"gitzone": {
"@git.zone/cli": {
"projectType": "npm",
"module": {
"githost": "gitlab.com",
@@ -26,13 +26,19 @@
"business registry",
"data retrieval"
]
},
"release": {
"registries": [
"https://verdaccio.lossless.digital",
"https://registry.npmjs.org"
],
"accessLevel": "public"
}
},
"npmci": {
"npmGlobalTools": [],
"npmAccessLevel": "public"
},
"tsdoc": {
"@git.zone/tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
},
"@ship.zone/szci": {
"npmGlobalTools": []
}
}
+1
View File
@@ -0,0 +1 @@
Important: Also read .nogit/AGENTS.md
+19
View File
@@ -1,5 +1,24 @@
# Changelog
## 2026-04-17 - 3.6.0 - feat(laws,opendata)
add local law storage and migrate OpenData persistence to smartdb-backed local storage
- introduces a new LawService export with searchable LawRecord persistence for German, EU, and US laws
- replaces OpenData startup dependency on external MongoDB environment configuration with embedded local smartdb bootstrap
- hardens Handelsregister downloads and integration tests with better skip handling and downloaded file detection
- updates build and dependency configuration to newer git.zone and push.rocks packages and switches project metadata to .smartconfig.json
## 2025-11-07 - 3.5.0 - feat(stocks)
Add provider fetch limits, intraday incremental fetch, cache deduplication, and provider safety/warning improvements
- Add maxRecords and defaultIntradayLimit to IProviderConfig to control maximum records per request and default intraday limits.
- CoinGecko provider: enforce maxRecords when processing historical data, warn when large historical/intraday results are returned without explicit limits, preserve priority mappings when rebuilding the coin cache, and improve cache load logging.
- Marketstack provider: make safety maxRecords configurable, apply a configurable default intraday limit, warn when no explicit limit is provided, and ensure effective limits are applied to returned results.
- StockPriceService: always attempt incremental fetch for intraday requests without a date to fetch only new data since the last cached timestamp and fall back to full fetch when necessary.
- StockPriceService: deduplicate price arrays by timestamp before caching and after merges to avoid duplicate timestamps and reduce cache bloat.
- Introduce StockDataService for unified access to prices and fundamentals with automatic enrichment (market cap, P/E, price-to-book) and caching improvements.
- Various cache/TTL improvements and safer default behaviors for intraday, historical and live data to improve performance and memory usage.
## 2025-11-06 - 3.4.0 - feat(stocks)
Introduce unified stock data service, new providers, improved caching and German business data tooling
Generated
+2787 -2206
View File
File diff suppressed because it is too large Load Diff
+16 -14
View File
@@ -1,6 +1,6 @@
{
"name": "@fin.cx/opendata",
"version": "3.4.0",
"version": "3.6.0",
"private": false,
"description": "A comprehensive TypeScript library for accessing business data and real-time financial information. Features include German company data management with MongoDB integration, JSONL bulk processing, automated Handelsregister interactions, and real-time stock market data from multiple providers.",
"main": "dist_ts/index.js",
@@ -10,32 +10,34 @@
"license": "MIT",
"scripts": {
"test": "(tstest test/ --verbose)",
"build": "(tsbuild --web --allowimplicitany)",
"build": "(tsbuild)",
"buildDocs": "(tsdoc)"
},
"devDependencies": {
"@git.zone/tsbuild": "^2.6.8",
"@git.zone/tsbundle": "^2.5.1",
"@git.zone/tsrun": "^1.6.2",
"@git.zone/tstest": "^2.7.0",
"@types/node": "^22.14.0"
"@git.zone/tsbuild": "^4.4.0",
"@git.zone/tsbundle": "^2.10.0",
"@git.zone/tsrun": "^2.0.2",
"@git.zone/tstest": "^3.6.3",
"@types/node": "25.6.0"
},
"dependencies": {
"@push.rocks/lik": "^6.2.2",
"@push.rocks/qenv": "^6.1.3",
"@push.rocks/smartarchive": "^4.2.2",
"@push.rocks/smartarchive": "^5.2.1",
"@push.rocks/smartarray": "^1.1.0",
"@push.rocks/smartbrowser": "^2.0.8",
"@push.rocks/smartdata": "^5.16.4",
"@push.rocks/smartdata": "^7.1.7",
"@push.rocks/smartdb": "^2.7.0",
"@push.rocks/smartdelay": "^3.0.5",
"@push.rocks/smartfile": "^11.2.7",
"@push.rocks/smartfile": "^13.1.2",
"@push.rocks/smartfs": "^1.5.0",
"@push.rocks/smartlog": "^3.1.10",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartrequest": "^4.3.4",
"@push.rocks/smartstream": "^3.2.5",
"@push.rocks/smartrequest": "^5.0.1",
"@push.rocks/smartstream": "^3.4.0",
"@push.rocks/smartunique": "^3.0.9",
"@push.rocks/smartxml": "^1.1.1",
"@push.rocks/smartxml": "^2.0.0",
"@tsclass/tsclass": "^9.3.0"
},
"repository": {
@@ -58,7 +60,7 @@
"dist_ts_web/**/*",
"assets/**/*",
"cli.js",
"npmextra.json",
".smartconfig.json",
"readme.md"
],
"keywords": [
+3793 -3959
View File
File diff suppressed because it is too large Load Diff
+307 -703
View File
File diff suppressed because it is too large Load Diff
-193
View File
@@ -1,193 +0,0 @@
# Stock Prices Module Implementation Plan
Command to reread guidelines: Read /home/philkunz/.claude/CLAUDE.md
## Overview
Implementation of a stocks module for fetching current stock prices using various APIs. The architecture will support multiple providers, but we'll start with implementing only Yahoo Finance API. The design will make it easy to add additional providers (Alpha Vantage, IEX Cloud, etc.) in the future without changing the core architecture.
## Phase 1: Yahoo Finance Implementation
### 1.1 Research & Documentation
- [ ] Research Yahoo Finance API endpoints (no official API, using public endpoints)
- [ ] Document available data fields and formats
- [ ] Identify rate limits and restrictions
- [ ] Test endpoints manually with curl
### 1.2 Module Structure
```
ts/
├── index.ts # Main exports
├── plugins.ts # External dependencies
└── stocks/
├── index.ts # Stocks module exports
├── classes.stockservice.ts # Main StockPriceService class
├── interfaces/
│ ├── stockprice.ts # IStockPrice interface
│ └── provider.ts # IStockProvider interface (for all providers)
└── providers/
├── provider.yahoo.ts # Yahoo Finance implementation
└── (future: provider.alphavantage.ts, provider.iex.ts, etc.)
```
### 1.3 Core Interfaces
```typescript
// IStockPrice - Standardized stock price data
interface IStockPrice {
ticker: string;
price: number;
currency: string;
change: number;
changePercent: number;
timestamp: Date;
provider: string;
}
// IStockProvider - Provider implementation contract
interface IStockProvider {
name: string;
fetchPrice(ticker: string): Promise<IStockPrice>;
fetchPrices(tickers: string[]): Promise<IStockPrice[]>;
isAvailable(): Promise<boolean>;
}
```
### 1.4 Yahoo Finance Provider Implementation
- [ ] Create YahooFinanceProvider class
- [ ] Implement HTTP requests to Yahoo Finance endpoints
- [ ] Parse response data into IStockPrice format
- [ ] Handle errors and edge cases
- [ ] Add request throttling/rate limiting
### 1.5 Main Service Class
- [ ] Create StockPriceService class with provider registry
- [ ] Implement provider interface for pluggable providers
- [ ] Register Yahoo provider (with ability to add more later)
- [ ] Add method for single ticker lookup
- [ ] Add method for batch ticker lookup
- [ ] Implement error handling with graceful degradation
- [ ] Design fallback mechanism (ready for multiple providers)
## Phase 2: Core Features
### 2.1 Service Architecture
- [ ] Create provider registry pattern for managing multiple providers
- [ ] Implement provider priority and selection logic
- [ ] Design provider health check interface
- [ ] Create provider configuration system
- [ ] Implement provider discovery mechanism
- [ ] Add provider capability querying (which tickers/markets supported)
## Phase 3: Advanced Features
### 3.1 Caching System
- [ ] Design cache interface
- [ ] Implement in-memory cache with TTL
- [ ] Add cache invalidation logic
- [ ] Make cache configurable per ticker
### 3.2 Configuration
- [ ] Provider configuration (timeout, retry settings)
- [ ] Cache configuration (TTL, max entries)
- [ ] Request timeout configuration
- [ ] Error handling configuration
### 3.3 Error Handling
- [ ] Define custom error types
- [ ] Implement retry logic with exponential backoff
- [ ] Add circuit breaker pattern for failing providers
- [ ] Comprehensive error logging
## Phase 4: Testing
### 4.1 Unit Tests
- [ ] Test each provider independently
- [ ] Mock HTTP requests for predictable testing
- [ ] Test error scenarios
- [ ] Test data transformation logic
### 4.2 Integration Tests
- [ ] Test with real API calls (rate limit aware)
- [ ] Test provider fallback scenarios
- [ ] Test batch operations
- [ ] Test cache behavior
### 4.3 Performance Tests
- [ ] Measure response times
- [ ] Test concurrent request handling
- [ ] Validate cache effectiveness
## Implementation Order
1. **Week 1: Yahoo Finance Provider**
- Research and test Yahoo endpoints
- Implement basic provider and service
- Create core interfaces
- Basic error handling
2. **Week 2: Service Architecture**
- Create extensible provider system
- Implement provider interface
- Add provider registration
3. **Week 3: Advanced Features**
- Implement caching system
- Add configuration management
- Enhance error handling
4. **Week 4: Testing & Documentation**
- Write comprehensive tests
- Create usage documentation
- Performance optimization
## Dependencies
### Required
- `@push.rocks/smartrequest` - HTTP requests
- `@push.rocks/smartpromise` - Promise utilities
- `@push.rocks/smartlog` - Logging
### Development
- `@git.zone/tstest` - Testing framework
- `@git.zone/tsrun` - TypeScript execution
## API Endpoints Research
### Yahoo Finance
- Base URL: `https://query1.finance.yahoo.com/v8/finance/chart/{ticker}`
- No authentication required
- Returns JSON with price data
- Rate limits unknown (need to test)
- Alternative endpoints to explore:
- `/v7/finance/quote` - Simplified quote data
- `/v10/finance/quoteSummary` - Detailed company data
## Success Criteria
1. Can fetch current stock prices for any valid ticker
2. Extensible architecture for future providers
3. Response time < 1 second for cached data
4. Response time < 3 seconds for fresh data
5. Proper error handling and recovery
6. Comprehensive test coverage (>80%)
## Notes
- Yahoo Finance provides free stock data without authentication
- **Architecture designed for multiple providers**: While only implementing Yahoo Finance initially, all interfaces, classes, and patterns are designed to support multiple stock data providers
- The provider registry pattern allows adding new providers without modifying existing code
- Each provider implements the same IStockProvider interface for consistency
- Future providers can be added by simply creating a new provider class and registering it
- Implement proper TypeScript types for all data structures
- Follow the project's coding standards (prefix interfaces with 'I')
- Use plugins.ts for all external dependencies
- Keep filenames lowercase
- Write tests using @git.zone/tstest with smartexpect syntax
- Focus on clean, extensible architecture for future growth
## Future Provider Addition Example
When ready to add a new provider (e.g., Alpha Vantage), the process will be:
1. Create `ts/stocks/providers/provider.alphavantage.ts`
2. Implement the `IStockProvider` interface
3. Register the provider in the StockPriceService
4. No changes needed to existing code or interfaces
+395
View File
@@ -0,0 +1,395 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as opendata from '../ts/index.js';
/**
* Test to inspect actual cache contents and verify data integrity
*/
/**
 * Deterministic IStockProvider test double.
 *
 * Records every fetchData invocation in `callLog` so the cache tests can
 * count provider calls (cache hit vs. miss). Serves synthetic data only:
 * an 'intraday' request yields `limit` (default 10) bars spaced one minute
 * apart starting at `date` (default 2025-01-07T09:30Z); any other request
 * yields a single fixed end-of-day quote.
 */
class MockProvider implements opendata.IStockProvider {
  name = 'MockProvider';
  priority = 100;
  requiresAuth = false;

  /** One entry per fetchData call, in invocation order. */
  public callLog: Array<{ type: string; ticker: string; timestamp: Date }> = [];

  async fetchData(request: opendata.IStockDataRequest): Promise<opendata.IStockPrice | opendata.IStockPrice[]> {
    // Record the call before producing data so every request is counted.
    const logTicker = request.type === 'batch' ? request.tickers.join(',') : (request as any).ticker;
    this.callLog.push({ type: request.type, ticker: logTicker, timestamp: new Date() });

    if (request.type === 'intraday') {
      // Minute-spaced synthetic series: price climbs by 1 per bar from 100.
      const barCount = request.limit || 10;
      const seriesStart = request.date || new Date('2025-01-07T09:30:00.000Z');
      const series: opendata.IStockPrice[] = [];
      let barIndex = 0;
      while (barIndex < barCount) {
        series.push({
          ticker: request.ticker,
          price: 100 + barIndex,
          currency: 'USD',
          timestamp: new Date(seriesStart.getTime() + barIndex * 60 * 1000),
          fetchedAt: new Date(),
          provider: this.name,
          dataType: 'intraday',
          marketState: 'REGULAR',
          open: 100,
          high: 101,
          low: 99,
          volume: 1000000,
          change: 0,
          changePercent: 0,
          previousClose: 100
        });
        barIndex += 1;
      }
      return series;
    }

    // Any non-intraday request: a single fixed EOD quote.
    return {
      ticker: (request as any).ticker,
      price: 150,
      currency: 'USD',
      timestamp: new Date(),
      fetchedAt: new Date(),
      provider: this.name,
      dataType: 'eod',
      marketState: 'CLOSED',
      open: 149,
      high: 151,
      low: 148,
      volume: 5000000,
      change: 1,
      changePercent: 0.67,
      previousClose: 149
    };
  }

  async isAvailable(): Promise<boolean> {
    // The mock is always reachable.
    return true;
  }
}
// Shared fixtures for the suites below: one service instance plus one
// recording mock provider, registered once in the Setup test.
let stockService: opendata.StockPriceService;
let mockProvider: MockProvider;
// Setup: service with a 60s cache TTL and a 100-entry cap.
tap.test('Cache Inspection - Setup', async () => {
stockService = new opendata.StockPriceService({
ttl: 60000,
maxEntries: 100
});
mockProvider = new MockProvider();
stockService.register(mockProvider);
console.log('✓ Service and provider initialized');
});
// Cache keys must incorporate type, ticker, interval AND limit: only
// byte-identical requests may share an entry. Provider call counts on the
// mock prove whether a request hit the cache or fell through to the provider.
tap.test('Cache Inspection - Verify Cache Key Generation', async () => {
await tap.test('should generate unique cache keys for different requests', async () => {
stockService.clearCache();
mockProvider.callLog = [];
// Fetch with different parameters
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 20 });
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '5min', limit: 10 });
await stockService.getData({ type: 'intraday', ticker: 'MSFT', interval: '1min', limit: 10 });
// Should have made 4 provider calls (all different cache keys)
expect(mockProvider.callLog.length).toEqual(4);
console.log('✓ Cache keys are unique for different parameters');
console.log(` Total provider calls: ${mockProvider.callLog.length}`);
});
await tap.test('should reuse cache for identical requests', async () => {
stockService.clearCache();
mockProvider.callLog = [];
// Same request 3 times
const result1 = await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
const result2 = await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
const result3 = await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 10 });
// Should have made only 1 provider call
expect(mockProvider.callLog.length).toEqual(1);
// All results should be identical (same reference from cache)
expect((result1 as opendata.IStockPrice[]).length).toEqual((result2 as opendata.IStockPrice[]).length);
expect((result1 as opendata.IStockPrice[]).length).toEqual((result3 as opendata.IStockPrice[]).length);
// Verify timestamps match (exact same cached data)
const ts1 = (result1 as opendata.IStockPrice[])[0].timestamp.getTime();
const ts2 = (result2 as opendata.IStockPrice[])[0].timestamp.getTime();
const ts3 = (result3 as opendata.IStockPrice[])[0].timestamp.getTime();
expect(ts1).toEqual(ts2);
expect(ts2).toEqual(ts3);
console.log('✓ Cache reused for identical requests');
console.log(` 3 requests → 1 provider call`);
});
});
// Cached entries must be full IStockPrice objects (no field stripping) and
// must come back in the same order the provider produced them.
tap.test('Cache Inspection - Verify Data Structure', async () => {
await tap.test('should cache complete IStockPrice objects', async () => {
stockService.clearCache();
const result = await stockService.getData({
type: 'intraday',
ticker: 'TSLA',
interval: '1min',
limit: 5
});
expect(result).toBeArray();
const prices = result as opendata.IStockPrice[];
// Verify structure of cached data
for (const price of prices) {
expect(price).toHaveProperty('ticker');
expect(price).toHaveProperty('price');
expect(price).toHaveProperty('currency');
expect(price).toHaveProperty('timestamp');
expect(price).toHaveProperty('fetchedAt');
expect(price).toHaveProperty('provider');
expect(price).toHaveProperty('dataType');
expect(price).toHaveProperty('marketState');
expect(price).toHaveProperty('open');
expect(price).toHaveProperty('high');
expect(price).toHaveProperty('low');
expect(price).toHaveProperty('volume');
// Verify types
expect(typeof price.ticker).toEqual('string');
expect(typeof price.price).toEqual('number');
expect(price.timestamp).toBeInstanceOf(Date);
expect(price.fetchedAt).toBeInstanceOf(Date);
}
console.log('✓ Cached data has complete IStockPrice structure');
console.log(` Sample: ${prices[0].ticker} @ $${prices[0].price} (${prices[0].timestamp.toISOString()})`);
});
await tap.test('should preserve array order in cache', async () => {
stockService.clearCache();
// First call populates the cache, second call must replay it unchanged.
const result1 = await stockService.getData({
type: 'intraday',
ticker: 'AAPL',
interval: '1min',
limit: 10
});
const result2 = await stockService.getData({
type: 'intraday',
ticker: 'AAPL',
interval: '1min',
limit: 10
});
const prices1 = result1 as opendata.IStockPrice[];
const prices2 = result2 as opendata.IStockPrice[];
// Verify order is preserved
for (let i = 0; i < prices1.length; i++) {
expect(prices1[i].timestamp.getTime()).toEqual(prices2[i].timestamp.getTime());
expect(prices1[i].price).toEqual(prices2[i].price);
}
console.log('✓ Array order preserved in cache');
});
});
// TTL expiry: a dedicated service with a 100ms TTL — entries must be served
// from cache before expiry and re-fetched from the provider afterwards.
tap.test('Cache Inspection - Verify TTL Behavior', async () => {
await tap.test('should respect cache TTL for intraday data', async (testArg) => {
// Create service with very short TTL for testing
const shortTTLService = new opendata.StockPriceService({
ttl: 100, // 100ms
maxEntries: 100
});
const testProvider = new MockProvider();
shortTTLService.register(testProvider);
// First fetch
await shortTTLService.getData({
type: 'intraday',
ticker: 'TEST',
interval: '1min',
limit: 5
});
const callCount1 = testProvider.callLog.length;
// Immediate second fetch - should hit cache
await shortTTLService.getData({
type: 'intraday',
ticker: 'TEST',
interval: '1min',
limit: 5
});
const callCount2 = testProvider.callLog.length;
expect(callCount2).toEqual(callCount1); // No new call
// Wait for TTL to expire
await new Promise(resolve => setTimeout(resolve, 150));
// Third fetch - should hit provider (cache expired)
await shortTTLService.getData({
type: 'intraday',
ticker: 'TEST',
interval: '1min',
limit: 5
});
const callCount3 = testProvider.callLog.length;
expect(callCount3).toBeGreaterThan(callCount2); // New call made
console.log('✓ Cache TTL working correctly');
console.log(` Before expiry: ${callCount2 - callCount1} new calls`);
console.log(` After expiry: ${callCount3 - callCount2} new calls`);
});
});
// Memory-related checks: cached series must be timestamp-deduplicated, and
// the second test only logs a rough size estimate (no assertions).
tap.test('Cache Inspection - Memory Efficiency', async () => {
await tap.test('should store deduplicated data in cache', async () => {
stockService.clearCache();
mockProvider.callLog = [];
// Fetch data
const result1 = await stockService.getData({
type: 'intraday',
ticker: 'AAPL',
interval: '1min',
limit: 100
});
const prices = result1 as opendata.IStockPrice[];
// Verify no duplicate timestamps in cached data
const timestamps = prices.map(p => p.timestamp.getTime());
const uniqueTimestamps = new Set(timestamps);
expect(uniqueTimestamps.size).toEqual(timestamps.length);
console.log('✓ No duplicate timestamps in cached data');
console.log(` Records: ${prices.length}`);
console.log(` Unique timestamps: ${uniqueTimestamps.size}`);
});
await tap.test('should estimate memory usage', async () => {
stockService.clearCache();
// Fetch various sizes
await stockService.getData({ type: 'intraday', ticker: 'AAPL', interval: '1min', limit: 100 });
await stockService.getData({ type: 'intraday', ticker: 'MSFT', interval: '1min', limit: 100 });
await stockService.getData({ type: 'intraday', ticker: 'GOOGL', interval: '5min', limit: 50 });
// Estimate memory (rough calculation)
// Each IStockPrice is approximately 300-400 bytes
const totalRecords = 100 + 100 + 50;
const estimatedBytes = totalRecords * 350; // Average 350 bytes per record
const estimatedKB = (estimatedBytes / 1024).toFixed(2);
console.log('✓ Cache memory estimation:');
console.log(` Total records cached: ${totalRecords}`);
console.log(` Estimated memory: ~${estimatedKB} KB`);
console.log(` Average per record: ~350 bytes`);
});
});
// Edge cases: empty provider results must still be cached (a valid answer,
// not a miss), and a single-record series must round-trip intact.
tap.test('Cache Inspection - Edge Cases', async () => {
await tap.test('should handle empty results', async () => {
const emptyProvider = new MockProvider();
// Shadow the prototype method on this instance so it always returns [].
emptyProvider.fetchData = async () => [];
const emptyService = new opendata.StockPriceService();
emptyService.register(emptyProvider);
const result = await emptyService.getData({
type: 'intraday',
ticker: 'EMPTY',
interval: '1min'
});
expect(result).toBeArray();
expect((result as opendata.IStockPrice[]).length).toEqual(0);
// Second fetch should still hit cache (even though empty)
const result2 = await emptyService.getData({
type: 'intraday',
ticker: 'EMPTY',
interval: '1min'
});
expect(result2).toBeArray();
expect((result2 as opendata.IStockPrice[]).length).toEqual(0);
console.log('✓ Empty results cached correctly');
});
await tap.test('should handle single record', async () => {
stockService.clearCache();
const result = await stockService.getData({
type: 'intraday',
ticker: 'SINGLE',
interval: '1min',
limit: 1
});
expect(result).toBeArray();
expect((result as opendata.IStockPrice[]).length).toEqual(1);
console.log('✓ Single record cached correctly');
});
});
// fetchedAt marks when the provider was actually hit: it must fall inside
// the fetch window on a miss and be replayed unchanged on a cache hit.
tap.test('Cache Inspection - Verify fetchedAt Timestamps', async () => {
await tap.test('should preserve fetchedAt in cached data', async () => {
stockService.clearCache();
const beforeFetch = Date.now();
const result = await stockService.getData({
type: 'intraday',
ticker: 'AAPL',
interval: '1min',
limit: 5
});
const afterFetch = Date.now();
const prices = result as opendata.IStockPrice[];
for (const price of prices) {
const fetchedTime = price.fetchedAt.getTime();
expect(fetchedTime).toBeGreaterThanOrEqual(beforeFetch);
expect(fetchedTime).toBeLessThanOrEqual(afterFetch);
}
// Fetch again - fetchedAt should be the same (from cache)
await new Promise(resolve => setTimeout(resolve, 50)); // Small delay
const result2 = await stockService.getData({
type: 'intraday',
ticker: 'AAPL',
interval: '1min',
limit: 5
});
const prices2 = result2 as opendata.IStockPrice[];
// Verify fetchedAt matches (same cached data)
for (let i = 0; i < prices.length; i++) {
expect(prices2[i].fetchedAt.getTime()).toEqual(prices[i].fetchedAt.getTime());
}
console.log('✓ fetchedAt timestamps preserved in cache');
});
});
export default tap.start();
@@ -1,5 +1,6 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as opendata from '../ts/index.js';
import * as plugins from '../ts/plugins.js';
// Test data
const testCryptos = ['BTC', 'ETH', 'USDT'];
@@ -8,6 +9,28 @@ const invalidCrypto = 'INVALID_CRYPTO_XYZ_12345';
let stockService: opendata.StockPriceService;
let coingeckoProvider: opendata.CoinGeckoProvider;
let coingeckoAvailable = false;
let coingeckoSkipReason = 'CoinGecko integration requirements are unavailable.';
type TSkipTools = {
skip: (reason?: string) => never;
};
const runCoinGeckoRequest = async <T>(toolsArg: TSkipTools, operation: () => Promise<T>): Promise<T> => {
try {
return await operation();
} catch (error) {
const errorMessage = plugins.getErrorMessage(error);
if (errorMessage.includes('Rate limit exceeded')) {
coingeckoAvailable = false;
coingeckoSkipReason = `Skipping CoinGecko integration tests: ${errorMessage}.`;
toolsArg.skip(coingeckoSkipReason);
}
throw error;
}
};
tap.test('should create StockPriceService instance', async () => {
stockService = new opendata.StockPriceService({
@@ -23,22 +46,29 @@ tap.test('should create CoinGeckoProvider instance without API key', async () =>
expect(coingeckoProvider.name).toEqual('CoinGecko');
expect(coingeckoProvider.requiresAuth).toEqual(false);
expect(coingeckoProvider.priority).toEqual(90);
coingeckoAvailable = await coingeckoProvider.isAvailable();
if (!coingeckoAvailable) {
coingeckoSkipReason = 'Skipping CoinGecko integration tests: provider is not reachable.';
}
});
tap.test('should register CoinGecko provider with the service', async () => {
tap.test('should register CoinGecko provider with the service', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
stockService.register(coingeckoProvider);
const providers = stockService.getAllProviders();
expect(providers).toContainEqual(coingeckoProvider);
expect(stockService.getProvider('CoinGecko')).toEqual(coingeckoProvider);
});
tap.test('should check CoinGecko provider health', async () => {
const health = await stockService.checkProvidersHealth();
tap.test('should check CoinGecko provider health', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
const health = await runCoinGeckoRequest(toolsArg, async () => stockService.checkProvidersHealth());
expect(health.get('CoinGecko')).toEqual(true);
});
tap.test('should fetch single crypto price using ticker symbol (BTC)', async () => {
const price = await stockService.getPrice({ ticker: 'BTC' });
tap.test('should fetch single crypto price using ticker symbol (BTC)', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
const price = await runCoinGeckoRequest(toolsArg, async () => stockService.getPrice({ ticker: 'BTC' }));
expect(price).toHaveProperty('ticker');
expect(price).toHaveProperty('price');
@@ -62,11 +92,12 @@ tap.test('should fetch single crypto price using ticker symbol (BTC)', async ()
console.log(` Change: ${price.changePercent >= 0 ? '+' : ''}${price.changePercent.toFixed(2)}%`);
});
tap.test('should fetch single crypto price using CoinGecko ID (bitcoin)', async () => {
tap.test('should fetch single crypto price using CoinGecko ID (bitcoin)', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
// Clear cache to ensure fresh fetch
stockService.clearCache();
const price = await stockService.getPrice({ ticker: 'bitcoin' });
const price = await runCoinGeckoRequest(toolsArg, async () => stockService.getPrice({ ticker: 'bitcoin' }));
expect(price.ticker).toEqual('BITCOIN');
expect(price.price).toBeGreaterThan(0);
@@ -74,12 +105,13 @@ tap.test('should fetch single crypto price using CoinGecko ID (bitcoin)', async
expect(price.companyName).toInclude('Bitcoin');
});
tap.test('should fetch multiple crypto prices (batch)', async () => {
tap.test('should fetch multiple crypto prices (batch)', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
stockService.clearCache();
const prices = await stockService.getPrices({
const prices = await runCoinGeckoRequest(toolsArg, async () => stockService.getPrices({
tickers: testCryptos
});
}));
expect(prices).toBeArray();
expect(prices.length).toEqual(testCryptos.length);
@@ -98,19 +130,20 @@ tap.test('should fetch multiple crypto prices (batch)', async () => {
}
});
tap.test('should fetch historical crypto prices', async () => {
tap.test('should fetch historical crypto prices', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
// Add delay to avoid rate limiting
await new Promise(resolve => setTimeout(resolve, 3000));
const to = new Date();
const from = new Date(to.getTime() - 7 * 24 * 60 * 60 * 1000); // 7 days ago
const prices = await stockService.getData({
const prices = await runCoinGeckoRequest(toolsArg, async () => stockService.getData({
type: 'historical',
ticker: 'BTC',
from: from,
to: to
});
}));
expect(prices).toBeArray();
expect((prices as opendata.IStockPrice[]).length).toBeGreaterThan(0);
@@ -143,16 +176,17 @@ tap.test('should fetch historical crypto prices', async () => {
expect(firstPrice.provider).toEqual('CoinGecko');
});
tap.test('should fetch intraday crypto prices (hourly)', async () => {
tap.test('should fetch intraday crypto prices (hourly)', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
// Add delay to avoid rate limiting
await new Promise(resolve => setTimeout(resolve, 3000));
const prices = await stockService.getData({
const prices = await runCoinGeckoRequest(toolsArg, async () => stockService.getData({
type: 'intraday',
ticker: 'ETH',
interval: '1hour',
limit: 12 // Last 12 hours
});
}));
expect(prices).toBeArray();
expect((prices as opendata.IStockPrice[]).length).toBeGreaterThan(0);
@@ -175,9 +209,10 @@ tap.test('should fetch intraday crypto prices (hourly)', async () => {
expect(firstPrice.provider).toEqual('CoinGecko');
});
tap.test('should serve cached prices on subsequent requests', async () => {
tap.test('should serve cached prices on subsequent requests', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
// First request - should hit the API
const firstRequest = await stockService.getPrice({ ticker: 'BTC' });
const firstRequest = await runCoinGeckoRequest(toolsArg, async () => stockService.getPrice({ ticker: 'BTC' }));
// Second request - should be served from cache
const secondRequest = await stockService.getPrice({ ticker: 'BTC' });
@@ -188,23 +223,26 @@ tap.test('should serve cached prices on subsequent requests', async () => {
expect(secondRequest.fetchedAt).toEqual(firstRequest.fetchedAt);
});
tap.test('should handle invalid crypto ticker gracefully', async () => {
tap.test('should handle invalid crypto ticker gracefully', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
try {
await stockService.getPrice({ ticker: invalidCrypto });
await runCoinGeckoRequest(toolsArg, async () => stockService.getPrice({ ticker: invalidCrypto }));
throw new Error('Should have thrown an error for invalid ticker');
} catch (error) {
expect(error.message).toInclude('Failed to fetch');
expect(plugins.getErrorMessage(error)).toInclude('Failed to fetch');
}
});
tap.test('should support market checking', async () => {
tap.test('should support market checking', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
expect(coingeckoProvider.supportsMarket('CRYPTO')).toEqual(true);
expect(coingeckoProvider.supportsMarket('BTC')).toEqual(true);
expect(coingeckoProvider.supportsMarket('ETH')).toEqual(true);
expect(coingeckoProvider.supportsMarket('NASDAQ')).toEqual(false);
});
tap.test('should support ticker validation', async () => {
tap.test('should support ticker validation', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
expect(coingeckoProvider.supportsTicker('BTC')).toEqual(true);
expect(coingeckoProvider.supportsTicker('bitcoin')).toEqual(true);
expect(coingeckoProvider.supportsTicker('wrapped-bitcoin')).toEqual(true);
@@ -212,11 +250,15 @@ tap.test('should support ticker validation', async () => {
expect(coingeckoProvider.supportsTicker('BTC@USD')).toEqual(false);
});
tap.test('should display provider statistics', async () => {
tap.test('should display provider statistics', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
const stats = stockService.getProviderStats();
const coingeckoStats = stats.get('CoinGecko');
expect(coingeckoStats).toBeTruthy();
if (!coingeckoStats) {
throw new Error('Missing CoinGecko stats');
}
expect(coingeckoStats.successCount).toBeGreaterThan(0);
console.log('\n📊 CoinGecko Provider Statistics:');
@@ -227,14 +269,15 @@ tap.test('should display provider statistics', async () => {
}
});
tap.test('should display crypto price dashboard', async () => {
tap.test('should display crypto price dashboard', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
// Add delay to avoid rate limiting
await new Promise(resolve => setTimeout(resolve, 3000));
stockService.clearCache();
const cryptos = ['BTC', 'ETH', 'BNB', 'SOL', 'ADA'];
const prices = await stockService.getPrices({ tickers: cryptos });
const prices = await runCoinGeckoRequest(toolsArg, async () => stockService.getPrices({ tickers: cryptos }));
console.log('\n╔═══════════════════════════════════════════════════════════╗');
console.log('║ 🌐 CRYPTOCURRENCY PRICE DASHBOARD ║');
@@ -253,7 +296,8 @@ tap.test('should display crypto price dashboard', async () => {
console.log(`Fetched at: ${prices[0].fetchedAt.toISOString()}`);
});
tap.test('should clear cache', async () => {
tap.test('should clear cache', async (toolsArg) => {
toolsArg.skipIf(!coingeckoAvailable, coingeckoSkipReason);
stockService.clearCache();
// Cache is cleared, no assertions needed
});
+21 -7
View File
@@ -12,6 +12,8 @@ const testGermanBusinessDataDir = plugins.path.join(testNogitDir, 'germanbusines
const testOutputDir = plugins.path.join(testNogitDir, 'testoutput');
let testOpenDataInstance: opendata.OpenData;
let handelsregisterStarted = false;
let handelsregisterSkipReason = 'Handelsregister integration requirements are unavailable.';
tap.test('first test', async () => {
testOpenDataInstance = new opendata.OpenData({
@@ -23,32 +25,44 @@ tap.test('first test', async () => {
});
tap.test('should start the instance', async () => {
await testOpenDataInstance.start();
try {
await testOpenDataInstance.start();
handelsregisterStarted = true;
} catch (error) {
handelsregisterSkipReason = `Skipping Handelsregister integration tests: ${plugins.getErrorMessage(error)}`;
console.warn(handelsregisterSkipReason);
}
});
const resultsSearch = tap.test('should get the data for a company', async () => {
const resultsSearch = tap.test('should get the data for a company', async (toolsArg) => {
toolsArg.skipIf(!handelsregisterStarted, handelsregisterSkipReason);
const result = await testOpenDataInstance.handelsregister.searchCompany('LADR', 20);
console.log(result);
return result;
});
tap.test('should get the data for a specific company', async () => {
let testCompany: BusinessRecord['data']['germanParsedRegistration'] = (await resultsSearch.testResultPromise)[0]['germanParsedRegistration'];
tap.test('should get the data for a specific company', async (toolsArg) => {
toolsArg.skipIf(!handelsregisterStarted, handelsregisterSkipReason);
const searchResults = await resultsSearch.testResultPromise as BusinessRecord['data'][];
let testCompany: BusinessRecord['data']['germanParsedRegistration'] = searchResults[0].germanParsedRegistration;
console.log(`trying to find specific company with:`);
console.log(testCompany);
const result = await testOpenDataInstance.handelsregister.getSpecificCompany(testCompany);
console.log(result);
await plugins.smartfs.directory(testOutputDir).create();
await Promise.all(result.files.map(async (file) => {
await file.writeToDir(testOutputDir);
await file.writeToDiskAtPath(
plugins.path.join(testOutputDir, plugins.path.basename(file.path))
);
}));
});
tap.test('should stop the instance', async (toolsArg) => {
toolsArg.skipIf(!handelsregisterStarted, handelsregisterSkipReason);
await testOpenDataInstance.stop();
});
tap.start()
export default tap.start()
+582
View File
@@ -0,0 +1,582 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as opendata from '../ts/index.js';
/**
* Mock provider for testing incremental cache behavior
* Allows precise control over what data is returned to test cache logic
*/
/**
 * Mock provider for testing incremental cache behavior.
 * Lets tests control exactly which records are returned and observe how
 * often (and with what request) the service calls the provider.
 */
class MockIntradayProvider implements opendata.IStockProvider {
  name = 'MockIntraday';
  priority = 100;
  requiresAuth = false;

  // Number of fetchData invocations since the last resetTracking().
  public fetchCallCount = 0;
  // Most recent request handed to fetchData (null after resetTracking()).
  public lastRequest: opendata.IStockDataRequest | null = null;

  // Records served by fetchData.
  private mockData: opendata.IStockPrice[] = [];

  /** Replace the data returned by subsequent fetches. */
  public setMockData(data: opendata.IStockPrice[]): void {
    this.mockData = data;
  }

  /** Clear the fetch-call bookkeeping. */
  public resetTracking(): void {
    this.fetchCallCount = 0;
    this.lastRequest = null;
  }

  async fetchData(request: opendata.IStockDataRequest): Promise<opendata.IStockPrice | opendata.IStockPrice[]> {
    this.fetchCallCount++;
    this.lastRequest = request;

    if (request.type === 'intraday') {
      // Simulate an incremental fetch: only records strictly newer than
      // request.date are returned when a date filter is present.
      const afterDate = request.date
        ? this.mockData.filter((price) => price.timestamp > request.date!)
        : [...this.mockData];
      // Honor the limit by keeping the most recent entries.
      return request.limit ? afterDate.slice(-request.limit) : afterDate;
    }

    // Non-intraday requests return the first mock record.
    if (this.mockData.length === 0) {
      throw new Error('No mock data available');
    }
    return this.mockData[0];
  }

  async isAvailable(): Promise<boolean> {
    return true;
  }
}
/**
* Helper to generate mock intraday prices
*/
/**
 * Builds `count` mock intraday price records for `ticker`, spaced
 * `intervalMinutes` apart starting at `startTime`. Prices follow a small
 * random walk around 100; change fields are zeroed for simplicity.
 */
function generateMockIntradayPrices(
  ticker: string,
  count: number,
  startTime: Date,
  intervalMinutes: number = 1
): opendata.IStockPrice[] {
  const stepMs = intervalMinutes * 60 * 1000;
  const result: opendata.IStockPrice[] = [];
  let walkPrice = 100;

  for (let index = 0; index < count; index++) {
    walkPrice += (Math.random() - 0.5) * 2; // random walk step in [-1, 1)
    result.push({
      ticker,
      price: walkPrice,
      currency: 'USD',
      timestamp: new Date(startTime.getTime() + index * stepMs),
      fetchedAt: new Date(),
      provider: 'MockIntraday',
      dataType: 'intraday',
      marketState: 'REGULAR',
      open: walkPrice - 0.5,
      high: walkPrice + 1,
      low: walkPrice - 1,
      volume: 1000000,
      change: 0,
      changePercent: 0,
      previousClose: walkPrice
    });
  }
  return result;
}
// Shared fixtures: one service and one mock provider reused (and reset) by
// every test group below — the tap.test blocks are order-dependent.
let stockService: opendata.StockPriceService;
let mockProvider: MockIntradayProvider;

tap.test('Incremental Cache Setup', async () => {
  await tap.test('should create StockPriceService and MockProvider', async () => {
    stockService = new opendata.StockPriceService({
      ttl: 60000, // 1 minute default (will be overridden by smart TTL)
      maxEntries: 1000
    });
    expect(stockService).toBeInstanceOf(opendata.StockPriceService);
    mockProvider = new MockIntradayProvider();
    stockService.register(mockProvider);
    const providers = stockService.getEnabledProviders();
    expect(providers).toContainEqual(mockProvider);
    console.log('✓ Test setup complete');
  });
});

// Baseline cache semantics: first request hits the provider, an identical
// follow-up request is answered entirely from cache.
tap.test('Incremental Cache - Basic Behavior', async () => {
  await tap.test('should cache intraday data on first fetch', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    const mockData = generateMockIntradayPrices('AAPL', 10, startTime, 1);
    mockProvider.setMockData(mockData);
    // First fetch - should hit provider
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'AAPL',
      interval: '1min',
      limit: 10
    });
    expect(result1).toBeArray();
    expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
    expect(mockProvider.fetchCallCount).toEqual(1);
    console.log('✓ First fetch cached 10 records');
  });
  await tap.test('should serve from cache on second identical request', async () => {
    mockProvider.resetTracking();
    // Second fetch - should hit cache (no provider call)
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'AAPL',
      interval: '1min',
      limit: 10
    });
    expect(result2).toBeArray();
    expect((result2 as opendata.IStockPrice[]).length).toEqual(10);
    expect(mockProvider.fetchCallCount).toEqual(0); // Should NOT call provider
    console.log('✓ Second fetch served from cache (0 provider calls)');
  });
});
// Incremental fetch: after a refresh, the service should only request data
// newer than the latest cached timestamp (observed via mockProvider.lastRequest).
tap.test('Incremental Cache - Incremental Fetch', async () => {
  await tap.test('should only fetch NEW data on refresh', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    // First fetch: 10 records from 9:30-9:39
    const mockData1 = generateMockIntradayPrices('MSFT', 10, startTime, 1);
    mockProvider.setMockData(mockData1);
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'MSFT',
      interval: '1min'
    });
    expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
    expect(mockProvider.fetchCallCount).toEqual(1);
    const latestTimestamp1 = (result1 as opendata.IStockPrice[])[9].timestamp;
    console.log(`✓ First fetch: 10 records, latest timestamp: ${latestTimestamp1.toISOString()}`);
    // Simulate 5 minutes passing - 5 new records available
    mockProvider.resetTracking();
    const mockData2 = generateMockIntradayPrices('MSFT', 15, startTime, 1); // 15 total (10 old + 5 new)
    mockProvider.setMockData(mockData2);
    // Second fetch - should detect cache and only fetch NEW data
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'MSFT',
      interval: '1min'
    });
    expect((result2 as opendata.IStockPrice[]).length).toEqual(15);
    expect(mockProvider.fetchCallCount).toEqual(1); // Should call provider
    // Verify the request had a date filter (incremental fetch)
    expect(mockProvider.lastRequest).not.toEqual(null);
    expect(mockProvider.lastRequest!.type).toEqual('intraday');
    expect((mockProvider.lastRequest as opendata.IStockIntradayRequest).date).not.toEqual(undefined);
    const requestDate = (mockProvider.lastRequest as opendata.IStockIntradayRequest).date;
    console.log(`✓ Incremental fetch requested data since: ${requestDate!.toISOString()}`);
    console.log(`✓ Total records after merge: ${(result2 as opendata.IStockPrice[]).length}`);
    console.log('✓ Only fetched NEW data (incremental fetch working)');
  });
  await tap.test('should return cached data when no new records available', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    const mockData = generateMockIntradayPrices('GOOGL', 10, startTime, 1);
    mockProvider.setMockData(mockData);
    // First fetch
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'GOOGL',
      interval: '1min'
    });
    expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
    // Second fetch - same data (no new records)
    mockProvider.resetTracking();
    mockProvider.setMockData(mockData); // Same data
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'GOOGL',
      interval: '1min'
    });
    expect((result2 as opendata.IStockPrice[]).length).toEqual(10);
    expect(mockProvider.fetchCallCount).toEqual(1); // Incremental fetch attempted
    console.log('✓ No new records - returned cached data');
  });
});

// Deduplication: merging cached and freshly fetched records must collapse
// duplicate timestamps into a single entry per timestamp.
tap.test('Incremental Cache - Deduplication', async () => {
  await tap.test('should deduplicate by timestamp in merged data', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    // First fetch: 10 records
    const mockData1 = generateMockIntradayPrices('TSLA', 10, startTime, 1);
    mockProvider.setMockData(mockData1);
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'TSLA',
      interval: '1min'
    });
    expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
    // Second fetch: Return overlapping data (last 5 old + 5 new)
    // This simulates provider returning some duplicate timestamps
    mockProvider.resetTracking();
    const mockData2 = generateMockIntradayPrices('TSLA', 15, startTime, 1);
    mockProvider.setMockData(mockData2);
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'TSLA',
      interval: '1min'
    });
    // Should have 15 unique timestamps (deduplication worked)
    expect((result2 as opendata.IStockPrice[]).length).toEqual(15);
    // Verify timestamps are unique
    const timestamps = (result2 as opendata.IStockPrice[]).map(p => p.timestamp.getTime());
    const uniqueTimestamps = new Set(timestamps);
    expect(uniqueTimestamps.size).toEqual(15);
    console.log('✓ Deduplication working - 15 unique timestamps');
  });
});
// Limit handling: the `limit` parameter must apply to the merged result
// (keeping the most recent records) and distinct limits must not collide in
// the cache key.
tap.test('Incremental Cache - Limit Handling', async () => {
  await tap.test('should respect limit parameter in merged results', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    // First fetch with limit 100
    const mockData1 = generateMockIntradayPrices('AMZN', 100, startTime, 1);
    mockProvider.setMockData(mockData1);
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'AMZN',
      interval: '1min',
      limit: 100
    });
    expect((result1 as opendata.IStockPrice[]).length).toEqual(100);
    // Second fetch: 10 new records available
    mockProvider.resetTracking();
    const mockData2 = generateMockIntradayPrices('AMZN', 110, startTime, 1);
    mockProvider.setMockData(mockData2);
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'AMZN',
      interval: '1min',
      limit: 100 // Same limit
    });
    // Should still return 100 (most recent 100 after merge)
    expect((result2 as opendata.IStockPrice[]).length).toEqual(100);
    // Verify we got the most RECENT 100 (should include new data)
    const lastTimestamp = (result2 as opendata.IStockPrice[])[99].timestamp;
    const expectedLastTimestamp = mockData2[109].timestamp;
    expect(lastTimestamp.getTime()).toEqual(expectedLastTimestamp.getTime());
    console.log('✓ Limit respected - returned most recent 100 records');
  });
  await tap.test('should handle different limits without cache collision', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    const mockData = generateMockIntradayPrices('NVDA', 1000, startTime, 1);
    mockProvider.setMockData(mockData);
    // Fetch with limit 100
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'NVDA',
      interval: '1min',
      limit: 100
    });
    expect((result1 as opendata.IStockPrice[]).length).toEqual(100);
    mockProvider.resetTracking();
    // Fetch with limit 500 (should NOT use cached limit:100 data)
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'NVDA',
      interval: '1min',
      limit: 500
    });
    expect((result2 as opendata.IStockPrice[]).length).toEqual(500);
    // Should have made a new provider call (different cache key)
    expect(mockProvider.fetchCallCount).toBeGreaterThan(0);
    console.log('✓ Different limits use different cache keys');
  });
});

// Realistic dashboard scenario: repeated polling should result in one
// incremental provider call per refresh, never a full refetch.
tap.test('Incremental Cache - Dashboard Polling Scenario', async () => {
  await tap.test('should efficiently handle repeated polling requests', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    let currentDataSize = 100;
    // Initial fetch: 100 records
    let mockData = generateMockIntradayPrices('AAPL', currentDataSize, startTime, 1);
    mockProvider.setMockData(mockData);
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'AAPL',
      interval: '1min',
      limit: 1000
    });
    expect((result1 as opendata.IStockPrice[]).length).toEqual(100);
    const initialFetchCount = mockProvider.fetchCallCount;
    console.log(`✓ Initial fetch: ${(result1 as opendata.IStockPrice[]).length} records (${initialFetchCount} API calls)`);
    // Simulate 5 dashboard refreshes (1 new record each time)
    let totalNewRecords = 0;
    for (let i = 0; i < 5; i++) {
      mockProvider.resetTracking();
      currentDataSize += 1; // 1 new record
      totalNewRecords += 1;
      mockData = generateMockIntradayPrices('AAPL', currentDataSize, startTime, 1);
      mockProvider.setMockData(mockData);
      const result = await stockService.getData({
        type: 'intraday',
        ticker: 'AAPL',
        interval: '1min',
        limit: 1000
      });
      expect((result as opendata.IStockPrice[]).length).toEqual(currentDataSize);
      expect(mockProvider.fetchCallCount).toEqual(1); // Incremental fetch
    }
    console.log(`✓ Dashboard polling: 5 refreshes with ${totalNewRecords} new records`);
    console.log('✓ Each refresh only fetched NEW data (incremental cache working)');
  });
});
// Memory impact: duplicate timestamps in a provider response must be
// collapsed before caching.
tap.test('Incremental Cache - Memory Impact', async () => {
  await tap.test('should demonstrate memory savings from deduplication', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    // Create data with intentional duplicates
    const baseData = generateMockIntradayPrices('MSFT', 1000, startTime, 1);
    const duplicatedData = [...baseData, ...baseData.slice(-100)]; // Duplicate last 100
    expect(duplicatedData.length).toEqual(1100); // Before deduplication
    mockProvider.setMockData(duplicatedData);
    const result = await stockService.getData({
      type: 'intraday',
      ticker: 'MSFT',
      interval: '1min'
    });
    // Should have 1000 unique records (100 duplicates removed)
    expect((result as opendata.IStockPrice[]).length).toEqual(1000);
    console.log('✓ Deduplication removed 100 duplicate timestamps');
    console.log(`✓ Memory saved: ~${Math.round((100 / 1100) * 100)}%`);
  });
});

// Fallback: an explicit `date` filter opts the request out of the
// incremental-cache path and goes straight to the provider.
tap.test('Incremental Cache - Fallback Behavior', async () => {
  await tap.test('should not use incremental fetch for requests with date filter', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    const mockData = generateMockIntradayPrices('GOOGL', 100, startTime, 1);
    mockProvider.setMockData(mockData);
    // First fetch without date
    await stockService.getData({
      type: 'intraday',
      ticker: 'GOOGL',
      interval: '1min'
    });
    mockProvider.resetTracking();
    // Second fetch WITH date filter - should NOT use incremental cache
    const result = await stockService.getData({
      type: 'intraday',
      ticker: 'GOOGL',
      interval: '1min',
      date: new Date('2025-01-07T10:00:00.000Z') // Explicit date filter
    });
    // Should have made normal fetch (not incremental)
    expect(mockProvider.fetchCallCount).toEqual(1);
    expect((mockProvider.lastRequest as opendata.IStockIntradayRequest).date).not.toEqual(undefined);
    console.log('✓ Incremental cache skipped for requests with explicit date filter');
  });
});

// Benchmark: quantifies provider-call and transfer savings across repeated
// refreshes; the numbers are logged for inspection, not asserted.
tap.test('Incremental Cache - Performance Benchmark', async () => {
  await tap.test('should demonstrate API call reduction', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    // Initial dataset: 1000 records
    let mockData = generateMockIntradayPrices('BENCHMARK', 1000, startTime, 1);
    mockProvider.setMockData(mockData);
    // Initial fetch
    await stockService.getData({
      type: 'intraday',
      ticker: 'BENCHMARK',
      interval: '1min',
      limit: 1000
    });
    expect(mockProvider.fetchCallCount).toEqual(1);
    console.log('✓ Initial fetch: 1000 records');
    let totalProviderCalls = 1;
    let totalNewRecords = 0;
    // Simulate 10 refreshes (5 new records each)
    for (let i = 0; i < 10; i++) {
      mockProvider.resetTracking();
      // Add 5 new records
      const newCount = 5;
      mockData = generateMockIntradayPrices('BENCHMARK', 1000 + totalNewRecords + newCount, startTime, 1);
      mockProvider.setMockData(mockData);
      await stockService.getData({
        type: 'intraday',
        ticker: 'BENCHMARK',
        interval: '1min',
        limit: 1000
      });
      totalProviderCalls += mockProvider.fetchCallCount;
      totalNewRecords += newCount;
    }
    console.log('\n📊 Performance Benchmark:');
    console.log(`  Total refreshes: 10`);
    console.log(`  New records fetched: ${totalNewRecords}`);
    console.log(`  Total provider calls: ${totalProviderCalls}`);
    console.log(`  Without incremental cache: ${11} calls (1 initial + 10 full refreshes)`);
    console.log(`  With incremental cache: ${totalProviderCalls} calls (1 initial + 10 incremental)`);
    console.log(`  Data transfer reduction: ~${Math.round((1 - (totalNewRecords / (10 * 1000))) * 100)}%`);
    console.log('  (Only fetched NEW data instead of refetching all 1000 records each time)');
  });
});

// Ordering: after a merge of cached and new records, timestamps must be
// strictly ascending.
tap.test('Incremental Cache - Timestamp Ordering', async () => {
  await tap.test('should maintain timestamp order after merge', async () => {
    stockService.clearCache();
    mockProvider.resetTracking();
    const startTime = new Date('2025-01-07T09:30:00.000Z');
    // First fetch
    const mockData1 = generateMockIntradayPrices('TSLA', 10, startTime, 1);
    mockProvider.setMockData(mockData1);
    await stockService.getData({
      type: 'intraday',
      ticker: 'TSLA',
      interval: '1min'
    });
    // Second fetch with new data
    mockProvider.resetTracking();
    const mockData2 = generateMockIntradayPrices('TSLA', 15, startTime, 1);
    mockProvider.setMockData(mockData2);
    const result = await stockService.getData({
      type: 'intraday',
      ticker: 'TSLA',
      interval: '1min'
    });
    // Verify ascending timestamp order
    const timestamps = (result as opendata.IStockPrice[]).map(p => p.timestamp.getTime());
    for (let i = 1; i < timestamps.length; i++) {
      expect(timestamps[i]).toBeGreaterThan(timestamps[i - 1]);
    }
    console.log('✓ Timestamps correctly ordered (ascending)');
  });
});

export default tap.start();
+129
View File
@@ -0,0 +1,129 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as opendata from '../ts/index.js';
import * as paths from '../ts/paths.js';
import * as plugins from '../ts/plugins.js';
// Integration tests for LawService against a throwaway local smartdb folder.
// NOTE(review): syncLaw presumably fetches law texts from external official
// sources (network-dependent) before persisting them locally — confirm; the
// tests below are order-dependent and share one LawService instance.
const testDbFolder = plugins.path.join(paths.packageDir, '.nogit', 'law-smartdb-test');
let lawService: opendata.LawService;

tap.test('LawService - setup local smartdb', async () => {
  // Best-effort cleanup of any leftover DB folder from a previous run.
  await plugins.smartfs.directory(testDbFolder).recursive().delete().catch(() => {});
  lawService = new opendata.LawService({
    dbFolderPath: testDbFolder,
    dbName: 'lawstest',
  });
  await lawService.start();
  expect(lawService).toBeInstanceOf(opendata.LawService);
});

tap.test('LawService - sync and search Germany law', async () => {
  // 'aeg' = Allgemeines Eisenbahngesetz (German General Railways Act).
  const germanLaw = await lawService.syncLaw({
    jurisdiction: 'de',
    identifier: 'aeg',
  });
  expect(germanLaw.identifier).toEqual('aeg');
  expect(germanLaw.title).toInclude('Eisenbahngesetz');
  expect(germanLaw.text).toInclude('Ausgleichspflicht');
  const results = await lawService.searchLaws({
    jurisdiction: 'de',
    query: 'Eisenbahngesetz',
    limit: 5,
  });
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].identifier).toEqual('aeg');
});

tap.test('LawService - sync and search EU law', async () => {
  // CELEX number 32024R1689 = EU AI Act regulation.
  const euLaw = await lawService.syncLaw({
    jurisdiction: 'eu',
    identifier: '32024R1689',
    language: 'EN',
  });
  expect(euLaw.identifier).toEqual('32024R1689');
  expect(euLaw.title).toInclude('Artificial Intelligence Act');
  expect(euLaw.text.toLowerCase()).toInclude('artificial intelligence');
  const results = await lawService.searchLaws({
    jurisdiction: 'eu',
    query: 'Artificial Intelligence Act',
    limit: 5,
  });
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].identifier).toEqual('32024R1689');
});

tap.test('LawService - sync and search USA law', async () => {
  // Public law fetched via explicit collection hint (usCollection: 'PLAW').
  const usLaw = await lawService.syncLaw({
    jurisdiction: 'us',
    identifier: 'PLAW-119publ1',
    usCollection: 'PLAW',
  });
  expect(usLaw.identifier).toEqual('PLAW-119publ1');
  expect(usLaw.shortTitle).toInclude('Laken Riley Act');
  expect(usLaw.text).toInclude('To require the Secretary of Homeland Security');
  const results = await lawService.searchLaws({
    jurisdiction: 'us',
    query: 'Laken Riley Act',
    limit: 5,
  });
  expect(results.length).toBeGreaterThan(0);
  expect(results[0].identifier).toEqual('PLAW-119publ1');
});

tap.test('LawService - sync and search USA code citation', async () => {
  // Citation with section symbol is normalized to the plain 'N USC M' form.
  const usCodeSection = await lawService.syncLaw({
    jurisdiction: 'us',
    identifier: '8 U.S.C. § 1226',
  });
  expect(usCodeSection.identifier).toEqual('8 USC 1226');
  expect(usCodeSection.citation).toEqual('8 USC 1226');
  expect(usCodeSection.title).toInclude('Apprehension and detention of aliens');
  expect(usCodeSection.text).toInclude('Detention of criminal aliens');
  const results = await lawService.searchLaws({
    jurisdiction: 'us',
    query: 'Apprehension and detention of aliens',
    limit: 10,
  });
  expect(results.length).toBeGreaterThan(0);
  expect(results.map((lawArg) => lawArg.identifier).includes('8 USC 1226')).toEqual(true);
});

tap.test('LawService - local lookup returns synced law', async () => {
  // getLaw should resolve from local storage for previously synced laws.
  const euLaw = await lawService.getLaw({
    jurisdiction: 'eu',
    identifier: '32024R1689',
    language: 'EN',
  });
  expect(euLaw).toBeDefined();
  expect(euLaw?.title).toInclude('Artificial Intelligence Act');
  // Subsection citations (e.g. '(c)') resolve to the parent section record.
  const usCodeLaw = await lawService.getLaw({
    jurisdiction: 'us',
    identifier: '8 USC 1226(c)',
  });
  expect(usCodeLaw).toBeDefined();
  expect(usCodeLaw?.identifier).toEqual('8 USC 1226');
});

tap.test('LawService - teardown local smartdb', async () => {
  await lawService.stop();
  // Best-effort removal of the temporary DB folder.
  await plugins.smartfs.directory(testDbFolder).recursive().delete().catch(() => {});
});

export default tap.start();
@@ -13,6 +13,8 @@ const invalidTicker = 'INVALID_TICKER_XYZ';
let stockService: opendata.StockPriceService;
let marketstackProvider: opendata.MarketstackProvider;
let testQenv: plugins.qenv.Qenv;
let marketstackAvailable = false;
let marketstackSkipReason = 'Marketstack integration requirements are unavailable.';
tap.test('should create StockPriceService instance', async () => {
stockService = new opendata.StockPriceService({
@@ -27,6 +29,10 @@ tap.test('should create MarketstackProvider instance', async () => {
// Create qenv and get API key
testQenv = new plugins.qenv.Qenv(paths.packageDir, testNogitDir);
const apiKey = await testQenv.getEnvVarOnDemand('MARKETSTACK_COM_TOKEN');
if (!apiKey) {
marketstackSkipReason = 'Skipping Marketstack integration tests: MARKETSTACK_COM_TOKEN not set.';
return;
}
marketstackProvider = new opendata.MarketstackProvider(apiKey, {
enabled: true,
@@ -37,13 +43,17 @@ tap.test('should create MarketstackProvider instance', async () => {
expect(marketstackProvider).toBeInstanceOf(opendata.MarketstackProvider);
expect(marketstackProvider.name).toEqual('Marketstack');
expect(marketstackProvider.requiresAuth).toEqual(true);
expect(marketstackProvider.priority).toEqual(80);
expect(marketstackProvider.priority).toEqual(90);
marketstackAvailable = await marketstackProvider.isAvailable();
if (!marketstackAvailable) {
marketstackSkipReason = 'Skipping Marketstack integration tests: provider is not reachable.';
marketstackProvider = undefined as any;
}
} catch (error) {
if (error.message.includes('MARKETSTACK_COM_TOKEN')) {
if (plugins.getErrorMessage(error).includes('MARKETSTACK_COM_TOKEN')) {
console.log('⚠️ MARKETSTACK_COM_TOKEN not set - skipping Marketstack tests');
tap.test('Marketstack token not available', async () => {
expect(true).toEqual(true); // Skip gracefully
});
marketstackSkipReason = 'Skipping Marketstack integration tests: MARKETSTACK_COM_TOKEN not set.';
return;
}
throw error;
@@ -64,7 +74,7 @@ tap.test('should register Marketstack provider with the service', async () => {
tap.test('should check provider health', async () => {
if (!marketstackProvider) {
console.log('⚠️ Skipping - Marketstack provider not initialized');
console.log(`⚠️ ${marketstackSkipReason}`);
return;
}
@@ -151,7 +161,7 @@ tap.test('should handle invalid ticker gracefully', async () => {
await stockService.getPrice({ ticker: invalidTicker });
throw new Error('Should have thrown an error for invalid ticker');
} catch (error) {
expect(error.message).toInclude('Failed to fetch');
expect(plugins.getErrorMessage(error)).toInclude('Failed to fetch');
console.log('✓ Invalid ticker handled correctly');
}
});
@@ -196,6 +206,9 @@ tap.test('should get provider statistics', async () => {
const marketstackStats = stats.get('Marketstack');
expect(marketstackStats).not.toEqual(undefined);
if (!marketstackStats) {
throw new Error('Missing Marketstack stats');
}
expect(marketstackStats.successCount).toBeGreaterThan(0);
expect(marketstackStats.errorCount).toBeGreaterThanOrEqual(0);
@@ -280,7 +293,7 @@ tap.test('should fetch sample EOD data', async () => {
console.log(`Provider: Marketstack (EOD Data)`);
console.log(`Last updated: ${new Date().toLocaleString()}\n`);
} catch (error) {
console.log('Error fetching sample data:', error.message);
console.log('Error fetching sample data:', plugins.getErrorMessage(error));
}
expect(true).toEqual(true);
+365
View File
@@ -0,0 +1,365 @@
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as opendata from '../ts/index.js';
/**
* Test to verify we NEVER return stale intraday data
* Even when cache hasn't expired, we should check for new data
*/
/**
 * Mock intraday data provider used to exercise the incremental-fetch /
 * stale-data logic of StockPriceService without a real data source.
 *
 * The mock keeps a virtual clock (`baseTime`) and a record count
 * (`currentDataCount`); tests advance the clock and add records to simulate
 * new intraday data arriving between polls.
 */
class MockIntradayProvider implements opendata.IStockProvider {
  name = 'MockIntradayProvider';
  priority = 100;
  requiresAuth = false;

  /** Number of fetchData calls observed; tests reset this between scenarios. */
  public fetchCount = 0;
  /** The `date` filter of the most recent intraday request, if any. */
  public lastRequestDate: Date | undefined;
  // NOTE: must be public — the tests assign it directly between scenarios
  // (e.g. `mockProvider.currentDataCount = 10;`), which would be a TS2341
  // compile error with a `private` modifier.
  public currentDataCount = 10; // Start with 10 records
  private baseTime = new Date('2025-01-07T09:30:00.000Z');

  /**
   * Returns `currentDataCount` synthetic 1-minute bars starting at the
   * requested date (or `baseTime` when no date filter is given). When a
   * date filter is present, only bars strictly AFTER that date are returned,
   * mimicking an incremental provider fetch.
   * Throws for any request type other than 'intraday'.
   */
  async fetchData(request: opendata.IStockDataRequest): Promise<opendata.IStockPrice | opendata.IStockPrice[]> {
    this.fetchCount++;
    if (request.type === 'intraday') {
      this.lastRequestDate = request.date;
      const startTime = request.date || this.baseTime;
      const prices: opendata.IStockPrice[] = [];
      // Simulate provider returning data AFTER the requested date
      for (let i = 0; i < this.currentDataCount; i++) {
        const timestamp = new Date(startTime.getTime() + i * 60 * 1000);
        // Only return data AFTER request date if date filter is present
        if (request.date && timestamp <= request.date) {
          continue;
        }
        prices.push({
          ticker: request.ticker,
          price: 100 + i,
          currency: 'USD',
          timestamp,
          fetchedAt: new Date(),
          provider: this.name,
          dataType: 'intraday',
          marketState: 'REGULAR',
          open: 100,
          high: 101,
          low: 99,
          volume: 1000000,
          change: 0,
          changePercent: 0,
          previousClose: 100
        });
      }
      return prices;
    }
    throw new Error('Only intraday supported in this mock');
  }

  /** The mock is always reachable. */
  async isAvailable(): Promise<boolean> {
    return true;
  }

  /** Simulates new intraday records becoming available at the provider. */
  public addNewRecords(count: number): void {
    this.currentDataCount += count;
  }

  /** Advances the mock's virtual clock by the given number of minutes. */
  public advanceTime(minutes: number): void {
    this.baseTime = new Date(this.baseTime.getTime() + minutes * 60 * 1000);
  }
}
let stockService: opendata.StockPriceService;
let mockProvider: MockIntradayProvider;
// Shared setup: a service with a deliberately LONG cache TTL (5 minutes) so
// that the incremental-fetch path — not TTL expiry — is what triggers
// provider calls in the scenarios below.
tap.test('Stale Data Fix - Setup', async () => {
  // Use LONG TTL so cache doesn't expire during test
  stockService = new opendata.StockPriceService({
    ttl: 300000, // 5 minutes
    maxEntries: 1000
  });
  mockProvider = new MockIntradayProvider();
  stockService.register(mockProvider);
  console.log('✓ Service initialized with 5-minute cache TTL');
});
// Core scenarios: within the 1-minute freshness window the service must serve
// the cache without calling the provider; once the newest cached record is
// older than that window it must perform an incremental provider fetch and
// merge the new records with the cached ones.
tap.test('Stale Data Fix - Check for New Data Even When Cache Valid', async () => {
  await tap.test('should return cached data if less than 1 minute old (freshness check)', async () => {
    // Reset shared service/mock state for this scenario.
    stockService.clearCache();
    mockProvider.fetchCount = 0;
    mockProvider.currentDataCount = 10;
    console.log('\n📊 Scenario: Request twice within 1 minute\n');
    // First request - fetch 10 records
    console.log('⏰ First request (initial fetch)');
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'AAPL',
      interval: '1min',
      limit: 1000
    });
    expect(result1).toBeArray();
    expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
    expect(mockProvider.fetchCount).toEqual(1);
    const latestTimestamp1 = (result1 as opendata.IStockPrice[])[9].timestamp;
    console.log(` ✓ Fetched 10 records, latest: ${latestTimestamp1.toISOString()}`);
    // Second request immediately - should return cache (data < 1min old)
    console.log('\n⏰ Second request (< 1 minute later)');
    mockProvider.fetchCount = 0;
    mockProvider.addNewRecords(10); // New data available, but won't fetch yet
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'AAPL',
      interval: '1min',
      limit: 1000
    });
    // Should return cached data (freshness check prevents fetch)
    expect((result2 as opendata.IStockPrice[]).length).toEqual(10);
    expect(mockProvider.fetchCount).toEqual(0); // No provider call
    console.log(` ✓ Returned cached 10 records (no provider call)`);
    console.log(` ✓ Freshness check: Data < 1min old, no fetch needed`);
  });
  await tap.test('should fetch NEW data when cache is > 1 minute old', async () => {
    stockService.clearCache();
    mockProvider.fetchCount = 0;
    mockProvider.currentDataCount = 10;
    console.log('\n📊 Scenario: Request after 2 minutes (data > 1min old)\n');
    // First request - fetch 10 records at 9:30am
    console.log('⏰ 9:30:00 - First request (initial fetch)');
    const result1 = await stockService.getData({
      type: 'intraday',
      ticker: 'MSFT',
      interval: '1min',
      limit: 1000
    });
    expect(result1).toBeArray();
    expect((result1 as opendata.IStockPrice[]).length).toEqual(10);
    const latestTimestamp1 = (result1 as opendata.IStockPrice[])[9].timestamp;
    console.log(` ✓ Fetched 10 records, latest: ${latestTimestamp1.toISOString()}`);
    // Advance time by 2 minutes - now data is > 1 minute old
    console.log('\n⏰ 9:32:00 - Second request (2 minutes later, data > 1min old)');
    console.log(' 📝 Advancing provider time by 2 minutes...');
    mockProvider.fetchCount = 0;
    mockProvider.advanceTime(2); // Advance 2 minutes
    mockProvider.addNewRecords(10); // Now provider has 20 records total
    const result2 = await stockService.getData({
      type: 'intraday',
      ticker: 'MSFT',
      interval: '1min',
      limit: 1000
    });
    expect(result2).toBeArray();
    const prices2 = result2 as opendata.IStockPrice[];
    // Should have 20 records (10 cached + 10 new)
    expect(prices2.length).toEqual(20);
    // Should have made a provider call (data was stale)
    expect(mockProvider.fetchCount).toBeGreaterThan(0);
    const latestTimestamp2 = prices2[prices2.length - 1].timestamp;
    console.log(` ✓ Now have ${prices2.length} records, latest: ${latestTimestamp2.toISOString()}`);
    console.log(` ✓ Provider calls: ${mockProvider.fetchCount} (fetched new data)`);
    console.log(` ✓ Data was > 1min old, incremental fetch triggered!`);
    // Verify we got NEW data
    expect(latestTimestamp2.getTime()).toBeGreaterThan(latestTimestamp1.getTime());
    console.log('\n✅ SUCCESS: Fetched new data when cache was stale!');
  });
  await tap.test('should handle polling with > 1 minute intervals efficiently', async () => {
    stockService.clearCache();
    mockProvider.fetchCount = 0;
    mockProvider.currentDataCount = 100;
    console.log('\n📊 Scenario: Dashboard polling every 2 minutes\n');
    // Initial request at 9:30am
    console.log('⏰ 9:30:00 - Request 1 (initial fetch)');
    await stockService.getData({
      type: 'intraday',
      ticker: 'GOOGL',
      interval: '1min',
      limit: 1000
    });
    expect(mockProvider.fetchCount).toEqual(1);
    console.log(` ✓ Fetched 100 records (provider calls: 1)`);
    let totalProviderCalls = 1;
    let totalNewRecords = 0;
    // Simulate 3 polling refreshes (2 minutes apart, 5 new records each)
    for (let i = 2; i <= 4; i++) {
      mockProvider.fetchCount = 0;
      mockProvider.advanceTime(2); // Advance 2 minutes (triggers freshness check)
      mockProvider.addNewRecords(5);
      totalNewRecords += 5;
      const minutes = (i - 1) * 2;
      console.log(`\n⏰ 9:${30 + minutes}:00 - Request ${i} (${minutes} minutes later, +5 new records)`);
      const result = await stockService.getData({
        type: 'intraday',
        ticker: 'GOOGL',
        interval: '1min',
        limit: 1000
      });
      const expectedTotal = 100 + totalNewRecords;
      expect((result as opendata.IStockPrice[]).length).toEqual(expectedTotal);
      // Should have made exactly 1 provider call (incremental fetch)
      expect(mockProvider.fetchCount).toEqual(1);
      totalProviderCalls++;
      console.log(` ✓ Now have ${expectedTotal} records (incremental fetch: 1 call)`);
    }
    console.log(`\n📊 Summary:`);
    console.log(` Total requests: 4`);
    console.log(` Total provider calls: ${totalProviderCalls}`);
    console.log(` New records fetched: ${totalNewRecords}`);
    console.log(` Without incremental cache: Would fetch 100 records × 3 refreshes = 300 records`);
    console.log(` With incremental cache: Only fetched ${totalNewRecords} new records`);
    console.log(` Data transfer reduction: ${Math.round((1 - (totalNewRecords / 300)) * 100)}%`);
    console.log('\n✅ SUCCESS: Only fetched NEW data on each refresh!');
  });
});
// Regression guard: the incremental-fetch path applies only to intraday
// requests; historical and current-price requests must keep using the plain
// TTL cache (one provider call, then cache hits).
tap.test('Stale Data Fix - Verify No Regression for Other Request Types', async () => {
  await tap.test('historical requests should still use simple cache', async () => {
    // NOTE(review): this clears the module-level stockService's cache, but the
    // assertions below run against the freshly created `testService` — this
    // call looks like a leftover; confirm it is intentional.
    stockService.clearCache();
    // Mock provider that counts calls
    let historicalCallCount = 0;
    const historicalProvider: opendata.IStockProvider = {
      name: 'HistoricalMock',
      priority: 100,
      requiresAuth: false,
      async fetchData() {
        historicalCallCount++;
        return [{
          ticker: 'TEST',
          price: 100,
          currency: 'USD',
          timestamp: new Date('2025-01-01'),
          fetchedAt: new Date(),
          provider: 'HistoricalMock',
          dataType: 'eod',
          marketState: 'CLOSED',
          open: 99,
          high: 101,
          low: 98,
          volume: 1000000,
          change: 1,
          changePercent: 1,
          previousClose: 99
        }];
      },
      async isAvailable() { return true; }
    };
    const testService = new opendata.StockPriceService({ ttl: 60000 });
    testService.register(historicalProvider);
    // First request
    await testService.getData({
      type: 'historical',
      ticker: 'TEST',
      from: new Date('2025-01-01'),
      to: new Date('2025-01-31')
    });
    expect(historicalCallCount).toEqual(1);
    // Second request - should use cache (not incremental fetch)
    await testService.getData({
      type: 'historical',
      ticker: 'TEST',
      from: new Date('2025-01-01'),
      to: new Date('2025-01-31')
    });
    // Should still be 1 (used cache)
    expect(historicalCallCount).toEqual(1);
    console.log('✓ Historical requests use simple cache (no incremental fetch)');
  });
  await tap.test('current price requests should still use simple cache', async () => {
    // NOTE(review): same as above — clears the shared service, while the test
    // asserts against its own `testService` instance.
    stockService.clearCache();
    let currentCallCount = 0;
    const currentProvider: opendata.IStockProvider = {
      name: 'CurrentMock',
      priority: 100,
      requiresAuth: false,
      async fetchData() {
        currentCallCount++;
        return {
          ticker: 'TEST',
          price: 150,
          currency: 'USD',
          timestamp: new Date(),
          fetchedAt: new Date(),
          provider: 'CurrentMock',
          dataType: 'eod',
          marketState: 'CLOSED',
          open: 149,
          high: 151,
          low: 148,
          volume: 5000000,
          change: 1,
          changePercent: 0.67,
          previousClose: 149
        };
      },
      async isAvailable() { return true; }
    };
    const testService = new opendata.StockPriceService({ ttl: 60000 });
    testService.register(currentProvider);
    // First request
    await testService.getData({
      type: 'current',
      ticker: 'TEST'
    });
    expect(currentCallCount).toEqual(1);
    // Second request - should use cache
    await testService.getData({
      type: 'current',
      ticker: 'TEST'
    });
    expect(currentCallCount).toEqual(1);
    console.log('✓ Current price requests use simple cache');
  });
});
export default tap.start();
+35 -6
View File
@@ -11,6 +11,8 @@ const testDownloadDir = plugins.path.join(testNogitDir, 'downloads');
const testGermanBusinessDataDir = plugins.path.join(testNogitDir, 'germanbusinessdata');
let testOpenDataInstance: opendata.OpenData;
let openDataStarted = false;
let openDataSkipReason = 'OpenData integration requirements are unavailable.';
tap.test('first test', async () => {
testOpenDataInstance = new opendata.OpenData({
@@ -22,16 +24,43 @@ tap.test('first test', async () => {
});
tap.test('should start the instance', async () => {
await testOpenDataInstance.start();
try {
await testOpenDataInstance.start();
openDataStarted = true;
} catch (error) {
openDataSkipReason = `Skipping OpenData integration tests: ${plugins.getErrorMessage(error)}`;
console.warn(openDataSkipReason);
}
})
tap.test('should build initial data', async () => {
await testOpenDataInstance.buildInitialDb();
tap.test('should persist business records using local smartdb', async (toolsArg) => {
toolsArg.skipIf(!openDataStarted, openDataSkipReason);
const businessRecord = new testOpenDataInstance.CBusinessRecord();
businessRecord.id = await testOpenDataInstance.CBusinessRecord.getNewId();
businessRecord.data.name = `Test Company ${plugins.smartunique.uniSimple()}`;
businessRecord.data.germanParsedRegistration = {
court: 'Bremen',
type: 'HRB',
number: `${Date.now()}`,
};
await businessRecord.save();
const storedRecord = await BusinessRecord.getInstance({
id: businessRecord.id,
});
expect(storedRecord.id).toEqual(businessRecord.id);
expect(storedRecord.data.name).toEqual(businessRecord.data.name);
expect(storedRecord.data.germanParsedRegistration).toEqual(
businessRecord.data.germanParsedRegistration
);
});
tap.test('should stop the instance', async () => {
tap.test('should stop the instance', async (toolsArg) => {
toolsArg.skipIf(!openDataStarted, openDataSkipReason);
await testOpenDataInstance.stop();
});
tap.start()
export default tap.start()
+1 -1
View File
@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@fin.cx/opendata',
version: '3.4.0',
version: '3.6.0',
description: 'A comprehensive TypeScript library for accessing business data and real-time financial information. Features include German company data management with MongoDB integration, JSONL bulk processing, automated Handelsregister interactions, and real-time stock market data from multiple providers.'
}
+1 -1
View File
@@ -18,7 +18,7 @@ export class BusinessRecord extends plugins.smartdata.SmartDataDbDoc<
// INSTANCE
@plugins.smartdata.unI()
id: string;
id!: string;
@plugins.smartdata.svDb()
data: {
+44 -19
View File
@@ -20,15 +20,44 @@ export class HandelsRegister {
this.uniqueDowloadFolder = plugins.path.join(this.downloadDir, plugins.smartunique.uniSimple());
}
private async resetDownloadFolder() {
await plugins.smartfs.directory(this.uniqueDowloadFolder).recursive().delete().catch(() => {});
await plugins.smartfs.directory(this.uniqueDowloadFolder).create();
}
private async waitForDownloadedFile() {
for (let attempt = 0; attempt < 120; attempt++) {
const directoryEntries = await plugins.fs.readdir(this.uniqueDowloadFolder);
const fileName = directoryEntries.find(
(entry) => !entry.endsWith('.crdownload') && !entry.endsWith('.tmp')
);
if (fileName) {
const filePath = plugins.path.join(this.uniqueDowloadFolder, fileName);
const firstStat = await plugins.fs.stat(filePath);
await plugins.smartdelay.delayFor(500);
const secondStat = await plugins.fs.stat(filePath);
if (firstStat.size === secondStat.size) {
return filePath;
}
}
await plugins.smartdelay.delayFor(500);
}
throw new Error('Timed out while waiting for the download to finish.');
}
public async start() {
// Start the browser
await plugins.smartfile.fs.ensureDir(this.uniqueDowloadFolder);
await this.resetDownloadFolder();
await this.smartbrowserInstance.start();
}
public async stop() {
// Stop the browser
await plugins.smartfile.fs.remove(this.uniqueDowloadFolder);
await plugins.smartfs.directory(this.uniqueDowloadFolder).recursive().delete();
await this.smartbrowserInstance.stop();
}
@@ -184,24 +213,16 @@ export class HandelsRegister {
throw new Error('Invalid file type');
}
}, typeArg);
const downloadedFilePath = await this.waitForDownloadedFile();
const renamedFilePath = plugins.path.join(
this.uniqueDowloadFolder,
typeArg === 'AD' ? 'ad.pdf' : 'si.xml'
);
await plugins.fs.rename(downloadedFilePath, renamedFilePath);
const file = await plugins.smartfileFactory.fromFilePath(renamedFilePath);
await plugins.smartfile.fs.waitForFileToBeReady(this.uniqueDowloadFolder);
const files = await plugins.smartfile.fs.fileTreeToObject(this.uniqueDowloadFolder, '**/*');
const file = files[0];
// lets clear the folder for the next download
await plugins.smartfile.fs.ensureEmptyDir(this.uniqueDowloadFolder);
switch (typeArg) {
case 'AD':
await file.rename(`ad.pdf`);
break;
case 'SI':
await file.rename(`si.xml`);
break;
break;
}
// Keep the download folder empty between requests.
await this.resetDownloadFolder();
return file;
}
@@ -297,6 +318,10 @@ export class HandelsRegister {
*/
public async getSpecificCompany(companyArg: BusinessRecord['data']['germanParsedRegistration']) {
return this.asyncExecutionStack.getExclusiveExecutionSlot(async () => {
if (!companyArg?.type || !companyArg.number || !companyArg.court) {
throw new Error('A complete parsed German registration is required.');
}
const page = await this.getNewPage();
await this.navigateToPage(page, 'Normal search');
await page.waitForSelector('#form\\:schlagwoerter', { timeout: 5000 });
+7 -6
View File
@@ -53,14 +53,15 @@ export class JsonlDataProcessor<T> {
dataUrlArg = 'https://daten.offeneregister.de/de_companies_ocdata.jsonl.bz2'
) {
const done = plugins.smartpromise.defer();
const dataExists = await plugins.smartfile.fs.isDirectory(this.germanBusinessDataDir);
const dataExists = await plugins.smartfs.directory(this.germanBusinessDataDir).exists();
if (!dataExists) {
await plugins.smartfile.fs.ensureDir(this.germanBusinessDataDir);
await plugins.smartfs.directory(this.germanBusinessDataDir).create();
} else {
}
const smartarchive = await plugins.smartarchive.SmartArchive.fromArchiveUrl(dataUrlArg);
const jsonlDataStream = await smartarchive.exportToStreamOfStreamFiles();
const jsonlDataStream = await plugins.smartarchive.SmartArchive.create()
.url(dataUrlArg)
.toStreamFiles();
let totalRecordsCounter = 0;
let nextRest: string = '';
jsonlDataStream.pipe(
@@ -74,11 +75,11 @@ export class JsonlDataProcessor<T> {
writeFunction: async (chunkArg: Buffer, streamToolsArg) => {
const currentString = nextRest + chunkArg.toString();
const lines = currentString.split('\n');
nextRest = lines.pop();
nextRest = lines.pop() ?? '';
console.log(`Got another ${lines.length} records.`);
const concurrentProcessor = new plugins.smartarray.ConcurrentProcessor<string>(
async (line) => {
let entry: T;
let entry: T | undefined;
if (!line) return;
try {
entry = JSON.parse(line);
+65 -32
View File
@@ -1,7 +1,6 @@
import { BusinessRecord } from './classes.businessrecord.js';
import { HandelsRegister } from './classes.handelsregister.js';
import { JsonlDataProcessor, type SeedEntryType } from './classes.jsonldata.js';
import * as paths from './paths.js';
import * as plugins from './plugins.js';
export interface IOpenDataConfig {
@@ -11,12 +10,13 @@ export interface IOpenDataConfig {
}
export class OpenData {
public db: plugins.smartdata.SmartdataDb;
private serviceQenv: plugins.qenv.Qenv;
public db!: plugins.smartdata.SmartdataDb;
private localSmartDb?: plugins.smartdb.LocalSmartDb;
private config: IOpenDataConfig;
private started = false;
public jsonLDataProcessor: JsonlDataProcessor<SeedEntryType>;
public handelsregister: HandelsRegister;
public jsonLDataProcessor!: JsonlDataProcessor<SeedEntryType>;
public handelsregister!: HandelsRegister;
public CBusinessRecord = plugins.smartdata.setDefaultManagerForDoc(this, BusinessRecord);
@@ -28,38 +28,62 @@ export class OpenData {
throw new Error('@fin.cx/opendata: All directory paths are required (downloadDir, germanBusinessDataDir, nogitDir).');
}
this.config = configArg;
this.serviceQenv = new plugins.qenv.Qenv(paths.packageDir, this.config.nogitDir);
}
public async start() {
// Ensure configured directories exist
await plugins.smartfile.fs.ensureDir(this.config.nogitDir);
await plugins.smartfile.fs.ensureDir(this.config.downloadDir);
await plugins.smartfile.fs.ensureDir(this.config.germanBusinessDataDir);
if (this.started) {
return;
}
this.db = new plugins.smartdata.SmartdataDb({
mongoDbUrl: await this.serviceQenv.getEnvVarOnDemand('MONGODB_URL'),
mongoDbName: await this.serviceQenv.getEnvVarOnDemand('MONGODB_NAME'),
mongoDbUser: await this.serviceQenv.getEnvVarOnDemand('MONGODB_USER'),
mongoDbPass: await this.serviceQenv.getEnvVarOnDemand('MONGODB_PASS'),
// Ensure configured directories exist
await plugins.smartfs.directory(this.config.nogitDir).create();
await plugins.smartfs.directory(this.config.downloadDir).create();
await plugins.smartfs.directory(this.config.germanBusinessDataDir).create();
this.localSmartDb = new plugins.smartdb.LocalSmartDb({
folderPath: plugins.path.join(this.config.nogitDir, 'opendata-smartdb'),
});
await this.db.init();
this.jsonLDataProcessor = new JsonlDataProcessor(
this.config.germanBusinessDataDir,
async (entryArg) => {
const businessRecord = new this.CBusinessRecord();
businessRecord.id = await this.CBusinessRecord.getNewId();
businessRecord.data.name = entryArg.name;
businessRecord.data.germanParsedRegistration = {
court: entryArg.all_attributes.registered_office,
number: entryArg.all_attributes._registerNummer,
type: entryArg.all_attributes._registerArt as 'HRA' | 'HRB',
};
await businessRecord.save();
const connectionInfo = await this.localSmartDb.start();
this.db = new plugins.smartdata.SmartdataDb({
mongoDbUrl: connectionInfo.connectionUri,
mongoDbName: 'opendata',
});
try {
await this.db.init();
await this.db.mongoDb.collection('_opendata_bootstrap').insertOne({
createdAt: new Date(),
});
await this.db.mongoDb.collection('_opendata_bootstrap').deleteMany({});
this.jsonLDataProcessor = new JsonlDataProcessor(
this.config.germanBusinessDataDir,
async (entryArg) => {
const businessRecord = new this.CBusinessRecord();
businessRecord.id = await this.CBusinessRecord.getNewId();
businessRecord.data.name = entryArg.name;
businessRecord.data.germanParsedRegistration = {
court: entryArg.all_attributes.registered_office,
number: entryArg.all_attributes._registerNummer,
type: entryArg.all_attributes._registerArt as 'HRA' | 'HRB',
};
await businessRecord.save();
}
);
this.handelsregister = new HandelsRegister(this, this.config.downloadDir);
await this.handelsregister.start();
this.started = true;
} catch (error) {
if (this.handelsregister) {
await this.handelsregister.stop().catch(() => {});
}
);
this.handelsregister = new HandelsRegister(this, this.config.downloadDir);
await this.handelsregister.start();
await this.db.close().catch(() => {});
await this.localSmartDb.stop().catch(() => {});
this.localSmartDb = undefined;
throw error;
}
}
public async buildInitialDb() {
@@ -81,7 +105,16 @@ export class OpenData {
public async stop() {
if (!this.started) {
return;
}
if (this.handelsregister) {
await this.handelsregister.stop();
}
await this.db.close();
await this.handelsregister.stop();
await this.localSmartDb?.stop();
this.localSmartDb = undefined;
this.started = false;
}
}
+1
View File
@@ -1,2 +1,3 @@
export * from './classes.main.opendata.js';
export * from './laws/index.js';
export * from './stocks/index.js';
+81
View File
@@ -0,0 +1,81 @@
import * as plugins from '../plugins.js';
import type { TJurisdiction, TLawSource, TRawLawFormat } from './interfaces.law.js';
@plugins.smartdata.Manager()
/**
 * Persisted law document stored via smartdata.
 * Holds the raw source body plus extracted/normalized fields; several fields
 * are indexed or full-text searchable (see decorators).
 */
export class LawRecord extends plugins.smartdata.SmartDataDbDoc<LawRecord, LawRecord> {
  /**
   * Fetches a single LawRecord by its unique lookupKey.
   * Returns the first match, or undefined when no record exists for the key.
   */
  public static getByLookupKey = async (lookupKeyArg: string) => {
    const lawRecords = await LawRecord.getInstances({
      lookupKey: lookupKeyArg,
    });
    return lawRecords[0];
  };

  // Primary document id (unique index).
  @plugins.smartdata.unI()
  id!: string;

  // Composite lookup key used by getByLookupKey; unique index, persisted.
  @plugins.smartdata.unI()
  @plugins.smartdata.svDb()
  lookupKey!: string;

  // Jurisdiction the law belongs to ('de' | 'eu' | 'us'); indexed.
  @plugins.smartdata.index()
  @plugins.smartdata.svDb()
  jurisdiction!: TJurisdiction;

  // Upstream data source the record was synced from; indexed.
  @plugins.smartdata.index()
  @plugins.smartdata.svDb()
  source!: TLawSource;

  // Source-specific identifier (e.g. a CELEX number or USC citation); searchable.
  @plugins.smartdata.searchable()
  @plugins.smartdata.svDb()
  identifier!: string;

  // Full title of the law; searchable.
  @plugins.smartdata.searchable()
  @plugins.smartdata.svDb()
  title!: string;

  // Optional short title; empty string when the source provides none.
  @plugins.smartdata.searchable()
  @plugins.smartdata.svDb()
  shortTitle: string = '';

  // Optional formal citation string; searchable.
  @plugins.smartdata.searchable()
  @plugins.smartdata.svDb()
  citation: string = '';

  // Source-specific document type; indexed.
  @plugins.smartdata.index()
  @plugins.smartdata.svDb()
  type: string = '';

  // Language code of the stored text; indexed.
  @plugins.smartdata.index()
  @plugins.smartdata.svDb()
  language: string = '';

  // URL the raw document was fetched from.
  @plugins.smartdata.svDb()
  sourceUrl!: string;

  // Format of rawBody ('xml' | 'html' | 'text' | 'json').
  @plugins.smartdata.svDb()
  rawFormat!: TRawLawFormat;

  // Unprocessed document body as retrieved from the source.
  @plugins.smartdata.svDb()
  rawBody!: string;

  // Extracted plain text of the law; full-text searchable.
  @plugins.smartdata.searchable()
  @plugins.smartdata.svDb()
  text!: string;

  // Issue date as provided by the source (string form); indexed.
  @plugins.smartdata.index()
  @plugins.smartdata.svDb()
  dateIssued: string = '';

  // Last-modified date as provided by the source (string form); indexed.
  @plugins.smartdata.index()
  @plugins.smartdata.svDb()
  lastModified: string = '';

  // Additional source-specific string metadata.
  @plugins.smartdata.svDb()
  sourceMeta: Record<string, string> = {};

  // When the raw document was fetched from the source.
  @plugins.smartdata.svDb()
  fetchedAt: Date = new Date();

  // When the record was last written to the local store.
  @plugins.smartdata.svDb()
  syncedAt: Date = new Date();
}
File diff suppressed because it is too large Load Diff
+3
View File
@@ -0,0 +1,3 @@
export * from './interfaces.law.js';
export * from './classes.lawrecord.js';
export * from './classes.lawservice.js';
+48
View File
@@ -0,0 +1,48 @@
/** Supported jurisdictions: Germany, the European Union, and the United States. */
export type TJurisdiction = 'de' | 'eu' | 'us';

/** Upstream data sources laws can be synced from. */
export type TLawSource =
  | 'gesetze-im-internet'
  | 'eur-lex'
  | 'law-cornell-lii'
  | 'govinfo-plaw'
  | 'govinfo-uscode';

/** Format of the raw document body as retrieved from the source. */
export type TRawLawFormat = 'xml' | 'html' | 'text' | 'json';

/** GovInfo collections for US law: public laws (PLAW) or the US Code (USCODE). */
export type TUsLawCollection = 'PLAW' | 'USCODE';

/** Configuration for the law service; all fields optional (defaults apply). */
export interface ILawServiceConfig {
  dbFolderPath?: string;
  dbName?: string;
  govInfoApiKey?: string;
}

/** Parameters for looking up a single law. */
export interface ILawLookupRequest {
  jurisdiction: TJurisdiction;
  identifier: string;
  language?: string;
  usCollection?: TUsLawCollection;
  // When true, bypass the local store and re-sync from the source.
  forceSync?: boolean;
}

/** Parameters for syncing laws from an upstream source. */
export interface ILawSyncRequest {
  jurisdiction: TJurisdiction;
  limit?: number;
  offset?: number;
  language?: string;
  govInfoApiKey?: string;
  usCollection?: TUsLawCollection;
  // Only sync documents changed since this date.
  since?: Date;
}

/** Parameters for a full-text search over stored laws. */
export interface ILawSearchRequest {
  query: string;
  jurisdiction?: TJurisdiction;
  limit?: number;
}

/** Outcome of a sync run: how many laws were synced and which identifiers. */
export interface ILawSyncResult {
  jurisdiction: TJurisdiction;
  syncedCount: number;
  identifiers: string[];
}
+19
View File
@@ -1,7 +1,9 @@
// node native scope
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
export {
fs,
path,
}
@@ -14,23 +16,40 @@ import * as smartbrowser from '@push.rocks/smartbrowser';
import * as smartdata from '@push.rocks/smartdata';
import * as smartdelay from '@push.rocks/smartdelay';
import * as smartfile from '@push.rocks/smartfile';
import { SmartFs, SmartFsProviderNode } from '@push.rocks/smartfs';
import * as smartlog from '@push.rocks/smartlog';
import * as smartpath from '@push.rocks/smartpath';
import * as smartpromise from '@push.rocks/smartpromise';
import * as smartrequest from '@push.rocks/smartrequest';
import * as smartdb from '@push.rocks/smartdb';
import * as smartstream from '@push.rocks/smartstream';
import * as smartunique from '@push.rocks/smartunique';
import * as smartxml from '@push.rocks/smartxml';
const smartfs = new SmartFs(new SmartFsProviderNode());
const smartfileFactory = new smartfile.SmartFileFactory(smartfs);
/**
 * Normalizes an unknown thrown value into a human-readable message.
 * Error instances yield their `.message`; any other value is stringified.
 */
const getErrorMessage = (error: unknown) =>
  error instanceof Error ? error.message : String(error);
export {
getErrorMessage,
lik,
qenv,
smartarchive,
smartarray,
smartbrowser,
smartdb,
smartdata,
smartdelay,
smartfile,
smartfileFactory,
smartfs,
smartlog,
smartpath,
smartpromise,
+1 -1
View File
@@ -166,7 +166,7 @@ export class FundamentalsService implements IFundamentalsProviderRegistry {
lastError = error as Error;
console.warn(
`Provider ${provider.name} failed for ${this.getRequestDescription(request)}: ${error.message}`
`Provider ${provider.name} failed for ${this.getRequestDescription(request)}: ${plugins.getErrorMessage(error)}`
);
}
}
+17 -7
View File
@@ -32,7 +32,17 @@ export class StockDataService {
private logger = console;
private config: Required<IStockDataServiceConfig> = {
private config: {
cache: {
priceTTL: number;
fundamentalsTTL: number;
maxEntries: number;
};
timeout: {
price: number;
fundamentals: number;
};
} = {
cache: {
priceTTL: 24 * 60 * 60 * 1000, // 24 hours
fundamentalsTTL: 90 * 24 * 60 * 60 * 1000, // 90 days
@@ -200,7 +210,7 @@ export class StockDataService {
entry.lastErrorTime = new Date();
lastError = error as Error;
console.warn(`Provider ${provider.name} failed for ${ticker}: ${error.message}`);
console.warn(`Provider ${provider.name} failed for ${ticker}: ${plugins.getErrorMessage(error)}`);
}
}
@@ -250,7 +260,7 @@ export class StockDataService {
entry.lastErrorTime = new Date();
lastError = error as Error;
console.warn(`Provider ${provider.name} failed for batch prices: ${error.message}`);
console.warn(`Provider ${provider.name} failed for batch prices: ${plugins.getErrorMessage(error)}`);
}
}
@@ -301,7 +311,7 @@ export class StockDataService {
entry.lastErrorTime = new Date();
lastError = error as Error;
console.warn(`Provider ${provider.name} failed for ${ticker} fundamentals: ${error.message}`);
console.warn(`Provider ${provider.name} failed for ${ticker} fundamentals: ${plugins.getErrorMessage(error)}`);
}
}
@@ -352,7 +362,7 @@ export class StockDataService {
entry.lastErrorTime = new Date();
lastError = error as Error;
console.warn(`Provider ${provider.name} failed for batch fundamentals: ${error.message}`);
console.warn(`Provider ${provider.name} failed for batch fundamentals: ${plugins.getErrorMessage(error)}`);
}
}
@@ -382,7 +392,7 @@ export class StockDataService {
fundamentals = this.enrichWithPrice(fundamentals, price.price);
}
} catch (error) {
console.warn(`Failed to fetch fundamentals for ${normalizedRequest.ticker}: ${error.message}`);
console.warn(`Failed to fetch fundamentals for ${normalizedRequest.ticker}: ${plugins.getErrorMessage(error)}`);
// Continue without fundamentals
}
}
@@ -426,7 +436,7 @@ export class StockDataService {
fundamentalsMap = new Map(fundamentals.map(f => [f.ticker, f]));
}
} catch (error) {
console.warn(`Failed to fetch batch fundamentals: ${error.message}`);
console.warn(`Failed to fetch batch fundamentals: ${plugins.getErrorMessage(error)}`);
// Continue without fundamentals
}
}
+159 -7
View File
@@ -156,11 +156,22 @@ export class StockPriceService implements IProviderRegistry {
*/
public async getData(request: IStockDataRequest): Promise<IStockPrice | IStockPrice[]> {
const cacheKey = this.getDataCacheKey(request);
const cached = this.getFromCache(cacheKey);
if (cached) {
console.log(`Cache hit for ${this.getRequestDescription(request)}`);
return cached;
// For intraday requests without date filter, ALWAYS try incremental fetch
// This ensures we check for new data even if cache hasn't expired
if (request.type === 'intraday' && !request.date) {
const incrementalResult = await this.tryIncrementalFetch(request, cacheKey);
if (incrementalResult) {
return incrementalResult;
}
// If incremental fetch returns null, continue to normal fetch below
} else {
// For other request types (historical, current, batch), use simple cache
const cached = this.getFromCache(cacheKey);
if (cached) {
console.log(`Cache hit for ${this.getRequestDescription(request)}`);
return cached;
}
}
const providers = this.getEnabledProviders();
@@ -194,7 +205,7 @@ export class StockPriceService implements IProviderRegistry {
lastError = error as Error;
console.warn(
`Provider ${provider.name} failed for ${this.getRequestDescription(request)}: ${error.message}`
`Provider ${provider.name} failed for ${this.getRequestDescription(request)}: ${plugins.getErrorMessage(error)}`
);
}
}
@@ -204,6 +215,137 @@ export class StockPriceService implements IProviderRegistry {
);
}
/**
* Try incremental fetch: Only fetch NEW data since last cached timestamp
* Returns merged result if successful, null if incremental fetch not applicable
*/
/**
 * Try an incremental fetch: only fetch NEW intraday data since the latest
 * cached timestamp, then merge it with the cached window.
 *
 * @param request  Incoming stock-data request (must be intraday, no date filter).
 * @param cacheKey Cache key under which the merged result is stored.
 * @returns Merged price array on success; null when incremental fetch is not
 *          applicable or every provider failed (caller falls back to a full fetch).
 */
private async tryIncrementalFetch(
  request: IStockDataRequest,
  cacheKey: string
): Promise<IStockPrice[] | null> {
  // Only applicable for intraday requests without an explicit date filter.
  if (request.type !== 'intraday' || request.date) {
    return null;
  }

  // Find any cached intraday data for this ticker+interval. Keys may carry
  // extra suffixes (limit, exchange), hence the prefix match.
  const baseKey = `intraday:${request.ticker}:${request.interval}:latest`;
  let cachedData: IStockPrice[] | null = null;

  for (const [key, entry] of this.cache.entries()) {
    if (!key.startsWith(baseKey)) {
      continue;
    }
    const age = Date.now() - entry.timestamp.getTime();
    if (entry.ttl !== Infinity && age > entry.ttl) {
      continue; // Expired entry — keep scanning other candidates.
    }
    // FIX: the original broke out of the loop on the first prefix match even
    // when the entry's payload was not an array (leaving cachedData null and
    // aborting the incremental path). Keep scanning until an array is found.
    if (Array.isArray(entry.price)) {
      cachedData = entry.price as IStockPrice[];
      break;
    }
  }

  if (!cachedData || cachedData.length === 0) {
    return null; // No cached data to build on.
  }

  // Determine the newest timestamp present in the cached data.
  const latestCached = cachedData.reduce((latest, price) => {
    return price.timestamp > latest ? price.timestamp : latest;
  }, new Date(0));

  // Freshness check: if the newest point is under a minute old, the cache is
  // current enough — skip the network round-trip entirely.
  const dataAge = Date.now() - latestCached.getTime();
  const freshnessThreshold = 60 * 1000; // 1 minute
  if (dataAge < freshnessThreshold) {
    console.log(`🔄 Incremental cache: Latest data is ${Math.round(dataAge / 1000)}s old (< 1min), returning cached data`);
    return cachedData;
  }

  console.log(`🔄 Incremental cache: Found ${cachedData.length} cached records, latest: ${latestCached.toISOString()} (${Math.round(dataAge / 1000)}s old)`);

  // Fetch only data newer than the cached frontier by cloning the request
  // with a date filter.
  const modifiedRequest: IStockIntradayRequest = {
    ...request,
    date: latestCached // Fetch from this date forward
  };

  const providers = this.getEnabledProviders();

  for (const provider of providers) {
    const entry = this.providers.get(provider.name)!;

    try {
      const newData = await this.fetchWithRetry(
        () => provider.fetchData(modifiedRequest),
        entry.config
      ) as IStockPrice[];

      entry.successCount++;

      // Drop points at or before the cached frontier to avoid duplicates.
      const filteredNew = newData.filter(p => p.timestamp > latestCached);

      if (filteredNew.length === 0) {
        console.log(`🔄 Incremental cache: No new data since ${latestCached.toISOString()}, using cache`);
        return cachedData;
      }

      console.log(`🔄 Incremental cache: Fetched ${filteredNew.length} new records since ${latestCached.toISOString()}`);

      // Merge cached + new, sort ascending by timestamp, then deduplicate
      // (keeping the most recently fetched record per timestamp).
      const merged = [...cachedData, ...filteredNew];
      merged.sort((a, b) => a.timestamp.getTime() - b.timestamp.getTime());
      const deduped = this.deduplicateByTimestamp(merged);

      // Honor the caller's limit, taking the most recent N records.
      const effectiveLimit = request.limit || deduped.length;
      const result = deduped.slice(-effectiveLimit);

      // Refresh the cache with the merged window.
      const ttl = this.getRequestTTL(request, result);
      this.addToCache(cacheKey, result, ttl);

      console.log(`🔄 Incremental cache: Returning ${result.length} total records (${cachedData.length} cached + ${filteredNew.length} new)`);
      return result;

    } catch (error) {
      entry.errorCount++;
      entry.lastError = error as Error;
      entry.lastErrorTime = new Date();
      console.warn(`Incremental fetch failed for ${provider.name}, falling back to full fetch`);
      continue; // Try the next provider; caller falls back to a full fetch.
    }
  }

  return null; // All providers failed — caller performs a normal full fetch.
}
/**
* Deduplicate array of prices by timestamp, keeping the latest data for each timestamp
*/
/**
 * Deduplicate an array of prices by timestamp. When two records share the
 * same timestamp, the one with the most recent fetchedAt wins, so the
 * freshest data for each point in time is retained.
 */
private deduplicateByTimestamp(prices: IStockPrice[]): IStockPrice[] {
  const byTimestamp = new Map<number, IStockPrice>();

  for (const candidate of prices) {
    const key = candidate.timestamp.getTime();
    const current = byTimestamp.get(key);

    // First occurrence, or a fresher fetch for the same timestamp: keep it.
    if (current === undefined || candidate.fetchedAt > current.fetchedAt) {
      byTimestamp.set(key, candidate);
    }
  }

  return [...byTimestamp.values()];
}
/**
* Get TTL based on request type and result
*/
@@ -328,7 +470,8 @@ export class StockPriceService implements IProviderRegistry {
return `historical:${request.ticker}:${fromStr}:${toStr}${request.exchange ? `:${request.exchange}` : ''}`;
case 'intraday':
const dateStr = request.date ? request.date.toISOString().split('T')[0] : 'latest';
return `intraday:${request.ticker}:${request.interval}:${dateStr}${request.exchange ? `:${request.exchange}` : ''}`;
const limitStr = request.limit ? `:limit${request.limit}` : '';
return `intraday:${request.ticker}:${request.interval}:${dateStr}${limitStr}${request.exchange ? `:${request.exchange}` : ''}`;
case 'batch':
const tickers = request.tickers.sort().join(',');
return `batch:${tickers}${request.exchange ? `:${request.exchange}` : ''}`;
@@ -355,6 +498,15 @@ export class StockPriceService implements IProviderRegistry {
}
private addToCache(key: string, price: IStockPrice | IStockPrice[], ttl?: number): void {
// Deduplicate array entries by timestamp before caching
if (Array.isArray(price)) {
const beforeCount = price.length;
price = this.deduplicateByTimestamp(price);
if (price.length < beforeCount) {
console.log(`Deduplicated ${beforeCount - price.length} duplicate timestamps in cache entry for ${key}`);
}
}
// Enforce max entries limit
if (this.cache.size >= this.cacheConfig.maxEntries) {
// Remove oldest entry
@@ -370,4 +522,4 @@ export class StockPriceService implements IProviderRegistry {
ttl: ttl || this.cacheConfig.ttl
});
}
}
}
+2
View File
@@ -24,6 +24,8 @@ export interface IProviderConfig {
timeout?: number;
retryAttempts?: number;
retryDelay?: number;
maxRecords?: number; // Maximum records to fetch per request (default: 10000)
defaultIntradayLimit?: number; // Default limit for intraday requests without explicit limit (default: 1000)
}
export interface IProviderRegistry {
+42 -8
View File
@@ -378,8 +378,18 @@ export class CoinGeckoProvider implements IStockProvider {
const marketCapData = responseData.market_caps || [];
const volumeData = responseData.total_volumes || [];
// Process each data point
for (let i = 0; i < priceData.length; i++) {
// Warn if processing large amount of historical data
const maxRecords = this.config?.maxRecords || 10000;
if (priceData.length > maxRecords) {
this.logger.warn(
`Historical request for ${request.ticker} returned ${priceData.length} records, ` +
`which exceeds maxRecords limit of ${maxRecords}. Processing first ${maxRecords} only.`
);
}
// Process each data point (up to maxRecords)
const recordsToProcess = Math.min(priceData.length, maxRecords);
for (let i = 0; i < recordsToProcess; i++) {
const [timestamp, price] = priceData[i];
const date = new Date(timestamp);
@@ -480,8 +490,19 @@ export class CoinGeckoProvider implements IStockProvider {
const marketCapData = responseData.market_caps || [];
const volumeData = responseData.total_volumes || [];
// Apply limit if specified
const limit = request.limit || priceData.length;
// Apply default limit if user didn't specify one (performance optimization)
const effectiveLimit = request.limit || this.config?.defaultIntradayLimit || 1000;
// Warn if fetching large amount of data without explicit limit
if (!request.limit && priceData.length > effectiveLimit) {
this.logger.warn(
`Intraday request for ${request.ticker} returned ${priceData.length} records but no limit specified. ` +
`Applying default limit of ${effectiveLimit}. Consider adding a limit to the request for better performance.`
);
}
// Apply limit (take most recent data)
const limit = Math.min(effectiveLimit, priceData.length);
const dataToProcess = priceData.slice(-limit);
for (let i = 0; i < dataToProcess.length; i++) {
@@ -624,21 +645,34 @@ export class CoinGeckoProvider implements IStockProvider {
const coinList = await response.json() as ICoinListItem[];
// Clear cache before rebuilding to prevent memory leak
// Keep only entries that are in priorityTickerMap
const priorityEntries = new Map<string, string>();
for (const [key, value] of this.priorityTickerMap) {
priorityEntries.set(key, value);
}
this.coinMapCache.clear();
// Restore priority mappings
for (const [key, value] of priorityEntries) {
this.coinMapCache.set(key, value);
}
// Build mapping: symbol -> id
for (const coin of coinList) {
const symbol = coin.symbol.toLowerCase();
const id = coin.id.toLowerCase();
// Don't overwrite priority mappings or existing cache entries
if (!this.priorityTickerMap.has(symbol) && !this.coinMapCache.has(symbol)) {
// Don't overwrite priority mappings
if (!this.priorityTickerMap.has(symbol)) {
this.coinMapCache.set(symbol, id);
}
// Always cache the ID mapping
// Always cache the ID mapping (id -> id for when users pass CoinGecko IDs directly)
this.coinMapCache.set(id, id);
}
this.coinListLoadedAt = new Date();
this.logger.info(`Loaded ${coinList.length} coins from CoinGecko`);
this.logger.info(`Loaded ${coinList.length} coins from CoinGecko (cache: ${this.coinMapCache.size} entries)`);
} catch (error) {
this.logger.error('Failed to load coin list from CoinGecko:', error);
// Don't throw - we can still work with direct IDs
+27 -14
View File
@@ -45,7 +45,7 @@ export class MarketstackProvider implements IStockProvider {
public priority = 90; // Increased from 80 - now supports real-time intraday data during market hours
public readonly requiresAuth = true;
public readonly rateLimit = {
requestsPerMinute: undefined, // No per-minute limit specified
requestsPerMinute: 0, // No per-minute limit specified
requestsPerDay: undefined // Varies by plan
};
@@ -99,8 +99,9 @@ export class MarketstackProvider implements IStockProvider {
}
} catch (error) {
// If intraday fails, fallback to EOD with warning
if (error.message?.includes('intraday') || error.message?.includes('Marketstack API error')) {
this.logger.warn(`Intraday endpoint failed for ${request.ticker}, falling back to EOD:`, error.message);
const errorMessage = plugins.getErrorMessage(error);
if (errorMessage.includes('intraday') || errorMessage.includes('Marketstack API error')) {
this.logger.warn(`Intraday endpoint failed for ${request.ticker}, falling back to EOD:`, errorMessage);
try {
return await this.fetchCurrentPriceEod(request);
} catch (eodError) {
@@ -187,7 +188,7 @@ export class MarketstackProvider implements IStockProvider {
const allPrices: IStockPrice[] = [];
let offset = request.offset || 0;
const limit = request.limit || 1000; // Max per page
const maxRecords = 10000; // Safety limit
const maxRecords = this.config?.maxRecords || 10000; // Safety limit (configurable)
while (true) {
let url = `${this.baseUrl}/eod?access_key=${this.apiKey}`;
@@ -245,7 +246,7 @@ export class MarketstackProvider implements IStockProvider {
return allPrices;
} catch (error) {
this.logger.error(`Failed to fetch historical prices for ${request.ticker}:`, error);
throw new Error(`Marketstack: Failed to fetch historical prices for ${request.ticker}: ${error.message}`);
throw new Error(`Marketstack: Failed to fetch historical prices for ${request.ticker}: ${plugins.getErrorMessage(error)}`);
}
}
@@ -259,7 +260,18 @@ export class MarketstackProvider implements IStockProvider {
const allPrices: IStockPrice[] = [];
let offset = 0;
const limit = 1000; // Max per page for intraday
const maxRecords = 10000; // Safety limit
const maxRecords = this.config?.maxRecords || 10000; // Safety limit (configurable)
// Apply default limit if user didn't specify one (performance optimization)
const effectiveLimit = request.limit || this.config?.defaultIntradayLimit || 1000;
// Warn if fetching large amount of data without explicit limit
if (!request.limit && effectiveLimit > 1000) {
this.logger.warn(
`Intraday request for ${request.ticker} without explicit limit will fetch up to ${effectiveLimit} records. ` +
`Consider adding a limit to the request for better performance.`
);
}
// Format symbol for intraday endpoint (replace . with -)
const formattedSymbol = this.formatSymbolForIntraday(request.ticker);
@@ -310,23 +322,23 @@ export class MarketstackProvider implements IStockProvider {
const pagination = responseData.pagination;
const hasMore = pagination && offset + limit < pagination.total;
// Honor limit from request if specified, or safety limit
if (!hasMore || (request.limit && allPrices.length >= request.limit) || allPrices.length >= maxRecords) {
// Honor effective limit or safety maxRecords
if (!hasMore || allPrices.length >= effectiveLimit || allPrices.length >= maxRecords) {
break;
}
offset += limit;
}
// Apply limit if specified
if (request.limit && allPrices.length > request.limit) {
return allPrices.slice(0, request.limit);
// Apply effective limit
if (allPrices.length > effectiveLimit) {
return allPrices.slice(0, effectiveLimit);
}
return allPrices;
} catch (error) {
this.logger.error(`Failed to fetch intraday prices for ${request.ticker}:`, error);
throw new Error(`Marketstack: Failed to fetch intraday prices for ${request.ticker}: ${error.message}`);
throw new Error(`Marketstack: Failed to fetch intraday prices for ${request.ticker}: ${plugins.getErrorMessage(error)}`);
}
}
@@ -348,8 +360,9 @@ export class MarketstackProvider implements IStockProvider {
}
} catch (error) {
// Fallback to EOD if intraday fails
if (error.message?.includes('intraday') || error.message?.includes('Marketstack API error')) {
this.logger.warn(`Intraday batch endpoint failed, falling back to EOD:`, error.message);
const errorMessage = plugins.getErrorMessage(error);
if (errorMessage.includes('intraday') || errorMessage.includes('Marketstack API error')) {
this.logger.warn(`Intraday batch endpoint failed, falling back to EOD:`, errorMessage);
try {
return await this.fetchBatchCurrentPricesEod(request);
} catch (eodError) {
+2 -2
View File
@@ -141,7 +141,7 @@ export class SecEdgarProvider implements IFundamentalsProvider {
return this.parseCompanyFacts(request.ticker, cik, companyFacts);
} catch (error) {
this.logger.error(`Failed to fetch fundamentals for ${request.ticker}:`, error);
throw new Error(`SEC EDGAR: Failed to fetch fundamentals for ${request.ticker}: ${error.message}`);
throw new Error(`SEC EDGAR: Failed to fetch fundamentals for ${request.ticker}: ${plugins.getErrorMessage(error)}`);
}
}
@@ -163,7 +163,7 @@ export class SecEdgarProvider implements IFundamentalsProvider {
results.push(fundamentals);
} catch (error) {
this.logger.warn(`Failed to fetch fundamentals for ${ticker}:`, error);
errors.push(`${ticker}: ${error.message}`);
errors.push(`${ticker}: ${plugins.getErrorMessage(error)}`);
// Continue with other tickers
}
}
+3 -3
View File
@@ -86,7 +86,7 @@ export class YahooFinanceProvider implements IStockProvider {
return stockPrice;
} catch (error) {
console.error(`Failed to fetch price for ${request.ticker}:`, error);
throw new Error(`Yahoo Finance: Failed to fetch price for ${request.ticker}: ${error.message}`);
throw new Error(`Yahoo Finance: Failed to fetch price for ${request.ticker}: ${plugins.getErrorMessage(error)}`);
}
}
@@ -145,7 +145,7 @@ export class YahooFinanceProvider implements IStockProvider {
return prices;
} catch (error) {
console.error(`Failed to fetch batch prices:`, error);
throw new Error(`Yahoo Finance: Failed to fetch batch prices: ${error.message}`);
throw new Error(`Yahoo Finance: Failed to fetch batch prices: ${plugins.getErrorMessage(error)}`);
}
}
@@ -191,4 +191,4 @@ export class YahooFinanceProvider implements IStockProvider {
return timeDiff > 3600 ? 'CLOSED' : 'REGULAR';
}
}
}
}
-2
View File
@@ -1,7 +1,5 @@
{
"compilerOptions": {
"experimentalDecorators": true,
"useDefineForClassFields": false,
"target": "ES2022",
"module": "NodeNext",
"moduleResolution": "NodeNext",