6 Commits

SHA1 | Message | Date

6af4f6b9c0 | 3.2.0 | 2025-08-28 18:43:56 +00:00
  Some checks failed:
    Default (tags) / security (push): Successful in 42s
    Default (tags) / test (push): Successful in 1m1s
    Default (tags) / release (push): Failing after 59s
    Default (tags) / metadata (push): Successful in 1m12s
1141681b60 | feat(docs): Expand README with detailed usage/examples, update test runner and test script, and pin/bump dependencies | 2025-08-28 18:43:56 +00:00
2bf04ccb70 | 3.1.2 | 2025-08-28 16:06:44 +00:00
  Some checks failed:
    Default (tags) / security (push): Successful in 51s
    Default (tags) / test (push): Failing after 11m39s
    Default (tags) / release (push): Has been cancelled
    Default (tags) / metadata (push): Has been cancelled
97232adbb0 | fix(core): Update CI workflows and dependencies; apply small bugfixes and formatting improvements | 2025-08-28 16:06:44 +00:00
dda83e35c0 | 3.1.1 | 2024-11-26 22:41:11 +01:00
8bf1af08d3 | fix(core): Fix S3 cache manager methods for better path encoding consistency | 2024-11-26 22:41:10 +01:00
20 changed files with 5842 additions and 4736 deletions


@@ -6,8 +6,8 @@ on:
- '**'
env:
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
@@ -26,7 +26,7 @@ jobs:
- name: Install pnpm and npmci
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
pnpm install -g @ship.zone/npmci
- name: Run npm prepare
run: npmci npm prepare


@@ -6,8 +6,8 @@ on:
- '*'
env:
IMAGE: registry.gitlab.com/hosttoday/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@gitea.lossless.digital/${{gitea.repository}}.git
IMAGE: code.foss.global/host.today/ht-docker-node:npmci
NPMCI_COMPUTED_REPOURL: https://${{gitea.repository_owner}}:${{secrets.GITEA_TOKEN}}@/${{gitea.repository}}.git
NPMCI_TOKEN_NPM: ${{secrets.NPMCI_TOKEN_NPM}}
NPMCI_TOKEN_NPM2: ${{secrets.NPMCI_TOKEN_NPM2}}
NPMCI_GIT_GITHUBTOKEN: ${{secrets.NPMCI_GIT_GITHUBTOKEN}}
@@ -26,7 +26,7 @@ jobs:
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Audit production dependencies
@@ -54,7 +54,7 @@ jobs:
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Test stable
@@ -82,7 +82,7 @@ jobs:
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Release
@@ -104,7 +104,7 @@ jobs:
- name: Prepare
run: |
pnpm install -g pnpm
pnpm install -g @shipzone/npmci
pnpm install -g @ship.zone/npmci
npmci npm prepare
- name: Code quality
@@ -119,6 +119,6 @@ jobs:
run: |
npmci node install stable
npmci npm install
pnpm install -g @gitzone/tsdoc
pnpm install -g @git.zone/tsdoc
npmci command tsdoc
continue-on-error: true

.gitignore (vendored): 7 changed lines

@@ -3,7 +3,6 @@
# artifacts
coverage/
public/
pages/
# installs
node_modules/
@@ -17,4 +16,8 @@ node_modules/
dist/
dist_*/
# custom
# AI
.claude/
.serena/
#------# custom


@@ -1,6 +1,35 @@
# Changelog
## 2025-08-28 - 3.2.0 - feat(docs)
Expand README with detailed usage/examples, update test runner and test script, and pin/bump dependencies
- Completely overhauled README: added highlights, Quick Start, advanced configuration, core operations, examples, storage tier explanations, performance tips, and API reference.
- Updated tests to use @git.zone/tstest/tapbundle (test import changed) and adjusted package.json test script to run with --verbose --logfile --timeout 60.
- Bumped/pinned dependencies: @push.rocks/smartcache -> ^1.0.18 and several packages now have explicit version ranges (e.g. @push.rocks/lik -> ^6.2.2).
- Removed devDependency on @push.rocks/tapbundle.
## 2025-08-28 - 3.1.2 - fix(core)
Update CI workflows and dependencies; apply small bugfixes and formatting improvements
- Update Gitea workflow image URL and computed repo URL to internal registry and simplified repo path
- Switch npmci install target from @shipzone/npmci to @ship.zone/npmci in CI scripts
- Bump devDependencies and dependencies to newer versions (tsbuild, tstest, tapbundle, lik, smartbucket, smartfile, smartpath, smartpromise, taskbuffer, tsclass, etc.)
- Add package metadata: homepage anchor, packageManager, bugs URL and pnpm overrides block
- Fix TypeScript formatting and signatures (consistent trailing commas, multiline parameter lists, path join argument formatting)
- Fix file system and S3 manager calls to ensure consistent path handling and argument formatting (improved encoding/consistency in disk and S3 managers)
- Correct test file syntax (object commas) to avoid runtime/parse issues
- Update .gitignore to add AI tool folders and reorganize custom section
- Add pnpm-workspace.yaml with onlyBuiltDependencies entry
## 2024-11-26 - 3.1.1 - fix(core)
Fix S3 cache manager methods for better path encoding consistency
- Corrected path parameter usage in S3 manager methods to ensure encoding and consistency.
- Updated package.json Git dependencies for scoped packages.
## 2024-11-24 - 3.1.0 - feat(core)
Enhanced caching solution with optional configurations and improved documentation.
- Improved package description and keywords for better discoverability.
@@ -9,6 +38,7 @@ Enhanced caching solution with optional configurations and improved documentatio
- Enhanced README with detailed usage instructions and examples.
## 2024-05-29 to 2024-02-14 - 3.0.8
Minor configuration updates and documentation changes.
- Updated project description.
@@ -16,54 +46,65 @@ Minor configuration updates and documentation changes.
- Adjustments to npmextra.json regarding githost.
## 2024-02-14 - 3.0.7
Core enhancements and bug fixes.
- Fixed updates in the core module to improve stability.
## 2024-02-14 - 3.0.6
Core maintenance and updates.
- Implemented minor fixes in the core module.
## 2023-07-21 - 3.0.5
Addressed core module adjustments.
- Patched core module to rectify issues.
## 2023-07-20 - 3.0.4
Further improvements to core functionality.
- Additional fixes applied to the core component.
## 2023-07-11 to 2023-07-10 - 3.0.3
Organizational and structural changes.
- Transitioned to a new organizational scheme.
## 2023-01-09 - 3.0.2
Core module corrections.
- Resolved various issues within the core module.
## 2022-06-09 - 3.0.1
Continuous enhancements in core functionality.
- Continued bug fixes for core module efficiency.
## 2022-04-04 to 2022-04-02 - 3.0.0
Major release with significant updates.
## 2022-03-22 - 2.0.0
Significant breaking changes in core tech stack.
- BREAKING CHANGE: Transitioned core module to ECMAScript Modules (ESM).
## 2021-05-10 - 1.0.9
Caching improvements and optimization.
- Enhanced caching by properly respecting TTL across all cache levels.
## 2020-02-15 to 2020-02-05 - 1.0.6 to 1.0.1
Initial series of core module fixes and updates.
- Persistent efforts to stabilize and improve core functionalities.


@@ -32,4 +32,4 @@
"tsdoc": {
"legal": "\n## License and Legal Information\n\nThis repository contains open-source code that is licensed under the MIT License. A copy of the MIT License can be found in the [license](license) file within this repository. \n\n**Please note:** The MIT License does not grant permission to use the trade names, trademarks, service marks, or product names of the project, except as required for reasonable and customary use in describing the origin of the work and reproducing the content of the NOTICE file.\n\n### Trademarks\n\nThis project is owned and maintained by Task Venture Capital GmbH. The names and logos associated with Task Venture Capital GmbH and any related products or services are trademarks of Task Venture Capital GmbH and are not included within the scope of the MIT license granted herein. Use of these trademarks must comply with Task Venture Capital GmbH's Trademark Guidelines, and any usage must be approved in writing by Task Venture Capital GmbH.\n\n### Company Information\n\nTask Venture Capital GmbH \nRegistered at District court Bremen HRB 35230 HB, Germany\n\nFor any legal inquiries or if you require further information, please contact us via email at hello@task.vc.\n\nBy using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.\n"
}
}
}


@@ -1,6 +1,6 @@
{
"name": "@push.rocks/levelcache",
"version": "3.1.0",
"version": "3.2.0",
"private": false,
"description": "A versatile caching solution offering multi-level storage utilizing memory, disk, and Amazon S3 for efficient data management and backup.",
"main": "dist_ts/index.js",
@@ -9,32 +9,31 @@
"author": "Lossless GmbH",
"license": "MIT",
"scripts": {
"test": "(tstest test/ --web)",
"test": "(tstest test/ --verbose --logfile --timeout 60)",
"build": "(tsbuild --web --allowimplicitany)",
"buildDocs": "tsdoc",
"localPublish": "gitzone commit && pnpm run build && pnpm publish && pnpm publish --access public --registry=\"https://registry.npmjs.org\""
},
"devDependencies": {
"@gitzone/tsbuild": "^2.1.66",
"@gitzone/tsrun": "^1.2.44",
"@gitzone/tstest": "^1.0.77",
"@push.rocks/tapbundle": "^5.5.3",
"@types/node": "^22.9.3"
"@git.zone/tsbuild": "^2.6.7",
"@git.zone/tsrun": "^1.2.44",
"@git.zone/tstest": "^2.3.5",
"@types/node": "^24.3.0"
},
"dependencies": {
"@push.rocks/lik": "^6.1.0",
"@push.rocks/smartbucket": "^3.3.3",
"@push.rocks/smartcache": "^1.0.13",
"@push.rocks/smartenv": "^5.0.12",
"@push.rocks/lik": "^6.2.2",
"@push.rocks/smartbucket": "^3.3.10",
"@push.rocks/smartcache": "^1.0.18",
"@push.rocks/smartenv": "^5.0.13",
"@push.rocks/smartexit": "^1.0.23",
"@push.rocks/smartfile": "^11.0.21",
"@push.rocks/smartfile": "^11.2.7",
"@push.rocks/smartjson": "^5.0.20",
"@push.rocks/smartpath": "^5.0.18",
"@push.rocks/smartpromise": "^4.0.4",
"@push.rocks/smartpath": "^6.0.0",
"@push.rocks/smartpromise": "^4.2.3",
"@push.rocks/smartstring": "^4.0.15",
"@push.rocks/smartunique": "^3.0.9",
"@push.rocks/taskbuffer": "^3.1.7",
"@tsclass/tsclass": "^4.1.2"
"@push.rocks/taskbuffer": "^3.1.10",
"@tsclass/tsclass": "^9.2.0"
},
"files": [
"ts/**/*",
@@ -65,9 +64,16 @@
"Node.js",
"TypeScript"
],
"homepage": "https://code.foss.global/push.rocks/levelcache",
"homepage": "https://code.foss.global/push.rocks/levelcache#readme",
"repository": {
"type": "git",
"url": "https://code.foss.global/push.rocks/levelcache.git"
},
"packageManager": "pnpm@10.14.0+sha512.ad27a79641b49c3e481a16a805baa71817a04bbe06a38d17e60e2eaee83f6a146c6a688125f5792e48dd5ba30e7da52a5cda4c3992b9ccf333f9ce223af84748",
"bugs": {
"url": "https://code.foss.global/push.rocks/levelcache/issues"
},
"pnpm": {
"overrides": {}
}
}

pnpm-lock.yaml (generated): 9913 changed lines

File diff suppressed because it is too large.

pnpm-workspace.yaml (new file): 4 lines added

@@ -0,0 +1,4 @@
onlyBuiltDependencies:
- esbuild
- mongodb-memory-server
- puppeteer

readme.md: 430 changed lines

@@ -1,167 +1,373 @@
```markdown
# @push.rocks/levelcache
A cache that utilizes memory, disk, and S3 for data storage and backup.
# @push.rocks/levelcache 🚀
**Supercharged Multi-Level Caching for Modern Applications**
A high-performance, tiered caching solution that intelligently leverages memory, disk, and S3 storage to deliver blazing-fast data access with reliable persistence and backup capabilities.
## Highlights
**Intelligent Tiered Storage** - Automatically routes data between memory, disk, and S3 based on size and access patterns
**Lightning Fast** - Memory-first architecture ensures microsecond access times for hot data
💾 **Persistent & Durable** - Optional disk and S3 layers provide data durability across restarts
🎯 **TTL Support** - Built-in time-to-live for automatic cache expiration
🔧 **TypeScript First** - Full type safety and excellent IDE support
☁️ **S3 Ready** - Seamless integration with Amazon S3 for massive scale caching
## Install
To install `@push.rocks/levelcache`, you can use npm or yarn:
```bash
# Using npm
npm install @push.rocks/levelcache --save
```
or
```bash
yarn add @push.rocks/levelcache
```
This installs `@push.rocks/levelcache` and adds it to your project's dependencies.
# Using yarn
yarn add @push.rocks/levelcache
# Using pnpm (recommended)
pnpm add @push.rocks/levelcache
```
## Usage
`@push.rocks/levelcache` provides a comprehensive solution for multi-level caching that takes advantage of memory, disk, and Amazon S3 storage, making it a versatile tool for data caching and backup. The package is built with TypeScript, enabling strict type checks and better development experience. Below, we'll explore how to effectively employ `@push.rocks/levelcache` in your projects, discussing its features and demonstrating its usage with code examples.
### Quick Start
### 1. Overview
The `LevelCache` class handles all cache operations. It decides where to store data based on pre-configured thresholds corresponding to the data size and the total storage capacity allocated for each storage type (memory/disk/S3). This mechanism optimizes both speed and persistence, allowing for efficient data storage and retrieval.
### 2. Getting Started: Initialization
To use `@push.rocks/levelcache`, you'll need to import the main classes: `LevelCache` and `CacheEntry`. `LevelCache` is the primary class, while `CacheEntry` represents individual pieces of cached data.
Get up and running with just a few lines:
```typescript
import { LevelCache, CacheEntry } from '@push.rocks/levelcache';
// Initialize cache with minimal config
const cache = new LevelCache({
cacheId: 'myAppCache'
});
// Wait for cache to be ready
await cache.ready;
// Store data
const entry = new CacheEntry({
contents: Buffer.from('Hello Cache World! 🎉'),
ttl: 60000 // 1 minute TTL
});
await cache.storeCacheEntryByKey('greeting', entry);
// Retrieve data
const retrieved = await cache.retrieveCacheEntryByKey('greeting');
console.log(retrieved.contents.toString()); // "Hello Cache World! 🎉"
```
#### Initialization with Optional Configurations
### Advanced Configuration
To create a cache, instantiate the `LevelCache` class with desired configurations. You can specify the limits for memory and disk storage, setup S3 configurations if needed, and more.
`LevelCache` offers granular control over storage tiers and behavior:
```typescript
const myCache = new LevelCache({
cacheId: 'myUniqueCacheId', // Unique ID for cache delineation
maxMemoryStorageInMB: 10, // Maximum memory use in MB (default 0.5 MB)
maxDiskStorageInMB: 100, // Maximum disk space in MB (default 10 MB)
diskStoragePath: './myCache', // Path for storing disk cache; default is '.nogit'
const cache = new LevelCache({
cacheId: 'productionCache',
// Storage Limits
maxMemoryStorageInMB: 128, // 128MB RAM cache (default: 0.5)
maxDiskStorageInMB: 1024, // 1GB disk cache (default: 10)
maxS3StorageInMB: 10240, // 10GB S3 storage (optional)
// Disk Configuration
diskStoragePath: './cache-data', // Custom disk location (default: '.nogit')
// S3 Configuration (optional)
s3Config: {
accessKeyId: 'yourAccessKeyId', // AWS S3 access key
secretAccessKey: 'yourSecretAccessKey', // Corresponding secret key
region: 'us-west-2' // AWS region, e.g., 'us-west-2'
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
region: 'us-east-1'
},
s3BucketName: 'myBucketName', // Designated name for S3 bucket
immutableCache: false, // Whether stored cache entries should remain unaltered
persistentCache: true, // Should the cache persist upon restarts
s3BucketName: 'my-cache-bucket',
// Behavior Options
forceLevel: 'memory', // Force specific tier (optional)
immutableCache: false, // Prevent cache mutations
persistentCache: true // Persist cache on restarts
});
```
### 3. Storing and Retrieving Data
`LevelCache` methods enable seamless data storage and retrieval, handling complexity under the hood.
### Core Operations
#### Storing Data
Create a `CacheEntry` specifying the data content and time-to-live (`ttl`). Use `storeCacheEntryByKey` to add this entry to the cache.
```typescript
async function storeData() {
// Wait for cache to be ready before operations
await myCache.ready;
// Store text data
const textEntry = new CacheEntry({
contents: Buffer.from('Important text data'),
ttl: 3600000, // 1 hour
typeInfo: 'text/plain' // Optional metadata
});
await cache.storeCacheEntryByKey('document:123', textEntry);
const entryContents = Buffer.from('Caching this data');
const myCacheEntry = new CacheEntry({
ttl: 7200000, // Time-to-live in milliseconds (2 hours)
contents: entryContents,
});
// Store JSON data
const jsonData = { user: 'john', role: 'admin' };
const jsonEntry = new CacheEntry({
contents: Buffer.from(JSON.stringify(jsonData)),
ttl: 7200000, // 2 hours
typeInfo: 'application/json'
});
await cache.storeCacheEntryByKey('user:john', jsonEntry);
// Storing the cache entry associated with a specific key
await myCache.storeCacheEntryByKey('someDataKey', myCacheEntry);
}
// Store binary data (images, files, etc.)
const imageBuffer = await fs.readFile('./logo.png');
const imageEntry = new CacheEntry({
contents: imageBuffer,
ttl: 86400000, // 24 hours
typeInfo: 'image/png'
});
await cache.storeCacheEntryByKey('assets:logo', imageEntry);
```
#### Retrieving Data
Retrieve stored data using `retrieveCacheEntryByKey`. The retrieved `CacheEntry` will give access to the original data.
```typescript
// Basic retrieval
const entry = await cache.retrieveCacheEntryByKey('user:john');
if (entry) {
const userData = JSON.parse(entry.contents.toString());
console.log(userData); // { user: 'john', role: 'admin' }
} else {
console.log('Cache miss or expired');
}
// Check if key exists
const exists = await cache.checkKeyPresence('user:john');
console.log(`Key exists: ${exists}`);
// Handle cache misses gracefully
async function getUser(userId: string) {
const cacheKey = `user:${userId}`;
let entry = await cache.retrieveCacheEntryByKey(cacheKey);
if (!entry) {
// Cache miss - fetch from database
const userData = await database.getUser(userId);
// Store in cache for next time
entry = new CacheEntry({
contents: Buffer.from(JSON.stringify(userData)),
ttl: 600000 // 10 minutes
});
await cache.storeCacheEntryByKey(cacheKey, entry);
}
return JSON.parse(entry.contents.toString());
}
```
#### Managing Cache
```typescript
async function retrieveData() {
const retrievedEntry = await myCache.retrieveCacheEntryByKey('someDataKey');
if (retrievedEntry) {
const data = retrievedEntry.contents.toString();
console.log(data); // Expected output: Caching this data
} else {
console.log('Data not found or expired.');
// Delete specific entry
await cache.deleteCacheEntryByKey('user:john');
// Clean expired entries
await cache.cleanOutdated();
// Clear entire cache
await cache.cleanAll();
```
### Storage Tiers Explained
`LevelCache` automatically determines the optimal storage tier based on data size and available capacity:
#### 1. **Memory Cache** 🧠
- **Speed**: Microsecond access
- **Best for**: Frequently accessed, small data
- **Default limit**: 0.5MB (configurable)
- First tier checked for all operations
#### 2. **Disk Cache** 💾
- **Speed**: Millisecond access
- **Best for**: Medium-sized data, persistent storage needed
- **Default limit**: 10MB (configurable)
- Data survives process restarts when `persistentCache: true`
#### 3. **S3 Cache** ☁️
- **Speed**: Network latency (typically 50-200ms)
- **Best for**: Large data, long-term storage, distributed caching
- **Default limit**: 50MB (configurable)
- Requires S3 configuration
- Ideal for shared cache across multiple instances
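To make the tiering concrete, here is a minimal sketch assuming the constructor options documented in this README; the sizes and thresholds are illustrative, and the size-based routing happens inside the library rather than through anything you call explicitly.
```typescript
import { LevelCache, CacheEntry } from '@push.rocks/levelcache';

// Minimal sketch: entries are routed by size against the configured tier limits.
const tieredCache = new LevelCache({
  cacheId: 'tierDemo',
  maxMemoryStorageInMB: 1,  // small, hot values stay in RAM
  maxDiskStorageInMB: 64,   // larger values spill over to disk
  // no s3Config given, so the S3 tier simply stays inactive
});
await tieredCache.ready;

// A few hundred bytes: fits the memory budget, so it is served from RAM.
await tieredCache.storeCacheEntryByKey(
  'small',
  new CacheEntry({ contents: Buffer.from(JSON.stringify({ hot: true })), ttl: 60_000 }),
);

// ~5 MB: exceeds the 1 MB memory budget, so it lands on the disk tier instead.
await tieredCache.storeCacheEntryByKey(
  'large',
  new CacheEntry({ contents: Buffer.alloc(5 * 1024 * 1024, 1), ttl: 3_600_000 }),
);
```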
### Real-World Use Cases
#### API Response Caching
```typescript
class ApiCache {
private cache: LevelCache;
constructor() {
this.cache = new LevelCache({
cacheId: 'apiResponses',
maxMemoryStorageInMB: 256,
maxDiskStorageInMB: 2048,
persistentCache: true
});
}
async getCachedResponse(endpoint: string, params: any) {
const cacheKey = `api:${endpoint}:${JSON.stringify(params)}`;
let cached = await this.cache.retrieveCacheEntryByKey(cacheKey);
if (cached) {
return JSON.parse(cached.contents.toString());
}
// Fetch fresh data
const response = await fetch(endpoint, { params });
const data = await response.json();
// Cache for 5 minutes
const entry = new CacheEntry({
contents: Buffer.from(JSON.stringify(data)),
ttl: 300000
});
await this.cache.storeCacheEntryByKey(cacheKey, entry);
return data;
}
}
```
### 4. Key Management: Updating and Deleting
#### Deleting Cache Entries
Remove entries with `deleteCacheEntryByKey`, enabling clean cache management.
#### Session Storage
```typescript
async function deleteData() {
// Removes an entry using its unique key identifier
await myCache.deleteCacheEntryByKey('someDataKey');
class SessionManager {
private cache: LevelCache;
constructor() {
this.cache = new LevelCache({
cacheId: 'sessions',
maxMemoryStorageInMB: 64,
maxDiskStorageInMB: 512,
immutableCache: false,
persistentCache: true
});
}
async createSession(userId: string, sessionData: any) {
const sessionId = generateSessionId();
const entry = new CacheEntry({
contents: Buffer.from(JSON.stringify({
userId,
...sessionData,
createdAt: Date.now()
})),
ttl: 86400000 // 24 hour sessions
});
await this.cache.storeCacheEntryByKey(`session:${sessionId}`, entry);
return sessionId;
}
async getSession(sessionId: string) {
const entry = await this.cache.retrieveCacheEntryByKey(`session:${sessionId}`);
return entry ? JSON.parse(entry.contents.toString()) : null;
}
async destroySession(sessionId: string) {
await this.cache.deleteCacheEntryByKey(`session:${sessionId}`);
}
}
```
### 5. Cache Cleaning
Often, managing storage limits or removing outdated data becomes essential. The library supports these scenarios.
#### Automated Cleaning
While cache entries will naturally expire with `ttl` values, you can force-remove outdated entries.
#### Distributed Processing Cache
```typescript
// Clean outdated or expired entries
await myCache.cleanOutdated();
```
#### Full Cache Reset
Clear all entries, efficiently resetting your cache storage.
```typescript
// Flush entire cache content
await myCache.cleanAll();
```
### 6. Configuring and Managing Advanced Use Cases
The flexible nature of `@push.rocks/levelcache` grants additional customization suited for more advanced requirements.
#### Custom Route Management
For certain demands, you might want to specify distinct data handling policies or routing logic.
- Adjust S3 handling, size thresholds, or immutability options dynamically.
- Utilize internal API expansions defined within the library for fine-grained operations.
#### Handling Large Datasets
Tailor the cache levels (memory, disk, S3) to accommodate higher loads:
```typescript
const largeDatasetCache = new LevelCache({
cacheId: 'largeDatasetCache',
// Customize limits and behavior for particular patterns
maxMemoryStorageInMB: 1024, // 1 GB memory allocation
maxDiskStorageInMB: 2048, // 2 GB disk space allowance
maxS3StorageInMB: 10240, // 10 GB S3 backup buffering
// Share computed results across multiple workers using S3
const distributedCache = new LevelCache({
cacheId: 'mlModelResults',
maxMemoryStorageInMB: 512,
maxDiskStorageInMB: 5120,
maxS3StorageInMB: 102400, // 100GB for model outputs
s3Config: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
region: 'us-west-2'
},
s3BucketName: 'ml-computation-cache',
persistentCache: true
});
// Worker process can store results
async function storeComputationResult(jobId: string, result: Buffer) {
const entry = new CacheEntry({
contents: result,
ttl: 604800000, // 7 days
typeInfo: 'application/octet-stream'
});
await distributedCache.storeCacheEntryByKey(`job:${jobId}`, entry);
}
// Other workers can retrieve results
async function getComputationResult(jobId: string) {
const entry = await distributedCache.retrieveCacheEntryByKey(`job:${jobId}`);
return entry ? entry.contents : null;
}
```
With intelligent routing and management embedded, `LevelCache` ensures optimal trade-offs between speed and stability.
### Performance Tips 🎯
### Conclusion
1. **Size your tiers appropriately** - Set memory limits based on your hot data size
2. **Use meaningful cache keys** - Include version/hash in keys for cache invalidation
3. **Set realistic TTLs** - Balance freshness with performance
4. **Monitor cache hit rates** - Track `checkKeyPresence()` to optimize tier sizes
5. **Batch operations** - Group related cache operations when possible
6. **Use compression** - Compress large values before caching to maximize tier utilization
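For tip 6, a minimal compression wrapper might look like the sketch below, using Node's built-in `zlib`; the helper names and the `application/json+gzip` tag are illustrative, and only `LevelCache` and `CacheEntry` come from this package.
```typescript
import { gzipSync, gunzipSync } from 'node:zlib';
import { LevelCache, CacheEntry } from '@push.rocks/levelcache';

// Illustrative helpers: gzip JSON payloads before they enter the cache tiers.
export async function storeCompressedJson(
  cache: LevelCache,
  key: string,
  value: unknown,
  ttlMs: number,
): Promise<void> {
  const entry = new CacheEntry({
    contents: gzipSync(Buffer.from(JSON.stringify(value))),
    ttl: ttlMs,
    typeInfo: 'application/json+gzip', // free-form metadata tag
  });
  await cache.storeCacheEntryByKey(key, entry);
}

export async function retrieveCompressedJson<T>(
  cache: LevelCache,
  key: string,
): Promise<T | null> {
  const entry = await cache.retrieveCacheEntryByKey(key);
  if (!entry) return null;
  return JSON.parse(gunzipSync(entry.contents).toString()) as T;
}
```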
By adapting to bespoke caching styles and leveraging extensive storage structures (in-memory, on-disk, and cloud-based), `@push.rocks/levelcache` can handle varied data caching use-cases with ease. Whether you're aiming for top-tier speed for volatile data or need extended persistence for critical datasets, configure `LevelCache` to excellently complement your operational context.
### Migration & Compatibility
Explore the package further through testing and customization, ensuring you're getting the most benefit from integrated features and storage mechanisms. The robustness of `@push.rocks/levelcache` consistently optimizes the caching and retrieval process across different runtime environments.
Coming from other caching solutions? Here's how LevelCache compares:
We recommend examining your own application's storage behavior taxonomy; this helps frame caching strategies that consider both speed and durability requirements. Integrate `@push.rocks/levelcache` as a billing cornerstone of your systems architecture built on TypeScript and Node.js, embracing luxury control over resource use and performance elevation.
```
- **Redis** → LevelCache provides similar speed with added persistence and S3 backup
- **Memcached** → LevelCache adds persistence and automatic tier management
- **Local storage** → LevelCache adds memory tier and S3 backup capabilities
- **S3 only** → LevelCache adds memory and disk tiers for dramatic speed improvements
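When migrating code written against a plain get/set/del cache client, a thin adapter over the documented `LevelCache` methods is usually enough; the class below is an illustrative sketch, not part of the package.
```typescript
import { LevelCache, CacheEntry } from '@push.rocks/levelcache';

// Illustrative adapter for code that expects a simple string key/value cache
// (e.g. when moving off Redis or Memcached string commands).
export class SimpleKvAdapter {
  constructor(private cache: LevelCache) {}

  public async set(key: string, value: string, ttlMs = 300_000): Promise<void> {
    await this.cache.storeCacheEntryByKey(
      key,
      new CacheEntry({ contents: Buffer.from(value), ttl: ttlMs }),
    );
  }

  public async get(key: string): Promise<string | null> {
    const entry = await this.cache.retrieveCacheEntryByKey(key);
    return entry ? entry.contents.toString() : null;
  }

  public async del(key: string): Promise<void> {
    await this.cache.deleteCacheEntryByKey(key);
  }
}
```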
## API Reference
### LevelCache Class
#### Constructor Options
| Option | Type | Default | Description |
|--------|------|---------|-------------|
| `cacheId` | string | required | Unique identifier for the cache instance |
| `maxMemoryStorageInMB` | number | 0.5 | Maximum memory storage in megabytes |
| `maxDiskStorageInMB` | number | 10 | Maximum disk storage in megabytes |
| `maxS3StorageInMB` | number | 50 | Maximum S3 storage in megabytes |
| `diskStoragePath` | string | '.nogit' | Path for disk cache storage |
| `s3Config` | object | undefined | AWS S3 configuration object |
| `s3BucketName` | string | undefined | S3 bucket name for cache storage |
| `forceLevel` | string | undefined | Force storage to specific tier |
| `immutableCache` | boolean | false | Prevent cache entry modifications |
| `persistentCache` | boolean | false | Persist cache across restarts |
#### Methods
| Method | Returns | Description |
|--------|---------|-------------|
| `ready` | Promise<void> | Resolves when cache is initialized |
| `storeCacheEntryByKey(key, entry)` | Promise<void> | Store a cache entry |
| `retrieveCacheEntryByKey(key)` | Promise<CacheEntry\|null> | Retrieve a cache entry |
| `checkKeyPresence(key)` | Promise<boolean> | Check if key exists |
| `deleteCacheEntryByKey(key)` | Promise<void> | Delete a cache entry |
| `cleanOutdated()` | Promise<void> | Remove expired entries |
| `cleanAll()` | Promise<void> | Clear entire cache |
### CacheEntry Class
#### Constructor Options
| Option | Type | Required | Description |
|--------|------|----------|-------------|
| `contents` | Buffer | yes | The data to cache |
| `ttl` | number | yes | Time-to-live in milliseconds |
| `typeInfo` | string | no | Optional metadata about content type |
## License and Legal Information
@@ -180,4 +386,4 @@ Registered at District court Bremen HRB 35230 HB, Germany
For any legal inquiries or if you require further information, please contact us via email at hello@task.vc.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.
By using this repository, you acknowledge that you have read this section, agree to comply with its terms, and understand that the licensing of the code does not imply endorsement by Task Venture Capital GmbH of any derivative works.


@@ -1,4 +1,4 @@
import { expect, tap } from '@push.rocks/tapbundle';
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as levelcache from '../ts/index.js';
import { CacheEntry } from '../ts/index.js';
@@ -18,7 +18,7 @@ tap.test('should cache a value', async () => {
contents: Buffer.from('heythere'),
ttl: 10000,
typeInfo: 'string',
})
}),
);
const result = await testLevelCache.retrieveCacheEntryByKey('mykey');
expect(result.contents.toString()).toEqual('heythere');
@@ -31,7 +31,7 @@ tap.test('should respect ttl', async (tools) => {
contents: Buffer.from('heythere'),
ttl: 1000,
typeInfo: 'string',
})
}),
);
const result = await testLevelCache.retrieveCacheEntryByKey('mykey');
expect(result.contents.toString()).toEqual('heythere');
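Assembled from the hunks above, the updated test file reads roughly as follows; the instance-creation test and the closing `tap.start()` call are assumptions based on common tapbundle usage, since they fall outside the hunks shown.
```typescript
import { expect, tap } from '@git.zone/tstest/tapbundle';
import * as levelcache from '../ts/index.js';
import { CacheEntry } from '../ts/index.js';

let testLevelCache: levelcache.LevelCache;

tap.test('should create a LevelCache instance', async () => {
  // assumed setup: the hunks only show the instance being used, not created
  testLevelCache = new levelcache.LevelCache({ cacheId: 'test' });
  await testLevelCache.ready;
});

tap.test('should cache a value', async () => {
  await testLevelCache.storeCacheEntryByKey(
    'mykey',
    new CacheEntry({
      contents: Buffer.from('heythere'),
      ttl: 10000,
      typeInfo: 'string',
    }),
  );
  const result = await testLevelCache.retrieveCacheEntryByKey('mykey');
  expect(result.contents.toString()).toEqual('heythere');
});

export default tap.start(); // assumed tapbundle convention
```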


@@ -3,6 +3,6 @@
*/
export const commitinfo = {
name: '@push.rocks/levelcache',
version: '3.1.0',
version: '3.2.0',
description: 'A versatile caching solution offering multi-level storage utilizing memory, disk, and Amazon S3 for efficient data management and backup.'
}


@@ -8,7 +8,10 @@ export abstract class AbstractCache {
/**
* store a Blob
*/
public abstract storeCacheEntryByKey(keyArg: string, valueArg: CacheEntry): Promise<void>;
public abstract storeCacheEntryByKey(
keyArg: string,
valueArg: CacheEntry,
): Promise<void>;
/**
* retrieve cache entry


@@ -1,7 +1,10 @@
import * as plugins from './levelcache.plugins.js';
import * as paths from './levelcache.paths.js';
import { AbstractCache } from './levelcache.abstract.classes.cache.js';
import { type ILevelCacheConstructorOptions, LevelCache } from './levelcache.classes.levelcache.js';
import {
type ILevelCacheConstructorOptions,
LevelCache,
} from './levelcache.classes.levelcache.js';
import { CacheEntry } from './levelcache.classes.cacheentry.js';
/**
@@ -26,10 +29,13 @@ export class CacheDiskManager extends AbstractCache {
if (this.levelCacheRef.options.diskStoragePath) {
this.fsPath = plugins.path.join(
this.levelCacheRef.options.diskStoragePath,
this.levelCacheRef.options.cacheId
this.levelCacheRef.options.cacheId,
);
} else {
this.fsPath = plugins.path.join(paths.nogitDir, this.levelCacheRef.options.cacheId);
this.fsPath = plugins.path.join(
paths.nogitDir,
this.levelCacheRef.options.cacheId,
);
}
if (this.status === 'active') {
plugins.smartfile.fs.ensureDirSync(this.fsPath);
@@ -39,7 +45,7 @@ export class CacheDiskManager extends AbstractCache {
public async retrieveCacheEntryByKey(keyArg: string): Promise<CacheEntry> {
const fileString = await plugins.smartfile.fs.toStringSync(
plugins.path.join(this.fsPath, encodeURIComponent(keyArg))
plugins.path.join(this.fsPath, encodeURIComponent(keyArg)),
);
return CacheEntry.fromStorageJsonString(fileString);
}
@@ -47,16 +53,20 @@ export class CacheDiskManager extends AbstractCache {
public async storeCacheEntryByKey(keyArg: string, cacheEntryArg: CacheEntry) {
await plugins.smartfile.memory.toFs(
cacheEntryArg.foldToJson(),
plugins.path.join(this.fsPath, encodeURIComponent(keyArg))
plugins.path.join(this.fsPath, encodeURIComponent(keyArg)),
);
}
public async checkKeyPresence(keyArg: string): Promise<boolean> {
return plugins.smartfile.fs.isFile(plugins.path.join(this.fsPath, encodeURIComponent(keyArg)));
return plugins.smartfile.fs.isFile(
plugins.path.join(this.fsPath, encodeURIComponent(keyArg)),
);
}
public async deleteCacheEntryByKey(keyArg: string) {
await plugins.smartfile.fs.remove(plugins.path.join(this.fsPath, encodeURIComponent(keyArg)));
await plugins.smartfile.fs.remove(
plugins.path.join(this.fsPath, encodeURIComponent(keyArg)),
);
}
public async cleanOutdated() {}


@@ -1,7 +1,10 @@
import * as plugins from './levelcache.plugins.js';
import { AbstractCache } from './levelcache.abstract.classes.cache.js';
import { CacheEntry } from './levelcache.classes.cacheentry.js';
import { type ILevelCacheConstructorOptions, LevelCache } from './levelcache.classes.levelcache.js';
import {
type ILevelCacheConstructorOptions,
LevelCache,
} from './levelcache.classes.levelcache.js';
export class CacheMemoryManager extends AbstractCache {
private levelCacheRef: LevelCache;
@@ -22,7 +25,10 @@ export class CacheMemoryManager extends AbstractCache {
this.readyDeferred.resolve();
}
public async storeCacheEntryByKey(keyArg: string, cacheEntryArg: CacheEntry): Promise<void> {
public async storeCacheEntryByKey(
keyArg: string,
cacheEntryArg: CacheEntry,
): Promise<void> {
this.fastMap.addToMap(keyArg, cacheEntryArg, { force: true });
}


@@ -24,15 +24,21 @@ export class CacheS3Manager extends AbstractCache {
public async init() {
if (this.levelCacheRef.options.s3Config) {
this.smartbucket = new plugins.smartbucket.SmartBucket(this.levelCacheRef.options.s3Config);
this.smartbucket = new plugins.smartbucket.SmartBucket(
this.levelCacheRef.options.s3Config,
);
this.s3CacheBucket = await this.smartbucket.getBucketByName('');
this.s3CacheDir = await (
await this.s3CacheBucket.getBaseDirectory()
).getSubDirectoryByName(this.levelCacheRef.options.cacheId);
if (this.levelCacheRef.options.maxS3StorageInMB) {
console.log(`cache level S3 activated with ${this.levelCacheRef.options.maxS3StorageInMB}`);
console.log(
`cache level S3 activated with ${this.levelCacheRef.options.maxS3StorageInMB}`,
);
} else {
console.log(`s3 cache started without limit. Automatically applying timebox of 1 month`);
console.log(
`s3 cache started without limit. Automatically applying timebox of 1 month`,
);
}
this.status = 'active';
} else {
@@ -42,16 +48,20 @@ export class CacheS3Manager extends AbstractCache {
}
public async retrieveCacheEntryByKey(keyArg: string): Promise<CacheEntry> {
const jsonFileString = (await this.s3CacheDir.fastGet(encodeURIComponent(keyArg))).toString();
const jsonFileString = (
await this.s3CacheDir.fastGet({
path: encodeURIComponent(keyArg),
})
).toString();
const cacheEntry = CacheEntry.fromStorageJsonString(jsonFileString);
return cacheEntry;
}
public async storeCacheEntryByKey(keyArg: string, cacheEntryArg: CacheEntry) {
await this.s3CacheDir.fastStore(
encodeURIComponent(keyArg),
cacheEntryArg.toStorageJsonString()
);
await this.s3CacheDir.fastPut({
path: encodeURIComponent(keyArg),
contents: cacheEntryArg.toStorageJsonString(),
});
}
public async checkKeyPresence(keyArg: string): Promise<boolean> {
@@ -66,7 +76,9 @@ export class CacheS3Manager extends AbstractCache {
public async deleteCacheEntryByKey(keyArg: string) {
if (this.status === 'active') {
await this.s3CacheDir.fastRemove(encodeURIComponent(keyArg));
await this.s3CacheDir.fastRemove({
path: encodeURIComponent(keyArg),
});
}
}
@@ -76,6 +88,8 @@ export class CacheS3Manager extends AbstractCache {
public async cleanOutdated() {}
public async cleanAll() {
await this.s3CacheDir.deleteWithAllContents();
await this.s3CacheDir.delete({
mode: 'permanent',
});
}
}


@@ -14,12 +14,18 @@ export class CacheRouter {
/**
* gets the relevant cache to perform a store action on
*/
async getCacheForStoreAction(keyArg: string, cacheEntry: CacheEntry): Promise<AbstractCache> {
async getCacheForStoreAction(
keyArg: string,
cacheEntry: CacheEntry,
): Promise<AbstractCache> {
let returnCache: AbstractCache;
const mbToBytesMultiplier = 1000 * 1000;
const maxMemoryBytes = this.levelCacheRef.options.maxMemoryStorageInMB * mbToBytesMultiplier;
const maxDiskBytes = this.levelCacheRef.options.maxDiskStorageInMB * mbToBytesMultiplier;
const maxS3Bytes = this.levelCacheRef.options.maxS3StorageInMB * mbToBytesMultiplier;
const maxMemoryBytes =
this.levelCacheRef.options.maxMemoryStorageInMB * mbToBytesMultiplier;
const maxDiskBytes =
this.levelCacheRef.options.maxDiskStorageInMB * mbToBytesMultiplier;
const maxS3Bytes =
this.levelCacheRef.options.maxS3StorageInMB * mbToBytesMultiplier;
switch (true) {
case cacheEntry.contents.byteLength <= maxMemoryBytes &&
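The hunk above converts the megabyte limits with a decimal 1,000,000 multiplier before comparing them to `cacheEntry.contents.byteLength`. A quick, self-contained illustration of that comparison (0.5 MB is the documented memory default; the buffer sizes are arbitrary):
```typescript
// Mirrors the router arithmetic from the hunk above (decimal megabytes).
const mbToBytesMultiplier = 1000 * 1000;
const maxMemoryStorageInMB = 0.5; // documented default
const maxMemoryBytes = maxMemoryStorageInMB * mbToBytesMultiplier; // 500_000 bytes

const smallEntryBytes = Buffer.from('x'.repeat(400_000)).byteLength; // 400,000 bytes
const bigEntryBytes = Buffer.alloc(600_000).byteLength;              // 600,000 bytes

console.log(smallEntryBytes <= maxMemoryBytes); // true  -> stays in the memory tier
console.log(bigEntryBytes <= maxMemoryBytes);   // false -> falls through to disk/S3
```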


@@ -64,9 +64,15 @@ export class LevelCache extends AbstractCache {
}
// store things
public async storeCacheEntryByKey(keyArg: string, cacheEntryArg: CacheEntry): Promise<void> {
public async storeCacheEntryByKey(
keyArg: string,
cacheEntryArg: CacheEntry,
): Promise<void> {
cacheEntryArg.key = keyArg;
const targetCache = await this.cacheRouter.getCacheForStoreAction(keyArg, cacheEntryArg);
const targetCache = await this.cacheRouter.getCacheForStoreAction(
keyArg,
cacheEntryArg,
);
cacheEntryArg.createdAt = Date.now();
await targetCache.storeCacheEntryByKey(keyArg, cacheEntryArg);
}
@@ -76,7 +82,8 @@ export class LevelCache extends AbstractCache {
* retrieve cache entry
*/
public async retrieveCacheEntryByKey(keyArg: string): Promise<CacheEntry> {
const targetCache = await this.cacheRouter.getCacheForRetrieveAction(keyArg);
const targetCache =
await this.cacheRouter.getCacheForRetrieveAction(keyArg);
if (targetCache) {
const cacheEntry = await targetCache.retrieveCacheEntryByKey(keyArg);
if (cacheEntry.createdAt + cacheEntry.ttl < Date.now()) {


@@ -2,6 +2,6 @@ import * as plugins from './levelcache.plugins.js';
export const packageDir = plugins.path.join(
plugins.smartpath.get.dirnameFromImportMetaUrl(import.meta.url),
'../'
'../',
);
export const nogitDir = plugins.path.join(packageDir, '.nogit/');


@@ -31,6 +31,4 @@ export {
// @tsclass scope
import * as tsclass from '@tsclass/tsclass';
export {
tsclass,
};
export { tsclass };


@@ -7,9 +7,8 @@
"moduleResolution": "NodeNext",
"esModuleInterop": true,
"verbatimModuleSyntax": true,
"strict": true
"baseUrl": ".",
"paths": {}
},
"exclude": [
"dist_*/**/*.d.ts"
]
"exclude": ["dist_*/**/*.d.ts"]
}