diff --git a/BENCHMARK.md b/BENCHMARK.md index 7f1c030..17bd996 100644 --- a/BENCHMARK.md +++ b/BENCHMARK.md @@ -203,7 +203,7 @@ Multi-level caching provides the most benefit when: ## Performance Tips -1. **Tune Memory Strategies:** Adjust `MemoryPercentageLimitStrategy` threshold based on your application's memory profile +1. **Tune Memory Strategies:** Adjust `RamPercentageLimitStrategy` threshold based on your application's memory profile 2. **Choose Appropriate TTL:** Set cache TTL values that balance freshness and hit rate 3. **Monitor Cache Metrics:** Track hit rates to optimize cache configuration 4. **Size Your Cache:** Use Benchmark 4 to estimate memory requirements diff --git a/README.md b/README.md index 8d289ff..62e3379 100644 --- a/README.md +++ b/README.md @@ -68,12 +68,12 @@ import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, - MemoryPercentageLimitStrategy + RamPercentageLimitStrategy } from 'cacheforge'; // Create memory cache with eviction policy and strategy const memoryCache = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], // Trigger at 80% memory + memoryStrategies: [new RamPercentageLimitStrategy(80)], // Trigger at 80% memory evictionPolicy: new FirstExpiringMemoryPolicy() }); @@ -103,11 +103,11 @@ At the top (CacheService), fallbacks are handled. However the added layers do no Fast, in-memory caching using a Map and min-heap for efficient expiration tracking. 
```typescript -import { MemoryCacheLevel, FirstExpiringMemoryPolicy, MemoryPercentageLimitStrategy } from 'cacheforge'; +import { MemoryCacheLevel, FirstExpiringMemoryPolicy, RamPercentageLimitStrategy } from 'cacheforge'; const memoryCache = new MemoryCacheLevel({ memoryStrategies: [ - new MemoryPercentageLimitStrategy(75) // Evict when memory exceeds 75% + new RamPercentageLimitStrategy(75) // Evict when memory exceeds 75% ], evictionPolicy: new FirstExpiringMemoryPolicy() }); @@ -145,14 +145,26 @@ const policy = new FirstExpiringMemoryPolicy(); Strategies check conditions and trigger eviction policies when thresholds are met. -#### MemoryPercentageLimitStrategy +#### MemorySizeLimitStrategy (Recommended Default) +Triggers eviction when the total size of items in the cache exceeds a defined threshold (as a percentage of the Node.js process heap). + +This strategy is recommended as the default for most applications, as it provides a more accurate measurement of cache memory usage and helps prevent out-of-memory errors. + +```typescript +import { MemorySizeLimitStrategy } from 'cacheforge'; + +// Trigger eviction when cache uses 10% or more of Node.js heap +const strategy = new MemorySizeLimitStrategy(10); +``` + +#### RamPercentageLimitStrategy Triggers eviction when system memory usage exceeds a percentage threshold. 
```typescript -import { MemoryPercentageLimitStrategy } from 'cacheforge'; +import { RamPercentageLimitStrategy } from 'cacheforge'; // Trigger eviction at 80% memory usage -const strategy = new MemoryPercentageLimitStrategy(80); +const strategy = new RamPercentageLimitStrategy(80); ``` ## Usage Guide @@ -166,13 +178,13 @@ import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, - MemoryPercentageLimitStrategy + RamPercentageLimitStrategy } from 'cacheforge'; const cache = new CacheService({ levels: [ new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy() }) ], @@ -202,12 +214,12 @@ import { MemoryCacheLevel, RedisCacheLevel, FirstExpiringMemoryPolicy, - MemoryPercentageLimitStrategy + RamPercentageLimitStrategy } from 'cacheforge'; import Redis from 'ioredis'; const memoryCache = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(75)], + memoryStrategies: [new RamPercentageLimitStrategy(75)], evictionPolicy: new FirstExpiringMemoryPolicy() }); @@ -236,7 +248,7 @@ import Redis from 'ioredis'; const cache = new CacheService({ levels: [ new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy() }), new RedisCacheLevel(new Redis()) @@ -556,8 +568,11 @@ await cache.invalidateKey('user:123'); ### 4. Memory Strategy Thresholds -- Development: 80-90% (more headroom) -- Production: 70-75% (prevent OOM issues) + +- **Recommended Default:** Use `MemorySizeLimitStrategy` with a threshold of 10-20% of Node.js heap for most production workloads. +- **RamPercentageLimitStrategy:** + - Development: 80-90% (more headroom) + - Production: 70-75% (prevent OOM issues) ### 5. 
Distributed Locking @@ -608,14 +623,14 @@ Example test using the library: ```typescript import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, MemoryPercentageLimitStrategy } from 'cacheforge'; +import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, RamPercentageLimitStrategy } from 'cacheforge'; describe('Cache Service', () => { let cache: CacheService; beforeEach(() => { const memoryCache = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy() }); diff --git a/package-lock.json b/package-lock.json index 3071851..565c7b6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "cacheforge", - "version": "1.1.0", + "version": "1.2.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "cacheforge", - "version": "1.1.0", + "version": "1.2.0", "dependencies": { "@datastructures-js/heap": "^4.3.7", "@sesamecare-oss/redlock": "^1.4.0", diff --git a/package.json b/package.json index 7c5d55b..5178076 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cacheforge", - "version": "1.1.0", + "version": "1.2.0", "description": "A multi-level caching library for Node.js applications, supporting in-memory and Redis, and custom cache levels.", "main": "dist/src/index.js", "types": "dist/src/index.d.ts", diff --git a/src/cache.service.spec.ts b/src/cache.service.spec.ts index 502c904..29593b2 100644 --- a/src/cache.service.spec.ts +++ b/src/cache.service.spec.ts @@ -13,7 +13,7 @@ import { type StoredHeapItem, } from "./levels"; import { FirstExpiringMemoryPolicy } from "./policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "./strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from 
"./strategies/ram-percentage-limit.strategy"; let redisContainer: StartedRedisContainer; let redisLevel: RedisCacheLevel; @@ -25,7 +25,7 @@ let faultyFirstLevelVersionedCacheService: CacheService; let faultyFirstLevelCacheService: CacheService; let allFaultyLevelsCacheService: CacheService; let allFaultyLevelsVersionedCacheService: CacheService; -const memoryStrategy = new MemoryPercentageLimitStrategy(70); +const memoryStrategy = new RamPercentageLimitStrategy(70); const evictionPolicy = new FirstExpiringMemoryPolicy(); memoryLevel = new MemoryCacheLevel({ memoryStrategies: [memoryStrategy], diff --git a/src/levels/interfaces/in-memory.ts b/src/levels/interfaces/in-memory.ts index 73ab71b..0ddf300 100644 --- a/src/levels/interfaces/in-memory.ts +++ b/src/levels/interfaces/in-memory.ts @@ -12,4 +12,10 @@ export interface InMemory { * @return Array of items in the heap. */ getHeap(): MemoryHeap; + + /** + * Get the size of the key-value store in bytes. + * @return Size of the store in bytes. 
+ */ + getStoreSize(): number; } diff --git a/src/levels/memory/eviction-manager.spec.ts b/src/levels/memory/eviction-manager.spec.ts index baed1ab..6560bd4 100644 --- a/src/levels/memory/eviction-manager.spec.ts +++ b/src/levels/memory/eviction-manager.spec.ts @@ -1,7 +1,7 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; import { AbstractMemoryEvictionPolicy } from "../../policies/abstract/abstract-memory-eviction.policy"; import { FirstExpiringMemoryPolicy } from "../../policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "../../strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../../strategies/ram-percentage-limit.strategy"; import { EvictionManager } from "./eviction-manager"; import { MemoryCacheLevel, type StoredHeapItem } from "./memory.level"; import { triggerMemoryChange } from "./memory-event.manager"; @@ -14,11 +14,11 @@ describe("EvictionManager", () => { let memoryLevel: MemoryCacheLevel; let evictionPolicy: FirstExpiringMemoryPolicy; let memoryWithoutEvictionPolicy: MemoryCacheLevel; - let memoryStrategy: MemoryPercentageLimitStrategy; + let memoryStrategy: RamPercentageLimitStrategy; beforeEach(() => { evictionPolicy = new FirstExpiringMemoryPolicy(); - memoryStrategy = new MemoryPercentageLimitStrategy(0); // Always triggers + memoryStrategy = new RamPercentageLimitStrategy(0); // Always triggers memoryLevel = new MemoryCacheLevel({ memoryStrategies: [memoryStrategy], evictionPolicy, @@ -73,7 +73,7 @@ describe("EvictionManager", () => { }); it("does not evict if no strategy triggers", async () => { - const neverStrategy = new MemoryPercentageLimitStrategy(100); // Never triggers + const neverStrategy = new RamPercentageLimitStrategy(100); // Never triggers const neverOptions = { memoryStrategies: [neverStrategy], evictionPolicy, diff --git a/src/levels/memory/memory.level.spec.ts b/src/levels/memory/memory.level.spec.ts index 2e988df..4cabad1 100644 --- 
a/src/levels/memory/memory.level.spec.ts +++ b/src/levels/memory/memory.level.spec.ts @@ -2,11 +2,11 @@ import { faker, fakerZH_TW } from "@faker-js/faker"; import { afterEach, describe, expect, it, vi } from "vitest"; import { generateJSONData } from "../../../tests/utilities/data.utilities"; import { FirstExpiringMemoryPolicy } from "../../policies"; -import { MemoryPercentageLimitStrategy } from "../../strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../../strategies/ram-percentage-limit.strategy"; import { MemoryCacheLevel, type StoredHeapItem } from ".."; const evictionPolicy = new FirstExpiringMemoryPolicy(); -const strategy = new MemoryPercentageLimitStrategy(80); +const strategy = new RamPercentageLimitStrategy(80); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: evictionPolicy, @@ -125,6 +125,12 @@ describe("should successfully store data, and retrieve it on demand", async () = await cacheEngine.mget(["bingo", "bingo1", "bingo2"]), ).toEqual([undefined, undefined, undefined]); }); + + it("should get store size in bytes", () => { + const storeSize = cacheEngine.getStoreSize(); + expect(typeof storeSize).toBe("number"); + expect(storeSize).toBeGreaterThanOrEqual(0); + }); }); describe("It should successfully manage the application memory usage", () => { diff --git a/src/levels/memory/memory.level.ts b/src/levels/memory/memory.level.ts index 1d342cc..eb2af15 100644 --- a/src/levels/memory/memory.level.ts +++ b/src/levels/memory/memory.level.ts @@ -3,6 +3,7 @@ import { DEFAULT_TTL } from "../../constants"; import type { AbstractMemoryEvictionPolicy } from "../../policies/abstract/abstract-memory-eviction.policy"; import type { MemoryManagementStrategy } from "../../strategies/interfaces/memory-management-strategy"; import { createCacheHeap } from "../../utils/heap.utils"; +import { serialize } from "../../utils/parsing.utils"; import type { CacheLevel } from 
"../interfaces/cache-level"; import type { InMemory } from "../interfaces/in-memory"; import type { Purgable } from "../interfaces/purgable"; @@ -26,6 +27,7 @@ export class MemoryCacheLevel implements CacheLevel, Purgable, InMemory { protected store = new Map(); + protected size = 0; protected heap = createCacheHeap((item) => item.expiry); protected evictionManager: EvictionManager; @@ -41,17 +43,17 @@ export class MemoryCacheLevel await Promise.all(deletePromises); } - private insertHeapItem(item: StoredHeapItem) { - this.heap.insert(item); - } - private updateStore(key: string, item: StoredItem) { this.store.set(key, item); - - this.insertHeapItem({ ...item, key }); + this.heap.insert({ ...item, key }); + this.size += serialize(item).length; triggerMemoryChange(); } + public getStoreSize(): number { + return this.size; + } + async mset( keys: string[], values: T[], diff --git a/src/levels/redis/redis.level.ts b/src/levels/redis/redis.level.ts index a2da3c2..a73585c 100644 --- a/src/levels/redis/redis.level.ts +++ b/src/levels/redis/redis.level.ts @@ -3,10 +3,7 @@ import type IoRedis from "ioredis"; import type { Cluster } from "ioredis"; import { DEFAULT_TTL } from "../../constants"; import { parseIfJSON } from "../../utils/cache.utils"; -import { - deserializeFromRedis, - serializeForRedis, -} from "../../utils/parsing.utils"; +import { deserialize, serialize } from "../../utils/parsing.utils"; import { generateVersionLookupKey } from "../../utils/version.utils"; import type { CacheLevel } from "../interfaces/cache-level"; import type { Lockable } from "../interfaces/lockable"; @@ -38,7 +35,7 @@ export class RedisCacheLevel implements CacheLevel, Lockable { for (let i = 0; i < keys.length; i++) { const key = keys[i]; const value = values[i]; - pipeline.set(key, serializeForRedis(value), "EX", ttl); + pipeline.set(key, serialize(value), "EX", ttl); } await pipeline.exec(); @@ -56,7 +53,7 @@ export class RedisCacheLevel implements CacheLevel, Lockable { if 
(cachedValue === null || cachedValue === undefined) { finalResults.push(undefined as T); } else { - finalResults.push(deserializeFromRedis(cachedValue)); + finalResults.push(deserialize(cachedValue)); } } @@ -64,7 +61,7 @@ export class RedisCacheLevel implements CacheLevel, Lockable { } async set(key: string, value: T, ttl = DEFAULT_TTL) { - await this.client.set(key, serializeForRedis(value), "EX", ttl); + await this.client.set(key, serialize(value), "EX", ttl); return parseIfJSON(value) as T; } diff --git a/src/policies/first-expiring-memory.policy.spec.ts b/src/policies/first-expiring-memory.policy.spec.ts index 02a6100..33a3e89 100644 --- a/src/policies/first-expiring-memory.policy.spec.ts +++ b/src/policies/first-expiring-memory.policy.spec.ts @@ -1,11 +1,11 @@ import { afterEach, describe, expect, it } from "vitest"; import { generateJSONData } from "../../tests/utilities/data.utilities"; import { MemoryCacheLevel } from "../levels"; -import { MemoryPercentageLimitStrategy } from "../strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../strategies/ram-percentage-limit.strategy"; import { FirstExpiringMemoryPolicy } from "./first-expiring-memory.policy"; const policy = new FirstExpiringMemoryPolicy(); -const strategy = new MemoryPercentageLimitStrategy(80); +const strategy = new RamPercentageLimitStrategy(80); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, @@ -33,10 +33,9 @@ describe("First Expiring Memory Policy", () => { const policy = new FirstExpiringMemoryPolicy(); - for (let i = 0; i <= 10000; i++) { - await policy.evict(cacheEngine); - } + await policy.evict(cacheEngine); - expect(cacheEngine.getHeap().getCount()).toEqual(0); + // 10% has been removed. 
+ expect(cacheEngine.getHeap().getCount()).toEqual(900); }); }); diff --git a/src/strategies/index.ts b/src/strategies/index.ts index 01080f6..b1f382f 100644 --- a/src/strategies/index.ts +++ b/src/strategies/index.ts @@ -1 +1,2 @@ -export * from "./memory-percentage-limit.strategy"; +export * from "./memory-size-limit.strategy"; +export * from "./ram-percentage-limit.strategy"; diff --git a/src/strategies/memory-percentage-limit.strategy.ts b/src/strategies/memory-percentage-limit.strategy.ts deleted file mode 100644 index 9a80064..0000000 --- a/src/strategies/memory-percentage-limit.strategy.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { StoredHeapItem } from "../levels"; -import type { InMemory } from "../levels/interfaces/in-memory"; -import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; - -export class MemoryPercentageLimitStrategy - implements MemoryManagementStrategy -{ - constructor(private threshold: number) {} - checkCondition(memory: InMemory): boolean { - const usage = memory.getMemoryUsage(); - return usage > this.threshold; - } -} diff --git a/src/strategies/memory-size-limit.strategy.spec.ts b/src/strategies/memory-size-limit.strategy.spec.ts new file mode 100644 index 0000000..887a35c --- /dev/null +++ b/src/strategies/memory-size-limit.strategy.spec.ts @@ -0,0 +1,75 @@ +import { describe, expect, it } from "vitest"; +import { generateJSONData } from "../../tests/utilities/data.utilities"; +import { MemoryCacheLevel, type StoredHeapItem } from "../levels"; +import { FirstExpiringMemoryPolicy } from "../policies/first-expiring-memory.policy"; +import { MemorySizeLimitStrategy } from "./memory-size-limit.strategy"; + +describe("MemorySizeLimitStrategy will ensure memory usage is within limits", () => { + it("should not clear memory when memory usage does not exceed threshold", async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(0.01); + const cacheEngine = 
new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + const heapSize = cacheEngine.getHeap().getTotalSize(); + + expect(heapSize).toBe(0); + + expect(strategy.checkCondition(cacheEngine)).toBe(false); + + await policy.evict(cacheEngine); + + // The heap size should remain unchanged because no eviction is needed + const heapSnapshot = cacheEngine.getHeap().getSnapshot(); + expect(heapSnapshot.length).toBe(heapSize); + }); + + it("should trigger eviction when memory usage exceeds threshold", async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(0); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + expect(strategy.checkCondition(cacheEngine)).toBe(true); + + await policy.evict(cacheEngine); + + // The heap should be empty after eviction + const heapSnapshot = cacheEngine.getHeap().getSnapshot(); + expect(heapSnapshot.length).toBe(0); + }); + + it("should evict items when memory usage exceeds threshold after adding bulk data", async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(1); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + await generateJSONData(cacheEngine, 10000); + + const postEvictionSnapshot = cacheEngine.getHeap().getSnapshot(); + + expect(postEvictionSnapshot.length).toBeLessThan(10000); + }); + + it("should not evict items when memory usage is within threshold after adding bulk data", async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(50); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + await generateJSONData(cacheEngine, 1000); + + const postEvictionSnapshot = cacheEngine.getHeap().getSnapshot(); + + expect(postEvictionSnapshot.length).toBe(1000); + }); 
+}); diff --git a/src/strategies/memory-size-limit.strategy.ts b/src/strategies/memory-size-limit.strategy.ts new file mode 100644 index 0000000..4da5541 --- /dev/null +++ b/src/strategies/memory-size-limit.strategy.ts @@ -0,0 +1,26 @@ +import type { StoredHeapItem } from "../levels"; +import type { InMemory } from "../levels/interfaces/in-memory"; +import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; + +/** + * This strategy checks if the total size of items in the cache exceeds a defined threshold. + * The threshold is a percentage of the Node.js process heap (process.memoryUsage().heapTotal). + * If the size exceeds the threshold, it signals that eviction should occur. + * This helps in managing memory usage effectively and prevents the cache from consuming excessive memory. + * For instance, if the cache occupies 11% of the Node.js heap, and the threshold is set to 10%, + * the strategy will indicate that eviction is necessary. + */ + +export class MemorySizeLimitStrategy + implements MemoryManagementStrategy +{ + constructor(private threshold: number) {} + checkCondition(memory: InMemory): boolean { + const heap = memory.getHeap(); + const heapSize = heap.getTotalSize(); + const keyStoreSize = memory.getStoreSize(); + const totalSize = heapSize + keyStoreSize; + const usage = (totalSize / process.memoryUsage().heapTotal) * 100; + return usage >= this.threshold; + } +} diff --git a/src/strategies/memory-percentage-limit.strategy.spec.ts b/src/strategies/ram-percentage-limit.strategy.spec.ts similarity index 79% rename from src/strategies/memory-percentage-limit.strategy.spec.ts rename to src/strategies/ram-percentage-limit.strategy.spec.ts index abcf829..d1678fa 100644 --- a/src/strategies/memory-percentage-limit.strategy.spec.ts +++ b/src/strategies/ram-percentage-limit.strategy.spec.ts @@ -1,12 +1,12 @@ import { describe, expect, it } from "vitest"; import { MemoryCacheLevel, type StoredHeapItem } from "../levels"; import {
FirstExpiringMemoryPolicy } from "../policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "./memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "./ram-percentage-limit.strategy"; -describe("MemoryPercentageLimitStrategy will ensure memory usage is within limits", () => { +describe("RamPercentageLimitStrategy will ensure memory usage is within limits", () => { it("should trigger eviction when memory usage does not exceed threshold", async () => { const policy = new FirstExpiringMemoryPolicy(); - const strategy = new MemoryPercentageLimitStrategy(80); + const strategy = new RamPercentageLimitStrategy(80); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, @@ -24,7 +24,7 @@ describe("MemoryPercentageLimitStrategy will ensure memory usage is within limit it("should trigger eviction when memory usage exceeds threshold", async () => { const policy = new FirstExpiringMemoryPolicy(); - const strategy = new MemoryPercentageLimitStrategy(0); + const strategy = new RamPercentageLimitStrategy(0); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, diff --git a/src/strategies/ram-percentage-limit.strategy.ts b/src/strategies/ram-percentage-limit.strategy.ts new file mode 100644 index 0000000..3ea4b84 --- /dev/null +++ b/src/strategies/ram-percentage-limit.strategy.ts @@ -0,0 +1,20 @@ +import type { StoredHeapItem } from "../levels"; +import type { InMemory } from "../levels/interfaces/in-memory"; +import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; + +/** + * This strategy checks if the current RAM usage percentage exceeds a defined threshold. + * If the usage exceeds the threshold, it signals that eviction should occur. + * This is useful for preventing the cache from consuming too much memory + * and ensures that the application remains responsive. 
+ * However, it may lead to more frequent evictions in memory-constrained environments. + */ +export class RamPercentageLimitStrategy + implements MemoryManagementStrategy +{ + constructor(private threshold: number) {} + checkCondition(memory: InMemory): boolean { + const usage = memory.getMemoryUsage(); + return usage > this.threshold; + } +} diff --git a/src/utils/heap.utils.ts b/src/utils/heap.utils.ts index 7f91dca..eccb7a1 100644 --- a/src/utils/heap.utils.ts +++ b/src/utils/heap.utils.ts @@ -1,15 +1,34 @@ import { MinHeap } from "@datastructures-js/heap"; +import { serialize } from "./parsing.utils"; export interface MemoryHeap extends MinHeap { rebuild: (items: T[]) => void; getSnapshot: () => T[]; getCount: () => number; + getTotalSize: () => number; } export const createCacheHeap = (comparator: (a: T) => number) => { let itemCounter = 0; + let totalSize = 0; const heap = new MinHeap(comparator) as MemoryHeap; + const originalInsert = heap.insert.bind(heap); + heap.insert = (item: T) => { + itemCounter++; + totalSize += Buffer.byteLength(serialize(item), "utf8"); + return originalInsert(item); + }; + + heap.getTotalSize = () => totalSize; + + const originalClear = heap.clear.bind(heap); + heap.clear = () => { + itemCounter = 0; + totalSize = 0; + return originalClear(); + }; + heap.rebuild = (items: T[]) => { heap.clear(); items.forEach((item) => { @@ -21,19 +40,9 @@ export const createCacheHeap = (comparator: (a: T) => number) => { const originalPop = heap.pop.bind(heap); heap.pop = () => { itemCounter -= 1; - return originalPop(); - }; - - const originalInsert = heap.insert.bind(heap); - heap.insert = (item: T) => { - itemCounter++; - return originalInsert(item); - }; - - const originalClear = heap.clear.bind(heap); - heap.clear = () => { - itemCounter = 0; - return originalClear(); + const item = originalPop(); + if (item !== null) totalSize -= Buffer.byteLength(serialize(item), "utf8"); + return item; }; heap.getSnapshot = () => Array.from(heap); diff --git
a/src/utils/parsing.utils.spec.ts b/src/utils/parsing.utils.spec.ts index 85058e1..fde0efd 100644 --- a/src/utils/parsing.utils.spec.ts +++ b/src/utils/parsing.utils.spec.ts @@ -1,6 +1,6 @@ import { faker } from "@faker-js/faker"; import { describe, expect, it } from "vitest"; -import { deserializeFromRedis } from "./parsing.utils"; +import { deserialize } from "./parsing.utils"; describe("deserialise from redis", () => { it("should handle standard objects, even if not parsed with superjson", () => { @@ -11,6 +11,6 @@ describe("deserialise from redis", () => { const parsed = JSON.stringify(object); - expect(deserializeFromRedis(parsed as never)).toEqual(object); + expect(deserialize(parsed as never)).toEqual(object); }); }); diff --git a/src/utils/parsing.utils.ts b/src/utils/parsing.utils.ts index c58b7c8..a3469c6 100644 --- a/src/utils/parsing.utils.ts +++ b/src/utils/parsing.utils.ts @@ -5,15 +5,15 @@ import { JSONParse, JSONStringify } from "json-with-bigint"; * @param item The value to serialize * @returns A string safe for Redis storage */ -export const serializeForRedis = (item: unknown) => { +export const serialize = (item: unknown) => { return JSONStringify(item); }; /** - * Deserializes a value from Redis (previously stored with serializeForRedis). + * Deserializes a value from Redis (previously stored with serialize). 
* @param str The string from Redis * @returns The original JS value */ -export const deserializeFromRedis = (str: never) => { +export const deserialize = (str: never) => { return JSONParse(str); }; diff --git a/tests/benchmarks/cache.benchmark.spec.ts b/tests/benchmarks/cache.benchmark.spec.ts index bad88f9..2e37480 100644 --- a/tests/benchmarks/cache.benchmark.spec.ts +++ b/tests/benchmarks/cache.benchmark.spec.ts @@ -10,7 +10,7 @@ import { RedisCacheLevel, } from "../../src/levels"; import { FirstExpiringMemoryPolicy } from "../../src/policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "../../src/strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../../src/strategies/ram-percentage-limit.strategy"; import type { StoredHeapItem } from "../../src/levels/memory/memory.level"; import { type BenchmarkResult, @@ -52,7 +52,7 @@ describe("Cache Performance Benchmarks", () => { // Create fresh cache instances for this test const redisClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); @@ -113,7 +113,7 @@ describe("Cache Performance Benchmarks", () => { const redisOnlyClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); @@ -190,7 +190,7 @@ describe("Cache Performance Benchmarks", () => { const redisOnlyClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + 
memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); @@ -259,7 +259,7 @@ describe("Cache Performance Benchmarks", () => { // Create fresh cache instance for this test const redisClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); diff --git a/tests/utilities/cache-setup.ts b/tests/utilities/cache-setup.ts new file mode 100644 index 0000000..18c0a24 --- /dev/null +++ b/tests/utilities/cache-setup.ts @@ -0,0 +1,32 @@ +import { Redis } from 'ioredis'; +import { CacheService } from '../../src/cache.service'; +import { MemoryCacheLevel, RedisCacheLevel } from '../../src/levels'; +import { FirstExpiringMemoryPolicy } from '../../src/policies/first-expiring-memory.policy'; +import { RamPercentageLimitStrategy } from '../../src/strategies/ram-percentage-limit.strategy'; +import type { StoredHeapItem } from '../../src/levels/memory/memory.level'; + +export function createMemoryLevel(): MemoryCacheLevel { + return new MemoryCacheLevel({ + memoryStrategies: [new RamPercentageLimitStrategy(80)], + evictionPolicy: new FirstExpiringMemoryPolicy(), + }); +} + +export function createRedisLevel(redisClient: Redis): RedisCacheLevel { + return new RedisCacheLevel(redisClient); +} + +export function createMultiLevelCache(memoryLevel: MemoryCacheLevel, redisLevel: RedisCacheLevel): CacheService { + return new CacheService({ + levels: [memoryLevel, redisLevel], + defaultTTL: 3600, + }); +} + +export async function prepopulateCache(cache: CacheService, uniqueKeys: number, valueFactory: (i: number) => any) { + for (let i = 0; i < uniqueKeys; i++) { + const key = `benchmark_key_${i}`; + const value = valueFactory(i); 
+ await cache.set(key, value); + } +} diff --git a/tests/utilities/data.utilities.ts b/tests/utilities/data.utilities.ts index ce55dd7..d1abd0b 100644 --- a/tests/utilities/data.utilities.ts +++ b/tests/utilities/data.utilities.ts @@ -6,7 +6,7 @@ export async function generateJSONData( recordNum: number, ) { for (let i = 0; i < recordNum; i++) { - const randomTTL = Date.now() + Math.floor(Math.random() * 3600) + 1; + const ttl = Date.now() + 3600; await cacheLevel.set( `key${i}`, { @@ -22,7 +22,7 @@ export async function generateJSONData( 3, ), }, - ttl, ); } }