From 2c9d50c26f095b4e4e208a0b6d53219f5b281166 Mon Sep 17 00:00:00 2001 From: Oliver Kucharzewski Date: Sun, 9 Nov 2025 13:40:45 +1100 Subject: [PATCH 1/4] refactor: replace MemoryPercentageLimitStrategy with RamPercentageLimitStrategy and update related tests and documentation --- BENCHMARK.md | 2 +- README.md | 28 +++---- src/cache.service.spec.ts | 4 +- src/levels/memory/eviction-manager.spec.ts | 8 +- src/levels/memory/memory.level.spec.ts | 4 +- src/levels/redis/redis.level.ts | 10 +-- .../first-expiring-memory.policy.spec.ts | 11 ++- src/strategies/index.ts | 3 +- .../memory-percentage-limit.strategy.ts | 13 ---- .../memory-size-limit.strategy.spec.ts | 75 +++++++++++++++++++ src/strategies/memory-size-limit.strategy.ts | 24 ++++++ ... => ram-percentage-limit.strategy.spec.ts} | 8 +- .../ram-percentage-limit.strategy.ts | 20 +++++ src/utils/heap.utils.ts | 37 +++++---- src/utils/parsing.utils.spec.ts | 4 +- src/utils/parsing.utils.ts | 6 +- tests/benchmarks/cache.benchmark.spec.ts | 10 +-- tests/utilities/cache-setup.ts | 32 ++++++++ tests/utilities/data.utilities.ts | 4 +- 19 files changed, 226 insertions(+), 77 deletions(-) delete mode 100644 src/strategies/memory-percentage-limit.strategy.ts create mode 100644 src/strategies/memory-size-limit.strategy.spec.ts create mode 100644 src/strategies/memory-size-limit.strategy.ts rename src/strategies/{memory-percentage-limit.strategy.spec.ts => ram-percentage-limit.strategy.spec.ts} (79%) create mode 100644 src/strategies/ram-percentage-limit.strategy.ts create mode 100644 tests/utilities/cache-setup.ts diff --git a/BENCHMARK.md b/BENCHMARK.md index 7f1c030..17bd996 100644 --- a/BENCHMARK.md +++ b/BENCHMARK.md @@ -203,7 +203,7 @@ Multi-level caching provides the most benefit when: ## Performance Tips -1. **Tune Memory Strategies:** Adjust `MemoryPercentageLimitStrategy` threshold based on your application's memory profile +1. 
**Tune Memory Strategies:** Adjust `RamPercentageLimitStrategy` threshold based on your application's memory profile 2. **Choose Appropriate TTL:** Set cache TTL values that balance freshness and hit rate 3. **Monitor Cache Metrics:** Track hit rates to optimize cache configuration 4. **Size Your Cache:** Use Benchmark 4 to estimate memory requirements diff --git a/README.md b/README.md index 8d289ff..8a69d14 100644 --- a/README.md +++ b/README.md @@ -68,12 +68,12 @@ import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, - MemoryPercentageLimitStrategy + RamPercentageLimitStrategy } from 'cacheforge'; // Create memory cache with eviction policy and strategy const memoryCache = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], // Trigger at 80% memory + memoryStrategies: [new RamPercentageLimitStrategy(80)], // Trigger at 80% memory evictionPolicy: new FirstExpiringMemoryPolicy() }); @@ -103,11 +103,11 @@ At the top (CacheService), fallbacks are handled. However the added layers do no Fast, in-memory caching using a Map and min-heap for efficient expiration tracking. ```typescript -import { MemoryCacheLevel, FirstExpiringMemoryPolicy, MemoryPercentageLimitStrategy } from 'cacheforge'; +import { MemoryCacheLevel, FirstExpiringMemoryPolicy, RamPercentageLimitStrategy } from 'cacheforge'; const memoryCache = new MemoryCacheLevel({ memoryStrategies: [ - new MemoryPercentageLimitStrategy(75) // Evict when memory exceeds 75% + new RamPercentageLimitStrategy(75) // Evict when memory exceeds 75% ], evictionPolicy: new FirstExpiringMemoryPolicy() }); @@ -145,14 +145,14 @@ const policy = new FirstExpiringMemoryPolicy(); Strategies check conditions and trigger eviction policies when thresholds are met. -#### MemoryPercentageLimitStrategy +#### RamPercentageLimitStrategy Triggers eviction when system memory usage exceeds a percentage threshold. 
```typescript -import { MemoryPercentageLimitStrategy } from 'cacheforge'; +import { RamPercentageLimitStrategy } from 'cacheforge'; // Trigger eviction at 80% memory usage -const strategy = new MemoryPercentageLimitStrategy(80); +const strategy = new RamPercentageLimitStrategy(80); ``` ## Usage Guide @@ -166,13 +166,13 @@ import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, - MemoryPercentageLimitStrategy + RamPercentageLimitStrategy } from 'cacheforge'; const cache = new CacheService({ levels: [ new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy() }) ], @@ -202,12 +202,12 @@ import { MemoryCacheLevel, RedisCacheLevel, FirstExpiringMemoryPolicy, - MemoryPercentageLimitStrategy + RamPercentageLimitStrategy } from 'cacheforge'; import Redis from 'ioredis'; const memoryCache = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(75)], + memoryStrategies: [new RamPercentageLimitStrategy(75)], evictionPolicy: new FirstExpiringMemoryPolicy() }); @@ -236,7 +236,7 @@ import Redis from 'ioredis'; const cache = new CacheService({ levels: [ new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy() }), new RedisCacheLevel(new Redis()) @@ -608,14 +608,14 @@ Example test using the library: ```typescript import { describe, it, expect, beforeEach, afterEach } from 'vitest'; -import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, MemoryPercentageLimitStrategy } from 'cacheforge'; +import { CacheService, MemoryCacheLevel, FirstExpiringMemoryPolicy, RamPercentageLimitStrategy } from 'cacheforge'; describe('Cache Service', () => { let cache: CacheService; beforeEach(() => { const memoryCache = new MemoryCacheLevel({ - memoryStrategies: [new 
MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy() }); diff --git a/src/cache.service.spec.ts b/src/cache.service.spec.ts index 502c904..29593b2 100644 --- a/src/cache.service.spec.ts +++ b/src/cache.service.spec.ts @@ -13,7 +13,7 @@ import { type StoredHeapItem, } from "./levels"; import { FirstExpiringMemoryPolicy } from "./policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "./strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "./strategies/ram-percentage-limit.strategy"; let redisContainer: StartedRedisContainer; let redisLevel: RedisCacheLevel; @@ -25,7 +25,7 @@ let faultyFirstLevelVersionedCacheService: CacheService; let faultyFirstLevelCacheService: CacheService; let allFaultyLevelsCacheService: CacheService; let allFaultyLevelsVersionedCacheService: CacheService; -const memoryStrategy = new MemoryPercentageLimitStrategy(70); +const memoryStrategy = new RamPercentageLimitStrategy(70); const evictionPolicy = new FirstExpiringMemoryPolicy(); memoryLevel = new MemoryCacheLevel({ memoryStrategies: [memoryStrategy], diff --git a/src/levels/memory/eviction-manager.spec.ts b/src/levels/memory/eviction-manager.spec.ts index baed1ab..6560bd4 100644 --- a/src/levels/memory/eviction-manager.spec.ts +++ b/src/levels/memory/eviction-manager.spec.ts @@ -1,7 +1,7 @@ import { beforeEach, describe, expect, it, vi } from "vitest"; import { AbstractMemoryEvictionPolicy } from "../../policies/abstract/abstract-memory-eviction.policy"; import { FirstExpiringMemoryPolicy } from "../../policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "../../strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../../strategies/ram-percentage-limit.strategy"; import { EvictionManager } from "./eviction-manager"; import { MemoryCacheLevel, type StoredHeapItem } 
from "./memory.level"; import { triggerMemoryChange } from "./memory-event.manager"; @@ -14,11 +14,11 @@ describe("EvictionManager", () => { let memoryLevel: MemoryCacheLevel; let evictionPolicy: FirstExpiringMemoryPolicy; let memoryWithoutEvictionPolicy: MemoryCacheLevel; - let memoryStrategy: MemoryPercentageLimitStrategy; + let memoryStrategy: RamPercentageLimitStrategy; beforeEach(() => { evictionPolicy = new FirstExpiringMemoryPolicy(); - memoryStrategy = new MemoryPercentageLimitStrategy(0); // Always triggers + memoryStrategy = new RamPercentageLimitStrategy(0); // Always triggers memoryLevel = new MemoryCacheLevel({ memoryStrategies: [memoryStrategy], evictionPolicy, @@ -73,7 +73,7 @@ describe("EvictionManager", () => { }); it("does not evict if no strategy triggers", async () => { - const neverStrategy = new MemoryPercentageLimitStrategy(100); // Never triggers + const neverStrategy = new RamPercentageLimitStrategy(100); // Never triggers const neverOptions = { memoryStrategies: [neverStrategy], evictionPolicy, diff --git a/src/levels/memory/memory.level.spec.ts b/src/levels/memory/memory.level.spec.ts index 2e988df..0977160 100644 --- a/src/levels/memory/memory.level.spec.ts +++ b/src/levels/memory/memory.level.spec.ts @@ -2,11 +2,11 @@ import { faker, fakerZH_TW } from "@faker-js/faker"; import { afterEach, describe, expect, it, vi } from "vitest"; import { generateJSONData } from "../../../tests/utilities/data.utilities"; import { FirstExpiringMemoryPolicy } from "../../policies"; -import { MemoryPercentageLimitStrategy } from "../../strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../../strategies/ram-percentage-limit.strategy"; import { MemoryCacheLevel, type StoredHeapItem } from ".."; const evictionPolicy = new FirstExpiringMemoryPolicy(); -const strategy = new MemoryPercentageLimitStrategy(80); +const strategy = new RamPercentageLimitStrategy(80); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: 
[strategy], evictionPolicy: evictionPolicy, diff --git a/src/levels/redis/redis.level.ts b/src/levels/redis/redis.level.ts index a2da3c2..25ad344 100644 --- a/src/levels/redis/redis.level.ts +++ b/src/levels/redis/redis.level.ts @@ -4,8 +4,8 @@ import type { Cluster } from "ioredis"; import { DEFAULT_TTL } from "../../constants"; import { parseIfJSON } from "../../utils/cache.utils"; import { - deserializeFromRedis, - serializeForRedis, + deserialize, + serialize, } from "../../utils/parsing.utils"; import { generateVersionLookupKey } from "../../utils/version.utils"; import type { CacheLevel } from "../interfaces/cache-level"; @@ -38,7 +38,7 @@ export class RedisCacheLevel implements CacheLevel, Lockable { for (let i = 0; i < keys.length; i++) { const key = keys[i]; const value = values[i]; - pipeline.set(key, serializeForRedis(value), "EX", ttl); + pipeline.set(key, serialize(value), "EX", ttl); } await pipeline.exec(); @@ -56,7 +56,7 @@ export class RedisCacheLevel implements CacheLevel, Lockable { if (cachedValue === null || cachedValue === undefined) { finalResults.push(undefined as T); } else { - finalResults.push(deserializeFromRedis(cachedValue)); + finalResults.push(deserialize(cachedValue)); } } @@ -64,7 +64,7 @@ export class RedisCacheLevel implements CacheLevel, Lockable { } async set(key: string, value: T, ttl = DEFAULT_TTL) { - await this.client.set(key, serializeForRedis(value), "EX", ttl); + await this.client.set(key, serialize(value), "EX", ttl); return parseIfJSON(value) as T; } diff --git a/src/policies/first-expiring-memory.policy.spec.ts b/src/policies/first-expiring-memory.policy.spec.ts index 02a6100..33a3e89 100644 --- a/src/policies/first-expiring-memory.policy.spec.ts +++ b/src/policies/first-expiring-memory.policy.spec.ts @@ -1,11 +1,11 @@ import { afterEach, describe, expect, it } from "vitest"; import { generateJSONData } from "../../tests/utilities/data.utilities"; import { MemoryCacheLevel } from "../levels"; -import { 
MemoryPercentageLimitStrategy } from "../strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../strategies/ram-percentage-limit.strategy"; import { FirstExpiringMemoryPolicy } from "./first-expiring-memory.policy"; const policy = new FirstExpiringMemoryPolicy(); -const strategy = new MemoryPercentageLimitStrategy(80); +const strategy = new RamPercentageLimitStrategy(80); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, @@ -33,10 +33,9 @@ describe("First Expiring Memory Policy", () => { const policy = new FirstExpiringMemoryPolicy(); - for (let i = 0; i <= 10000; i++) { - await policy.evict(cacheEngine); - } + await policy.evict(cacheEngine); - expect(cacheEngine.getHeap().getCount()).toEqual(0); + // 10% has been removed. + expect(cacheEngine.getHeap().getCount()).toEqual(900); }); }); diff --git a/src/strategies/index.ts b/src/strategies/index.ts index 01080f6..575a88c 100644 --- a/src/strategies/index.ts +++ b/src/strategies/index.ts @@ -1 +1,2 @@ -export * from "./memory-percentage-limit.strategy"; +export * from "./ram-percentage-limit.strategy"; +export * from "./memory-size-limit.strategy"; \ No newline at end of file diff --git a/src/strategies/memory-percentage-limit.strategy.ts b/src/strategies/memory-percentage-limit.strategy.ts deleted file mode 100644 index 9a80064..0000000 --- a/src/strategies/memory-percentage-limit.strategy.ts +++ /dev/null @@ -1,13 +0,0 @@ -import type { StoredHeapItem } from "../levels"; -import type { InMemory } from "../levels/interfaces/in-memory"; -import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; - -export class MemoryPercentageLimitStrategy - implements MemoryManagementStrategy -{ - constructor(private threshold: number) {} - checkCondition(memory: InMemory): boolean { - const usage = memory.getMemoryUsage(); - return usage > this.threshold; - } -} diff --git 
a/src/strategies/memory-size-limit.strategy.spec.ts b/src/strategies/memory-size-limit.strategy.spec.ts new file mode 100644 index 0000000..a02d985 --- /dev/null +++ b/src/strategies/memory-size-limit.strategy.spec.ts @@ -0,0 +1,75 @@ +import { describe, expect, it } from "vitest"; +import { MemoryCacheLevel, type StoredHeapItem } from "../levels"; +import { FirstExpiringMemoryPolicy } from "../policies/first-expiring-memory.policy"; +import { MemorySizeLimitStrategy } from "./memory-size-limit.strategy"; +import { generateJSONData } from "../../tests/utilities/data.utilities"; + +describe("MemorySizeLimitStrategy will ensure memory usage is within limits", () => { + it("should not clear memory when memory usage does not exceed threshold", async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(0.01); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + const heapSize = cacheEngine.getHeap().getTotalSize(); + + expect(heapSize).toBe(0); + + expect(strategy.checkCondition(cacheEngine)).toBe(false); + + await policy.evict(cacheEngine); + + // The heap size should remain unchanged because no eviction is needed + const heapSnapshot = cacheEngine.getHeap().getSnapshot(); + expect(heapSnapshot.length).toBe(heapSize); + }); + + it("should trigger eviction when memory usage exceeds threshold", async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(0); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + expect(strategy.checkCondition(cacheEngine)).toBe(true); + + await policy.evict(cacheEngine); + + // The heap should be empty after eviction + const heapSnapshot = cacheEngine.getHeap().getSnapshot(); + expect(heapSnapshot.length).toBe(0); + }); + + it("should evict items when memory usage exceeds threshold after adding bulk data", async () => { + 
const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(1); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + await generateJSONData(cacheEngine, 10000); + + const postEvictionSnapshot = cacheEngine.getHeap().getSnapshot(); + + expect(postEvictionSnapshot.length).toBeLessThan(10000); + }); + + it('should not evict items when memory usage is within threshold after adding bulk data', async () => { + const policy = new FirstExpiringMemoryPolicy(); + const strategy = new MemorySizeLimitStrategy(50); + const cacheEngine = new MemoryCacheLevel({ + memoryStrategies: [strategy], + evictionPolicy: policy, + }); + + await generateJSONData(cacheEngine, 1000); + + const postEvictionSnapshot = cacheEngine.getHeap().getSnapshot(); + + expect(postEvictionSnapshot.length).toBe(1000); + }); +}); diff --git a/src/strategies/memory-size-limit.strategy.ts b/src/strategies/memory-size-limit.strategy.ts new file mode 100644 index 0000000..44d4a3b --- /dev/null +++ b/src/strategies/memory-size-limit.strategy.ts @@ -0,0 +1,24 @@ +import type { StoredHeapItem } from "../levels"; +import type { InMemory } from "../levels/interfaces/in-memory"; +import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; + + +/** + * This strategy checks if the total size of items in the cache exceeds a defined threshold. + * The threshold is a percentage of the total RAM allocated to the Node.js process. + * If the size exceeds the threshold, it signals that eviction should occur. + * This helps in managing memory usage effectively and prevents the cache from consuming excessive memory. + * For instance, if my cache has 11% of the total RAM allocated to Node.js, and the threshold is set to 10%, + * the strategy will indicate that eviction is necessary. 
+ */ + +export class MemorySizeLimitStrategy + implements MemoryManagementStrategy +{ + constructor(private threshold: number) {} + checkCondition(memory: InMemory): boolean { + const heap = memory.getHeap(); + const usage = (heap.getTotalSize() / process.memoryUsage().heapTotal) * 100; + return usage >= this.threshold; + } +} diff --git a/src/strategies/memory-percentage-limit.strategy.spec.ts b/src/strategies/ram-percentage-limit.strategy.spec.ts similarity index 79% rename from src/strategies/memory-percentage-limit.strategy.spec.ts rename to src/strategies/ram-percentage-limit.strategy.spec.ts index abcf829..d1678fa 100644 --- a/src/strategies/memory-percentage-limit.strategy.spec.ts +++ b/src/strategies/ram-percentage-limit.strategy.spec.ts @@ -1,12 +1,12 @@ import { describe, expect, it } from "vitest"; import { MemoryCacheLevel, type StoredHeapItem } from "../levels"; import { FirstExpiringMemoryPolicy } from "../policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "./memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "./ram-percentage-limit.strategy"; -describe("MemoryPercentageLimitStrategy will ensure memory usage is within limits", () => { +describe("RamPercentageLimitStrategy will ensure memory usage is within limits", () => { it("should trigger eviction when memory usage does not exceed threshold", async () => { const policy = new FirstExpiringMemoryPolicy(); - const strategy = new MemoryPercentageLimitStrategy(80); + const strategy = new RamPercentageLimitStrategy(80); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, @@ -24,7 +24,7 @@ describe("MemoryPercentageLimitStrategy will ensure memory usage is within limit it("should trigger eviction when memory usage exceeds threshold", async () => { const policy = new FirstExpiringMemoryPolicy(); - const strategy = new MemoryPercentageLimitStrategy(0); + const strategy = new 
RamPercentageLimitStrategy(0); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, diff --git a/src/strategies/ram-percentage-limit.strategy.ts b/src/strategies/ram-percentage-limit.strategy.ts new file mode 100644 index 0000000..3ea4b84 --- /dev/null +++ b/src/strategies/ram-percentage-limit.strategy.ts @@ -0,0 +1,20 @@ +import type { StoredHeapItem } from "../levels"; +import type { InMemory } from "../levels/interfaces/in-memory"; +import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; + +/** + * This strategy checks if the current RAM usage percentage exceeds a defined threshold. + * If the usage exceeds the threshold, it signals that eviction should occur. + * This is useful for preventing the cache from consuming too much memory + * and ensures that the application remains responsive. + * However, it may lead to more frequent evictions in memory-constrained environments. + */ +export class RamPercentageLimitStrategy + implements MemoryManagementStrategy +{ + constructor(private threshold: number) {} + checkCondition(memory: InMemory): boolean { + const usage = memory.getMemoryUsage(); + return usage > this.threshold; + } +} diff --git a/src/utils/heap.utils.ts b/src/utils/heap.utils.ts index 7f91dca..43e698c 100644 --- a/src/utils/heap.utils.ts +++ b/src/utils/heap.utils.ts @@ -1,14 +1,34 @@ import { MinHeap } from "@datastructures-js/heap"; +import { serialize } from "./parsing.utils"; export interface MemoryHeap extends MinHeap { rebuild: (items: T[]) => void; getSnapshot: () => T[]; getCount: () => number; + getTotalSize: () => number; } export const createCacheHeap = (comparator: (a: T) => number) => { let itemCounter = 0; + let totalSize = 0; const heap = new MinHeap(comparator) as MemoryHeap; + + const originalInsert = heap.insert.bind(heap); + heap.insert = (item: T) => { + itemCounter++; + totalSize += Buffer.byteLength(serialize(item), "utf8"); + return 
originalInsert(item); + }; + + heap.getTotalSize = () => totalSize; + + const originalClear = heap.clear.bind(heap); + heap.clear = () => { + itemCounter = 0; + totalSize = 0; + return originalClear(); + }; + heap.rebuild = (items: T[]) => { heap.clear(); @@ -18,22 +38,13 @@ export const createCacheHeap = (comparator: (a: T) => number) => { itemCounter = 0; }; + const originalPop = heap.pop.bind(heap); heap.pop = () => { itemCounter -= 1; - return originalPop(); - }; - - const originalInsert = heap.insert.bind(heap); - heap.insert = (item: T) => { - itemCounter++; - return originalInsert(item); - }; - - const originalClear = heap.clear.bind(heap); - heap.clear = () => { - itemCounter = 0; - return originalClear(); + const item = originalPop(); + totalSize -= Buffer.byteLength(serialize(item), "utf8"); + return item; }; heap.getSnapshot = () => Array.from(heap); diff --git a/src/utils/parsing.utils.spec.ts b/src/utils/parsing.utils.spec.ts index 85058e1..fde0efd 100644 --- a/src/utils/parsing.utils.spec.ts +++ b/src/utils/parsing.utils.spec.ts @@ -1,6 +1,6 @@ import { faker } from "@faker-js/faker"; import { describe, expect, it } from "vitest"; -import { deserializeFromRedis } from "./parsing.utils"; +import { deserialize } from "./parsing.utils"; describe("deserialise from redis", () => { it("should handle standard objects, even if not parsed with superjson", () => { @@ -11,6 +11,6 @@ describe("deserialise from redis", () => { const parsed = JSON.stringify(object); - expect(deserializeFromRedis(parsed as never)).toEqual(object); + expect(deserialize(parsed as never)).toEqual(object); }); }); diff --git a/src/utils/parsing.utils.ts b/src/utils/parsing.utils.ts index c58b7c8..a3469c6 100644 --- a/src/utils/parsing.utils.ts +++ b/src/utils/parsing.utils.ts @@ -5,15 +5,15 @@ import { JSONParse, JSONStringify } from "json-with-bigint"; * @param item The value to serialize * @returns A string safe for Redis storage */ -export const serializeForRedis = (item: unknown) => 
{ +export const serialize = (item: unknown) => { return JSONStringify(item); }; /** - * Deserializes a value from Redis (previously stored with serializeForRedis). + * Deserializes a value from Redis (previously stored with serialize). * @param str The string from Redis * @returns The original JS value */ -export const deserializeFromRedis = (str: never) => { +export const deserialize = (str: never) => { return JSONParse(str); }; diff --git a/tests/benchmarks/cache.benchmark.spec.ts b/tests/benchmarks/cache.benchmark.spec.ts index bad88f9..2e37480 100644 --- a/tests/benchmarks/cache.benchmark.spec.ts +++ b/tests/benchmarks/cache.benchmark.spec.ts @@ -10,7 +10,7 @@ import { RedisCacheLevel, } from "../../src/levels"; import { FirstExpiringMemoryPolicy } from "../../src/policies/first-expiring-memory.policy"; -import { MemoryPercentageLimitStrategy } from "../../src/strategies/memory-percentage-limit.strategy"; +import { RamPercentageLimitStrategy } from "../../src/strategies/ram-percentage-limit.strategy"; import type { StoredHeapItem } from "../../src/levels/memory/memory.level"; import { type BenchmarkResult, @@ -52,7 +52,7 @@ describe("Cache Performance Benchmarks", () => { // Create fresh cache instances for this test const redisClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); @@ -113,7 +113,7 @@ describe("Cache Performance Benchmarks", () => { const redisOnlyClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); @@ -190,7 
+190,7 @@ describe("Cache Performance Benchmarks", () => { const redisOnlyClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); @@ -259,7 +259,7 @@ describe("Cache Performance Benchmarks", () => { // Create fresh cache instance for this test const redisClient = new Redis(redisContainer.getConnectionUrl()); const memoryLevel = new MemoryCacheLevel({ - memoryStrategies: [new MemoryPercentageLimitStrategy(80)], + memoryStrategies: [new RamPercentageLimitStrategy(80)], evictionPolicy: new FirstExpiringMemoryPolicy(), }); const redisLevel = new RedisCacheLevel(redisClient); diff --git a/tests/utilities/cache-setup.ts b/tests/utilities/cache-setup.ts new file mode 100644 index 0000000..18c0a24 --- /dev/null +++ b/tests/utilities/cache-setup.ts @@ -0,0 +1,32 @@ +import { Redis } from 'ioredis'; +import { CacheService } from '../../src/cache.service'; +import { MemoryCacheLevel, RedisCacheLevel } from '../../src/levels'; +import { FirstExpiringMemoryPolicy } from '../../src/policies/first-expiring-memory.policy'; +import { RamPercentageLimitStrategy } from '../../src/strategies/ram-percentage-limit.strategy'; +import type { StoredHeapItem } from '../../src/levels/memory/memory.level'; + +export function createMemoryLevel(): MemoryCacheLevel { + return new MemoryCacheLevel({ + memoryStrategies: [new RamPercentageLimitStrategy(80)], + evictionPolicy: new FirstExpiringMemoryPolicy(), + }); +} + +export function createRedisLevel(redisClient: Redis): RedisCacheLevel { + return new RedisCacheLevel(redisClient); +} + +export function createMultiLevelCache(memoryLevel: MemoryCacheLevel, redisLevel: RedisCacheLevel): CacheService { + return new CacheService({ + levels: [memoryLevel, redisLevel], + defaultTTL: 3600, + 
}); +} + +export async function prepopulateCache(cache: CacheService, uniqueKeys: number, valueFactory: (i: number) => any) { + for (let i = 0; i < uniqueKeys; i++) { + const key = `benchmark_key_${i}`; + const value = valueFactory(i); + await cache.set(key, value); + } +} diff --git a/tests/utilities/data.utilities.ts b/tests/utilities/data.utilities.ts index ce55dd7..d1abd0b 100644 --- a/tests/utilities/data.utilities.ts +++ b/tests/utilities/data.utilities.ts @@ -6,7 +6,7 @@ export async function generateJSONData( recordNum: number, ) { for (let i = 0; i < recordNum; i++) { - const randomTTL = Date.now() + Math.floor(Math.random() * 3600) + 1; + const ttl = Date.now() + 3600 await cacheLevel.set( `key${i}`, { @@ -22,7 +22,7 @@ export async function generateJSONData( 3, ), }, - randomTTL, + ttl, ); } } From f2101d08e69549183cb8831d08e8310426c86550 Mon Sep 17 00:00:00 2001 From: Oliver Kucharzewski Date: Sun, 9 Nov 2025 13:54:44 +1100 Subject: [PATCH 2/4] feat: add getStoreSize method to InMemory interface and implement in MemoryCacheLevel --- README.md | 19 +++++++++++++++++-- src/levels/interfaces/in-memory.ts | 6 ++++++ src/levels/memory/memory.level.spec.ts | 6 ++++++ src/levels/memory/memory.level.ts | 14 ++++++++------ src/strategies/memory-size-limit.strategy.ts | 5 ++++- 5 files changed, 41 insertions(+), 9 deletions(-) diff --git a/README.md b/README.md index 8a69d14..62e3379 100644 --- a/README.md +++ b/README.md @@ -145,6 +145,18 @@ const policy = new FirstExpiringMemoryPolicy(); Strategies check conditions and trigger eviction policies when thresholds are met. +#### MemorySizeLimitStrategy (Recommended Default) +Triggers eviction when the total size of items in the cache exceeds a defined threshold (as a percentage of the Node.js process heap). + +This strategy is recommended as the default for most applications, as it provides a more accurate measurement of cache memory usage and helps prevent out-of-memory errors. 
+ +```typescript +import { MemorySizeLimitStrategy } from 'cacheforge'; + +// Trigger eviction when cache uses 10% or more of Node.js heap +const strategy = new MemorySizeLimitStrategy(10); +``` + #### RamPercentageLimitStrategy Triggers eviction when system memory usage exceeds a percentage threshold. @@ -556,8 +568,11 @@ await cache.invalidateKey('user:123'); ### 4. Memory Strategy Thresholds -- Development: 80-90% (more headroom) -- Production: 70-75% (prevent OOM issues) + +- **Recommended Default:** Use `MemorySizeLimitStrategy` with a threshold of 10-20% of Node.js heap for most production workloads. +- **RamPercentageLimitStrategy:** + - Development: 80-90% (more headroom) + - Production: 70-75% (prevent OOM issues) ### 5. Distributed Locking diff --git a/src/levels/interfaces/in-memory.ts b/src/levels/interfaces/in-memory.ts index 73ab71b..0ddf300 100644 --- a/src/levels/interfaces/in-memory.ts +++ b/src/levels/interfaces/in-memory.ts @@ -12,4 +12,10 @@ export interface InMemory { * @return Array of items in the heap. */ getHeap(): MemoryHeap; + + /** + * Get the size of the key-value store in bytes. + * @return Size of the store in bytes. 
+ */ + getStoreSize(): number; } diff --git a/src/levels/memory/memory.level.spec.ts b/src/levels/memory/memory.level.spec.ts index 0977160..8d84a88 100644 --- a/src/levels/memory/memory.level.spec.ts +++ b/src/levels/memory/memory.level.spec.ts @@ -125,6 +125,12 @@ describe("should successfully store data, and retrieve it on demand", async () = await cacheEngine.mget(["bingo", "bingo1", "bingo2"]), ).toEqual([undefined, undefined, undefined]); }); + + it('should get store size in bytes', () => { + const storeSize = cacheEngine.getStoreSize(); + expect(typeof storeSize).toBe('number'); + expect(storeSize).toBeGreaterThanOrEqual(0); + }); }); describe("It should successfully manage the application memory usage", () => { diff --git a/src/levels/memory/memory.level.ts b/src/levels/memory/memory.level.ts index 1d342cc..7016253 100644 --- a/src/levels/memory/memory.level.ts +++ b/src/levels/memory/memory.level.ts @@ -8,6 +8,7 @@ import type { InMemory } from "../interfaces/in-memory"; import type { Purgable } from "../interfaces/purgable"; import { EvictionManager } from "./eviction-manager"; import { triggerMemoryChange } from "./memory-event.manager"; +import { serialize } from "../../utils/parsing.utils"; export interface StoredItem { value: unknown; expiry: number; @@ -26,6 +27,7 @@ export class MemoryCacheLevel implements CacheLevel, Purgable, InMemory { protected store = new Map(); + protected size = 0; protected heap = createCacheHeap((item) => item.expiry); protected evictionManager: EvictionManager; @@ -41,17 +43,17 @@ export class MemoryCacheLevel await Promise.all(deletePromises); } - private insertHeapItem(item: StoredHeapItem) { - this.heap.insert(item); - } - private updateStore(key: string, item: StoredItem) { this.store.set(key, item); - - this.insertHeapItem({ ...item, key }); + this.heap.insert({ ...item, key }); + this.size += serialize(item).length; triggerMemoryChange(); } + public getStoreSize(): number { + return this.size; + } + async mset( keys: 
string[], values: T[], diff --git a/src/strategies/memory-size-limit.strategy.ts b/src/strategies/memory-size-limit.strategy.ts index 44d4a3b..92f616a 100644 --- a/src/strategies/memory-size-limit.strategy.ts +++ b/src/strategies/memory-size-limit.strategy.ts @@ -18,7 +18,10 @@ export class MemorySizeLimitStrategy constructor(private threshold: number) {} checkCondition(memory: InMemory): boolean { const heap = memory.getHeap(); - const usage = (heap.getTotalSize() / process.memoryUsage().heapTotal) * 100; + const heapSize = heap.getTotalSize(); + const keyStoreSize = memory.getStoreSize(); + const totalSize = heapSize + keyStoreSize; + const usage = (totalSize / process.memoryUsage().heapTotal) * 100; return usage >= this.threshold; } } From eae05159224aee4ec821c5f4c5c528e7d06c7188 Mon Sep 17 00:00:00 2001 From: Oliver Kucharzewski Date: Sun, 9 Nov 2025 13:57:36 +1100 Subject: [PATCH 3/4] 1.2.0 --- package-lock.json | 4 ++-- package.json | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/package-lock.json b/package-lock.json index 3071851..565c7b6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "cacheforge", - "version": "1.1.0", + "version": "1.2.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "cacheforge", - "version": "1.1.0", + "version": "1.2.0", "dependencies": { "@datastructures-js/heap": "^4.3.7", "@sesamecare-oss/redlock": "^1.4.0", diff --git a/package.json b/package.json index 7c5d55b..5178076 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cacheforge", - "version": "1.1.0", + "version": "1.2.0", "description": "A multi-level caching library for Node.js applications, supporting in-memory and Redis, and custom cache levels.", "main": "dist/src/index.js", "types": "dist/src/index.d.ts", From 7e5845bd343575d04583661317a24562f3f11c12 Mon Sep 17 00:00:00 2001 From: Oliver Kucharzewski Date: Sun, 9 Nov 2025 14:01:02 +1100 Subject: [PATCH 4/4] lint fix --- 
src/levels/memory/memory.level.spec.ts | 4 ++-- src/levels/memory/memory.level.ts | 2 +- src/levels/redis/redis.level.ts | 5 +---- src/strategies/index.ts | 2 +- .../memory-size-limit.strategy.spec.ts | 16 +++++++------- src/strategies/memory-size-limit.strategy.ts | 21 +++++++++---------- src/utils/heap.utils.ts | 4 +--- 7 files changed, 24 insertions(+), 30 deletions(-) diff --git a/src/levels/memory/memory.level.spec.ts b/src/levels/memory/memory.level.spec.ts index 8d84a88..4cabad1 100644 --- a/src/levels/memory/memory.level.spec.ts +++ b/src/levels/memory/memory.level.spec.ts @@ -126,9 +126,9 @@ describe("should successfully store data, and retrieve it on demand", async () = ).toEqual([undefined, undefined, undefined]); }); - it('should get store size in bytes', () => { + it("should get store size in bytes", () => { const storeSize = cacheEngine.getStoreSize(); - expect(typeof storeSize).toBe('number'); + expect(typeof storeSize).toBe("number"); expect(storeSize).toBeGreaterThanOrEqual(0); }); }); diff --git a/src/levels/memory/memory.level.ts b/src/levels/memory/memory.level.ts index 7016253..eb2af15 100644 --- a/src/levels/memory/memory.level.ts +++ b/src/levels/memory/memory.level.ts @@ -3,12 +3,12 @@ import { DEFAULT_TTL } from "../../constants"; import type { AbstractMemoryEvictionPolicy } from "../../policies/abstract/abstract-memory-eviction.policy"; import type { MemoryManagementStrategy } from "../../strategies/interfaces/memory-management-strategy"; import { createCacheHeap } from "../../utils/heap.utils"; +import { serialize } from "../../utils/parsing.utils"; import type { CacheLevel } from "../interfaces/cache-level"; import type { InMemory } from "../interfaces/in-memory"; import type { Purgable } from "../interfaces/purgable"; import { EvictionManager } from "./eviction-manager"; import { triggerMemoryChange } from "./memory-event.manager"; -import { serialize } from "../../utils/parsing.utils"; export interface StoredItem { value: unknown; 
expiry: number; diff --git a/src/levels/redis/redis.level.ts b/src/levels/redis/redis.level.ts index 25ad344..a73585c 100644 --- a/src/levels/redis/redis.level.ts +++ b/src/levels/redis/redis.level.ts @@ -3,10 +3,7 @@ import type IoRedis from "ioredis"; import type { Cluster } from "ioredis"; import { DEFAULT_TTL } from "../../constants"; import { parseIfJSON } from "../../utils/cache.utils"; -import { - deserialize, - serialize, -} from "../../utils/parsing.utils"; +import { deserialize, serialize } from "../../utils/parsing.utils"; import { generateVersionLookupKey } from "../../utils/version.utils"; import type { CacheLevel } from "../interfaces/cache-level"; import type { Lockable } from "../interfaces/lockable"; diff --git a/src/strategies/index.ts b/src/strategies/index.ts index 575a88c..b1f382f 100644 --- a/src/strategies/index.ts +++ b/src/strategies/index.ts @@ -1,2 +1,2 @@ +export * from "./memory-size-limit.strategy"; export * from "./ram-percentage-limit.strategy"; -export * from "./memory-size-limit.strategy"; \ No newline at end of file diff --git a/src/strategies/memory-size-limit.strategy.spec.ts b/src/strategies/memory-size-limit.strategy.spec.ts index a02d985..887a35c 100644 --- a/src/strategies/memory-size-limit.strategy.spec.ts +++ b/src/strategies/memory-size-limit.strategy.spec.ts @@ -1,8 +1,8 @@ import { describe, expect, it } from "vitest"; +import { generateJSONData } from "../../tests/utilities/data.utilities"; import { MemoryCacheLevel, type StoredHeapItem } from "../levels"; import { FirstExpiringMemoryPolicy } from "../policies/first-expiring-memory.policy"; import { MemorySizeLimitStrategy } from "./memory-size-limit.strategy"; -import { generateJSONData } from "../../tests/utilities/data.utilities"; describe("MemorySizeLimitStrategy will ensure memory usage is within limits", () => { it("should not clear memory when memory usage does not exceed threshold", async () => { @@ -12,10 +12,10 @@ describe("MemorySizeLimitStrategy will ensure 
memory usage is within limits", () memoryStrategies: [strategy], evictionPolicy: policy, }); - + const heapSize = cacheEngine.getHeap().getTotalSize(); - expect(heapSize).toBe(0); + expect(heapSize).toBe(0); expect(strategy.checkCondition(cacheEngine)).toBe(false); @@ -49,8 +49,8 @@ describe("MemorySizeLimitStrategy will ensure memory usage is within limits", () const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, - }); - + }); + await generateJSONData(cacheEngine, 10000); const postEvictionSnapshot = cacheEngine.getHeap().getSnapshot(); @@ -58,14 +58,14 @@ describe("MemorySizeLimitStrategy will ensure memory usage is within limits", () expect(postEvictionSnapshot.length).toBeLessThan(10000); }); - it('should not evict items when memory usage is within threshold after adding bulk data', async () => { + it("should not evict items when memory usage is within threshold after adding bulk data", async () => { const policy = new FirstExpiringMemoryPolicy(); const strategy = new MemorySizeLimitStrategy(50); const cacheEngine = new MemoryCacheLevel({ memoryStrategies: [strategy], evictionPolicy: policy, - }); - + }); + await generateJSONData(cacheEngine, 1000); const postEvictionSnapshot = cacheEngine.getHeap().getSnapshot(); diff --git a/src/strategies/memory-size-limit.strategy.ts b/src/strategies/memory-size-limit.strategy.ts index 92f616a..4da5541 100644 --- a/src/strategies/memory-size-limit.strategy.ts +++ b/src/strategies/memory-size-limit.strategy.ts @@ -2,7 +2,6 @@ import type { StoredHeapItem } from "../levels"; import type { InMemory } from "../levels/interfaces/in-memory"; import type { MemoryManagementStrategy } from "./interfaces/memory-management-strategy"; - /** * This strategy checks if the total size of items in the cache exceeds a defined threshold. * The threshold is a percentage of the total RAM allocated to the Node.js process. 
@@ -13,15 +12,15 @@ import type { MemoryManagementStrategy } from "./interfaces/memory-management-st */ export class MemorySizeLimitStrategy - implements MemoryManagementStrategy + implements MemoryManagementStrategy { - constructor(private threshold: number) {} - checkCondition(memory: InMemory): boolean { - const heap = memory.getHeap(); - const heapSize = heap.getTotalSize(); - const keyStoreSize = memory.getStoreSize(); - const totalSize = heapSize + keyStoreSize; - const usage = (totalSize / process.memoryUsage().heapTotal) * 100; - return usage >= this.threshold; - } + constructor(private threshold: number) {} + checkCondition(memory: InMemory): boolean { + const heap = memory.getHeap(); + const heapSize = heap.getTotalSize(); + const keyStoreSize = memory.getStoreSize(); + const totalSize = heapSize + keyStoreSize; + const usage = (totalSize / process.memoryUsage().heapTotal) * 100; + return usage >= this.threshold; + } } diff --git a/src/utils/heap.utils.ts b/src/utils/heap.utils.ts index 43e698c..eccb7a1 100644 --- a/src/utils/heap.utils.ts +++ b/src/utils/heap.utils.ts @@ -12,7 +12,7 @@ export const createCacheHeap = (comparator: (a: T) => number) => { let itemCounter = 0; let totalSize = 0; const heap = new MinHeap(comparator) as MemoryHeap; - + const originalInsert = heap.insert.bind(heap); heap.insert = (item: T) => { itemCounter++; @@ -29,7 +29,6 @@ export const createCacheHeap = (comparator: (a: T) => number) => { return originalClear(); }; - heap.rebuild = (items: T[]) => { heap.clear(); items.forEach((item) => { @@ -38,7 +37,6 @@ export const createCacheHeap = (comparator: (a: T) => number) => { itemCounter = 0; }; - const originalPop = heap.pop.bind(heap); heap.pop = () => { itemCounter -= 1;