From 139fe89bbbd911875d361dfba275ccb5d254280d Mon Sep 17 00:00:00 2001 From: Daniel Carvalho Date: Sun, 13 Jul 2025 12:44:17 -0300 Subject: [PATCH 1/6] chore: add baseCache class accepting capacity and evicting cached items --- src/cache-strategy/core/base-cache.ts | 68 +++++ src/cache-strategy/core/cache.ts | 5 + src/cache-strategy/utils/linked-list.ts | 63 ++++ .../cache-strategy/core/base-cache.spec.ts | 273 +++++++++++++++++ .../cache-strategy/utils/linked-list.spec.ts | 284 ++++++++++++++++++ 5 files changed, 693 insertions(+) create mode 100644 src/cache-strategy/core/base-cache.ts create mode 100644 src/cache-strategy/core/cache.ts create mode 100644 src/cache-strategy/utils/linked-list.ts create mode 100644 test/unit/cache-strategy/core/base-cache.spec.ts create mode 100644 test/unit/cache-strategy/utils/linked-list.spec.ts diff --git a/src/cache-strategy/core/base-cache.ts b/src/cache-strategy/core/base-cache.ts new file mode 100644 index 0000000..642353f --- /dev/null +++ b/src/cache-strategy/core/base-cache.ts @@ -0,0 +1,68 @@ +import { Cache } from './cache'; +import { Node, LinkedList } from '../utils/linked-list'; + +export abstract class BaseCache + implements Cache +{ + protected map: Map>; + protected linkedList: LinkedList; + protected size: number; + protected readonly capacity: number; + + constructor(capacity: number) { + if (capacity < 0) { + throw new Error('Capacity must be greater than or equal to 0'); + } + + this.map = new Map>(); + this.linkedList = new LinkedList(); + + this.capacity = capacity; + this.size = 0; + } + + abstract get(key: TKey): TResponse | Promise | undefined; + + set(key: TKey, value: TResponse | Promise): void { + if (this.capacity === 0) { + return; + } + + const item = this.map.get(key); + if (item) { + this.handleExistingItemAccess(item, value); + return; + } + + if (this.size === this.capacity) { + this.evictItem(); + } + + const node = this.linkedList.addNode(key, value); + this.size++; + this.map.set(key, node); 
+ } + + delete(key: TKey): void { + const item = this.map.get(key); + if (item) { + this.linkedList.removeNode(item); + this.map.delete(key); + this.size--; + } + } + + protected abstract handleExistingItemAccess( + item: Node, + value: TResponse | Promise, + ): void; + protected abstract evictItem(): void; + + protected getHead(): Node { + return this.linkedList.getHead(); + } + + protected getTail(): Node { + return this.linkedList.getTail(); + } +} diff --git a/src/cache-strategy/core/cache.ts b/src/cache-strategy/core/cache.ts new file mode 100644 index 0000000..6d1a0ac --- /dev/null +++ b/src/cache-strategy/core/cache.ts @@ -0,0 +1,5 @@ +export interface Cache { + get(key: TKey): TResponse | Promise | undefined; + set(key: TKey, value: TResponse | Promise): void; + delete(key: TKey): void; +} diff --git a/src/cache-strategy/utils/linked-list.ts b/src/cache-strategy/utils/linked-list.ts new file mode 100644 index 0000000..843defd --- /dev/null +++ b/src/cache-strategy/utils/linked-list.ts @@ -0,0 +1,63 @@ +export class Node { + constructor( + public key?: TKey, + public value?: TResponse | Promise, + public next: Node | null = null, + public prev: Node | null = null, + ) {} +} + +export class LinkedList { + private head: Node; + private tail: Node; + + constructor() { + this.head = new Node(); + this.tail = new Node(); + this.head.next = this.tail; + this.tail.prev = this.head; + } + + addNode( + key: TKey | undefined, + value: TResponse | Promise | undefined, + ): Node { + const node = new Node(key, value); + + const headNext = this.head.next; + + this.head.next = node; + + node.prev = this.head; + node.next = headNext; + + if (headNext) { + headNext.prev = node; + } + + return node; + } + + removeNode(node: Node): void { + const nextNode = node.next; + const prevNode = node.prev; + + if (nextNode) { + nextNode.prev = prevNode; + } + if (prevNode) { + prevNode.next = nextNode; + } + + node.next = null; + node.prev = null; + } + + getHead(): Node { + return 
this.head; + } + + getTail(): Node { + return this.tail; + } +} diff --git a/test/unit/cache-strategy/core/base-cache.spec.ts b/test/unit/cache-strategy/core/base-cache.spec.ts new file mode 100644 index 0000000..b0ba94e --- /dev/null +++ b/test/unit/cache-strategy/core/base-cache.spec.ts @@ -0,0 +1,273 @@ +import { BaseCache } from '../../../../src/cache-strategy/core/base-cache'; +import { Node } from '../../../../src/cache-strategy/utils/linked-list'; + +// Create a concrete implementation for testing +class TestCache extends BaseCache< + TResponse, + TKey +> { + get(key: TKey): TResponse | Promise | undefined { + const item = this.map.get(key); + return item ? item.value : undefined; + } + + protected handleExistingItemAccess( + item: Node, + value: TResponse | Promise, + ): void { + // Test implementation: just update the value + item.value = value; + } + + protected evictItem(): void { + // Test implementation: remove the last item + const nodeToRemove = this.getTail().prev; + if (nodeToRemove && nodeToRemove.key) { + this.linkedList.removeNode(nodeToRemove); + this.map.delete(nodeToRemove.key); + this.size--; + } + } +} + +describe('BaseCache', () => { + let cache: TestCache; + + beforeEach(() => { + cache = new TestCache(3); + }); + + describe('constructor', () => { + it('should create an empty cache with specified capacity', () => { + const testCache = new TestCache(5); + expect(testCache.get('key1')).toBeUndefined(); + }); + + it('should initialize with correct capacity', () => { + const testCache = new TestCache(10); + // Access protected property through reflection or test methods + expect(testCache['capacity']).toBe(10); + }); + + it('should throw error for negative capacity', () => { + expect(() => new TestCache(-1)).toThrow( + 'Capacity must be greater than or equal to 0', + ); + }); + + it('should accept capacity of 0', () => { + expect(() => new TestCache(0)).not.toThrow(); + }); + + it('should not store items when capacity is 0', () => { + const 
zeroCache = new TestCache(0); + zeroCache.set('key1', 100); + zeroCache.set('key2', 200); + + expect(zeroCache.get('key1')).toBeUndefined(); + expect(zeroCache.get('key2')).toBeUndefined(); + expect(zeroCache['size']).toBe(0); + }); + }); + + describe('set and get', () => { + it('should store and retrieve values', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + }); + + it('should return undefined for non-existent keys', () => { + expect(cache.get('nonexistent')).toBeUndefined(); + }); + + it('should handle Promise values', async () => { + const promise = Promise.resolve(300); + cache.set('key1', promise); + + const result = await cache.get('key1'); + expect(result).toBe(300); + }); + }); + + describe('capacity management', () => { + it('should evict items when capacity is exceeded', () => { + cache.set('key1', 100); + expect(cache['size']).toBe(1); + + cache.set('key2', 200); + expect(cache['size']).toBe(2); + + cache.set('key3', 300); + expect(cache['size']).toBe(3); + + cache.set('key4', 400); // Should evict key1 + expect(cache['size']).toBe(3); // Size should remain at capacity + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should handle cache with capacity 1', () => { + const singleCache = new TestCache(1); + singleCache.set('key1', 100); + expect(singleCache['size']).toBe(1); + + singleCache.set('key2', 200); + expect(singleCache['size']).toBe(1); // Size should remain at capacity + + expect(singleCache.get('key1')).toBeUndefined(); + expect(singleCache.get('key2')).toBe(200); + }); + + it('should handle cache with capacity 0', () => { + const zeroCache = new TestCache(0); + zeroCache.set('key1', 100); + expect(zeroCache['size']).toBe(0); // Size should remain 0 + + expect(zeroCache.get('key1')).toBeUndefined(); + }); + }); + + 
describe('update behavior', () => { + it('should update existing items', () => { + cache.set('key1', 100); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + + it('should handle multiple updates', () => { + cache.set('key1', 100); + cache.set('key1', 200); + cache.set('key1', 300); + + expect(cache.get('key1')).toBe(300); + }); + }); + + describe('delete', () => { + it('should remove items from cache', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + cache.delete('key1'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + }); + + it('should handle deleting non-existent keys', () => { + expect(() => cache.delete('nonexistent')).not.toThrow(); + }); + + it('should allow re-adding deleted items', () => { + cache.set('key1', 100); + cache.delete('key1'); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + + it('should handle deleting multiple items', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + cache.delete('key1'); + cache.delete('key3'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBeUndefined(); + }); + }); + + describe('size management', () => { + it('should track size correctly', () => { + expect(cache['size']).toBe(0); + + cache.set('key1', 100); + expect(cache['size']).toBe(1); + + cache.set('key2', 200); + expect(cache['size']).toBe(2); + + cache.delete('key1'); + expect(cache['size']).toBe(1); + }); + + it('should not increase size when updating existing items', () => { + cache.set('key1', 100); + expect(cache['size']).toBe(1); + + cache.set('key1', 200); + expect(cache['size']).toBe(1); + }); + }); + + describe('edge cases', () => { + it('should handle multiple consecutive gets', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key1')).toBe(100); + 
expect(cache.get('key2')).toBe(200); + expect(cache.get('key1')).toBe(100); + }); + + it('should handle setting same key multiple times', () => { + cache.set('key1', 100); + cache.set('key1', 200); + cache.set('key1', 300); + + expect(cache.get('key1')).toBe(300); + }); + + it('should handle rapid set/get operations', () => { + for (let i = 0; i < 10; i++) { + cache.set(`key${i}`, i); + expect(cache['size']).toBeLessThanOrEqual(3); // Size should never exceed capacity + } + + expect(cache['size']).toBe(3); // Final size should be at capacity + + // Should only have the last 3 items + expect(cache.get('key7')).toBe(7); + expect(cache.get('key8')).toBe(8); + expect(cache.get('key9')).toBe(9); + expect(cache.get('key0')).toBeUndefined(); + }); + }); + + describe('abstract methods', () => { + it('should call handleExistingItemAccess when updating existing items', () => { + cache.set('key1', 100); + + // Spy on the method to verify it's called + const spy = jest.spyOn(cache as any, 'handleExistingItemAccess'); + + cache.set('key1', 200); + + expect(spy).toHaveBeenCalledWith(expect.any(Node), 200); + }); + + it('should call evictItem when capacity is exceeded', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Spy on the method to verify it's called + const spy = jest.spyOn(cache as any, 'evictItem'); + + cache.set('key4', 400); + + expect(spy).toHaveBeenCalled(); + }); + }); +}); diff --git a/test/unit/cache-strategy/utils/linked-list.spec.ts b/test/unit/cache-strategy/utils/linked-list.spec.ts new file mode 100644 index 0000000..6a0a66e --- /dev/null +++ b/test/unit/cache-strategy/utils/linked-list.spec.ts @@ -0,0 +1,284 @@ +import { + LinkedList, + Node, +} from '../../../../src/cache-strategy/utils/linked-list'; + +describe('LinkedList', () => { + let linkedList: LinkedList; + + beforeEach(() => { + linkedList = new LinkedList(); + }); + + describe('constructor', () => { + it('should create an empty linked list with head and 
tail sentinels', () => { + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head).toBeInstanceOf(Node); + expect(tail).toBeInstanceOf(Node); + expect(head.next).toBe(tail); + expect(tail.prev).toBe(head); + }); + }); + + describe('Node', () => { + it('should create a node with key and value', () => { + const node = new Node('key1', 100); + expect(node.key).toBe('key1'); + expect(node.value).toBe(100); + expect(node.next).toBeNull(); + expect(node.prev).toBeNull(); + }); + + it('should create a node with default values', () => { + const node = new Node(); + expect(node.key).toBeUndefined(); + expect(node.value).toBeUndefined(); + expect(node.next).toBeNull(); + expect(node.prev).toBeNull(); + }); + + it('should create a node with custom next and prev', () => { + const nextNode = new Node('key2', 200); + const prevNode = new Node('key0', 0); + const node = new Node('key1', 100, nextNode, prevNode); + + expect(node.key).toBe('key1'); + expect(node.value).toBe(100); + expect(node.next).toBe(nextNode); + expect(node.prev).toBe(prevNode); + }); + }); + + describe('addNode', () => { + it('should add a node to the front of the list and return the created node', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = linkedList.addNode('key2', 200); + + expect(node1).toBeInstanceOf(Node); + expect(node2).toBeInstanceOf(Node); + expect(node1.key).toBe('key1'); + expect(node1.value).toBe(100); + expect(node2.key).toBe('key2'); + expect(node2.value).toBe(200); + + const head = linkedList.getHead(); + expect(head.next).toBe(node2); + expect(node2.next).toBe(node1); + expect(node1.next).toBe(linkedList.getTail()); + }); + + it('should maintain correct prev pointers', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = linkedList.addNode('key2', 200); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(node2.prev).toBe(head); + expect(node1.prev).toBe(node2); + 
expect(tail.prev).toBe(node1); + }); + + it('should handle adding single node', () => { + const node = linkedList.addNode('key1', 100); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(node); + expect(node.prev).toBe(head); + expect(node.next).toBe(tail); + expect(tail.prev).toBe(node); + }); + + it('should handle undefined key and value', () => { + const node = linkedList.addNode(undefined, undefined); + + expect(node.key).toBeUndefined(); + expect(node.value).toBeUndefined(); + expect(node.prev).toBe(linkedList.getHead()); + expect(node.next).toBe(linkedList.getTail()); + }); + }); + + describe('removeNode', () => { + it('should remove a node from the middle of the list', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = linkedList.addNode('key2', 200); + const node3 = linkedList.addNode('key3', 300); + + linkedList.removeNode(node2); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(node3); + expect(node3.next).toBe(node1); + expect(node1.next).toBe(tail); + expect(node3.prev).toBe(head); + expect(node1.prev).toBe(node3); + expect(tail.prev).toBe(node1); + + // Removed node should have null pointers + expect(node2.next).toBeNull(); + expect(node2.prev).toBeNull(); + }); + + it('should remove the first node', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = linkedList.addNode('key2', 200); + + linkedList.removeNode(node2); // node2 is first (most recently added) + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(node1); + expect(node1.next).toBe(tail); + expect(node1.prev).toBe(head); + expect(tail.prev).toBe(node1); + }); + + it('should remove the last node', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = linkedList.addNode('key2', 200); + + linkedList.removeNode(node1); // node1 is last (least recently added) + + const head = 
linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(node2); + expect(node2.next).toBe(tail); + expect(node2.prev).toBe(head); + expect(tail.prev).toBe(node2); + }); + + it('should handle removing the only node', () => { + const node = linkedList.addNode('key1', 100); + + linkedList.removeNode(node); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(tail); + expect(tail.prev).toBe(head); + expect(node.next).toBeNull(); + expect(node.prev).toBeNull(); + }); + + it('should handle removing a node that is not in the list', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = new Node('key2', 200); // Create node outside of list + + // Try to remove node2 which is not in the list + expect(() => linkedList.removeNode(node2)).not.toThrow(); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(node1); + expect(node1.next).toBe(tail); + }); + }); + + describe('complex operations', () => { + it('should handle multiple add and remove operations', () => { + const node1 = linkedList.addNode('key1', 100); + const node2 = linkedList.addNode('key2', 200); + const node3 = linkedList.addNode('key3', 300); + + // Remove middle node + linkedList.removeNode(node2); + + // Add new node + const node4 = linkedList.addNode('key4', 400); + + // Remove first node + linkedList.removeNode(node4); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(node3); + expect(node3.next).toBe(node1); + expect(node1.next).toBe(tail); + }); + + it('should maintain correct order after complex operations', () => { + const nodes = []; + for (let i = 0; i < 5; i++) { + nodes.push(linkedList.addNode(`key${i}`, i)); + } + + // Remove nodes 1 and 3 + linkedList.removeNode(nodes[1]); + linkedList.removeNode(nodes[3]); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + // Order should 
be: head -> node4 -> node2 -> node0 -> tail + expect(head.next).toBe(nodes[4]); + expect(nodes[4].next).toBe(nodes[2]); + expect(nodes[2].next).toBe(nodes[0]); + expect(nodes[0].next).toBe(tail); + }); + }); + + describe('edge cases', () => { + it('should handle removing a node multiple times', () => { + const node = linkedList.addNode('key1', 100); + + linkedList.removeNode(node); + expect(() => linkedList.removeNode(node)).not.toThrow(); + }); + + it('should handle rapid add and remove operations', () => { + const nodes = []; + for (let i = 0; i < 10; i++) { + nodes.push(linkedList.addNode(`key${i}`, i)); + } + + // Remove all nodes + nodes.forEach((node) => linkedList.removeNode(node)); + + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(tail); + expect(tail.prev).toBe(head); + }); + + it('should handle adding nodes with null/undefined values', () => { + const node1 = linkedList.addNode('key1', null as any); + const node2 = linkedList.addNode('key2', undefined); + + expect(node1.value).toBeNull(); + expect(node2.value).toBeUndefined(); + }); + }); + + describe('getHead and getTail', () => { + it('should return the same head and tail instances', () => { + const head1 = linkedList.getHead(); + const head2 = linkedList.getHead(); + const tail1 = linkedList.getTail(); + const tail2 = linkedList.getTail(); + + expect(head1).toBe(head2); + expect(tail1).toBe(tail2); + }); + + it('should maintain head and tail relationship', () => { + const head = linkedList.getHead(); + const tail = linkedList.getTail(); + + expect(head.next).toBe(tail); + expect(tail.prev).toBe(head); + }); + }); +}); From cb0798179dc6e017cc8ad645f24c050e90e7889b Mon Sep 17 00:00:00 2001 From: Daniel Carvalho Date: Sun, 13 Jul 2025 12:44:35 -0300 Subject: [PATCH 2/6] feat: add fifo cache strategy --- .../implementations/fifo-cache.ts | 33 +++ .../implementations/fifo-cache.spec.ts | 234 ++++++++++++++++++ 2 files changed, 267 insertions(+) create 
mode 100644 src/cache-strategy/implementations/fifo-cache.ts create mode 100644 test/unit/cache-strategy/implementations/fifo-cache.spec.ts diff --git a/src/cache-strategy/implementations/fifo-cache.ts b/src/cache-strategy/implementations/fifo-cache.ts new file mode 100644 index 0000000..a0b7c3e --- /dev/null +++ b/src/cache-strategy/implementations/fifo-cache.ts @@ -0,0 +1,33 @@ +import { BaseCache } from '../core/base-cache'; +import { Node } from '../utils/linked-list'; + +export class FifoCache extends BaseCache< + TResponse, + TKey +> { + get(key: TKey): TResponse | Promise | undefined { + const item = this.map.get(key); + if (item) { + return item.value; + } + return undefined; + } + + protected handleExistingItemAccess( + item: Node, + value: TResponse | Promise, + ): void { + item.value = value; + } + + protected evictItem(): void { + const nodeToRemove = this.getTail().prev; + if (nodeToRemove) { + this.linkedList.removeNode(nodeToRemove); + if (nodeToRemove.key) { + this.map.delete(nodeToRemove.key); + } + } + this.size--; + } +} diff --git a/test/unit/cache-strategy/implementations/fifo-cache.spec.ts b/test/unit/cache-strategy/implementations/fifo-cache.spec.ts new file mode 100644 index 0000000..2941c91 --- /dev/null +++ b/test/unit/cache-strategy/implementations/fifo-cache.spec.ts @@ -0,0 +1,234 @@ +import { FifoCache } from '../../../../src/cache-strategy/implementations/fifo-cache'; + +describe('FifoCache', () => { + let cache: FifoCache; + + beforeEach(() => { + cache = new FifoCache(3); + }); + + describe('constructor', () => { + it('should create an empty cache with specified capacity', () => { + const testCache = new FifoCache(5); + expect(testCache.get('key1')).toBeUndefined(); + }); + + it('should throw error for negative capacity', () => { + expect(() => new FifoCache(-1)).toThrow( + 'Capacity must be greater than or equal to 0', + ); + }); + + it('should accept capacity of 0', () => { + expect(() => new FifoCache(0)).not.toThrow(); + }); + + 
it('should not store items when capacity is 0', () => { + const zeroCache = new FifoCache(0); + zeroCache.set('key1', 100); + zeroCache.set('key2', 200); + + expect(zeroCache.get('key1')).toBeUndefined(); + expect(zeroCache.get('key2')).toBeUndefined(); + expect(zeroCache['size']).toBe(0); + }); + }); + + describe('set and get', () => { + it('should store and retrieve values', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + }); + + it('should return undefined for non-existent keys', () => { + expect(cache.get('nonexistent')).toBeUndefined(); + }); + + it('should handle Promise values', async () => { + const promise = Promise.resolve(300); + cache.set('key1', promise); + + const result = await cache.get('key1'); + expect(result).toBe(300); + }); + }); + + describe('FIFO behavior', () => { + it('should evict first in item when capacity is exceeded', () => { + cache.set('key1', 100); + expect(cache['size']).toBe(1); + + cache.set('key2', 200); + expect(cache['size']).toBe(2); + + cache.set('key3', 300); + expect(cache['size']).toBe(3); + + cache.set('key4', 400); // This should evict key1 (first in) + expect(cache['size']).toBe(3); // Size should remain at capacity + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should not move accessed items to front', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access key1, but it should not move to front + cache.get('key1'); + + // Add new item, should evict key1 (first in, regardless of access) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should update existing 
items without changing position', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Update key1, should not change its position + cache.set('key1', 150); + + // Add new item, should evict key1 (first in) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + }); + + describe('delete', () => { + it('should remove items from cache', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + cache.delete('key1'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + }); + + it('should handle deleting non-existent keys', () => { + expect(() => cache.delete('nonexistent')).not.toThrow(); + }); + + it('should allow re-adding deleted items', () => { + cache.set('key1', 100); + cache.delete('key1'); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + }); + + describe('edge cases', () => { + it('should handle cache with capacity 1', () => { + const singleCache = new FifoCache(1); + singleCache.set('key1', 100); + expect(singleCache['size']).toBe(1); + + singleCache.set('key2', 200); + expect(singleCache['size']).toBe(1); // Size should remain at capacity + + expect(singleCache.get('key1')).toBeUndefined(); + expect(singleCache.get('key2')).toBe(200); + }); + + it('should handle cache with capacity 0', () => { + const zeroCache = new FifoCache(0); + zeroCache.set('key1', 100); + expect(zeroCache['size']).toBe(0); // Size should remain 0 + + expect(zeroCache.get('key1')).toBeUndefined(); + }); + + it('should handle multiple consecutive gets', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key1')).toBe(100); + }); + + it('should handle setting same key multiple 
times', () => { + cache.set('key1', 100); + cache.set('key1', 200); + cache.set('key1', 300); + + expect(cache.get('key1')).toBe(300); + }); + }); + + describe('complex scenarios', () => { + it('should maintain FIFO order after complex operations', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access key2 and key1, but they should not move + cache.get('key2'); + cache.get('key1'); + + // Add new item, should evict key1 (first in) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should handle rapid set/get operations', () => { + for (let i = 0; i < 10; i++) { + cache.set(`key${i}`, i); + expect(cache['size']).toBeLessThanOrEqual(3); // Size should never exceed capacity + } + + expect(cache['size']).toBe(3); // Final size should be at capacity + + // In FIFO, the first items are evicted first + // So the last 3 items (key7, key8, key9) should remain + expect(cache.get('key7')).toBe(7); + expect(cache.get('key8')).toBe(8); + expect(cache.get('key9')).toBe(9); + expect(cache.get('key0')).toBeUndefined(); + }); + + it('should maintain strict FIFO order', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access all items multiple times + cache.get('key3'); + cache.get('key2'); + cache.get('key1'); + cache.get('key3'); + cache.get('key2'); + + // Add new item, should evict key1 (first in, regardless of access) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + }); +}); From 369bb4a96e7968fc1fa20b161a92bf9a329603ed Mon Sep 17 00:00:00 2001 From: Daniel Carvalho Date: Sun, 13 Jul 2025 12:44:54 -0300 Subject: [PATCH 3/6] feat: add LRU cache strategy 
--- .../implementations/lru-cache.ts | 37 +++ .../implementations/lru-cache.spec.ts | 214 ++++++++++++++++++ 2 files changed, 251 insertions(+) create mode 100644 src/cache-strategy/implementations/lru-cache.ts create mode 100644 test/unit/cache-strategy/implementations/lru-cache.spec.ts diff --git a/src/cache-strategy/implementations/lru-cache.ts b/src/cache-strategy/implementations/lru-cache.ts new file mode 100644 index 0000000..6be4e65 --- /dev/null +++ b/src/cache-strategy/implementations/lru-cache.ts @@ -0,0 +1,37 @@ +import { BaseCache } from '../core/base-cache'; +import { Node } from '../utils/linked-list'; + +export class LruCache extends BaseCache< + TResponse, + TKey +> { + get(key: TKey): TResponse | Promise | undefined { + const item = this.map.get(key); + if (item) { + this.linkedList.removeNode(item); + this.linkedList.addNode(item.key, item.value); + return item.value; + } + return undefined; + } + + protected handleExistingItemAccess( + item: Node, + value: TResponse | Promise, + ): void { + this.linkedList.removeNode(item); + item.value = value; + this.linkedList.addNode(item.key, item.value); + } + + protected evictItem(): void { + const nodeToRemove = this.getTail().prev; + if (nodeToRemove) { + this.linkedList.removeNode(nodeToRemove); + if (nodeToRemove.key) { + this.map.delete(nodeToRemove.key); + } + } + this.size--; + } +} diff --git a/test/unit/cache-strategy/implementations/lru-cache.spec.ts b/test/unit/cache-strategy/implementations/lru-cache.spec.ts new file mode 100644 index 0000000..a3e7af3 --- /dev/null +++ b/test/unit/cache-strategy/implementations/lru-cache.spec.ts @@ -0,0 +1,214 @@ +import { LruCache } from '../../../../src/cache-strategy/implementations/lru-cache'; + +describe('LruCache', () => { + let cache: LruCache; + + beforeEach(() => { + cache = new LruCache(3); + }); + + describe('constructor', () => { + it('should create an empty cache with specified capacity', () => { + const testCache = new LruCache(5); + 
expect(testCache.get('key1')).toBeUndefined(); + }); + + it('should throw error for negative capacity', () => { + expect(() => new LruCache(-1)).toThrow( + 'Capacity must be greater than or equal to 0', + ); + }); + + it('should accept capacity of 0', () => { + expect(() => new LruCache(0)).not.toThrow(); + }); + + it('should not store items when capacity is 0', () => { + const zeroCache = new LruCache(0); + zeroCache.set('key1', 100); + zeroCache.set('key2', 200); + + expect(zeroCache.get('key1')).toBeUndefined(); + expect(zeroCache.get('key2')).toBeUndefined(); + expect(zeroCache['size']).toBe(0); + }); + }); + + describe('set and get', () => { + it('should store and retrieve values', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + }); + + it('should return undefined for non-existent keys', () => { + expect(cache.get('nonexistent')).toBeUndefined(); + }); + + it('should handle Promise values', async () => { + const promise = Promise.resolve(300); + cache.set('key1', promise); + + const result = await cache.get('key1'); + expect(result).toBe(300); + }); + }); + + describe('LRU behavior', () => { + it('should evict least recently used item when capacity is exceeded', () => { + cache.set('key1', 100); + expect(cache['size']).toBe(1); + + cache.set('key2', 200); + expect(cache['size']).toBe(2); + + cache.set('key3', 300); + expect(cache['size']).toBe(3); + + cache.set('key4', 400); // This should evict key1 + expect(cache['size']).toBe(3); // Size should remain at capacity + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should move accessed items to front (most recently used)', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + expect(cache['size']).toBe(3); + + // Access key1, making it most recently used + 
cache.get('key1'); + + // Add new item, should evict key2 (least recently used) + cache.set('key4', 400); + expect(cache['size']).toBe(3); // Size should remain at capacity + + expect(cache.get('key1')).toBe(100); // Should still exist + expect(cache.get('key2')).toBeUndefined(); // Should be evicted + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should update existing items and move them to front', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Update key1, should move it to front + cache.set('key1', 150); + + // Add new item, should evict key2 (least recently used) + cache.set('key4', 400); + + expect(cache.get('key1')).toBe(150); + expect(cache.get('key2')).toBeUndefined(); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + }); + + describe('delete', () => { + it('should remove items from cache', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + cache.delete('key1'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + }); + + it('should handle deleting non-existent keys', () => { + expect(() => cache.delete('nonexistent')).not.toThrow(); + }); + + it('should allow re-adding deleted items', () => { + cache.set('key1', 100); + cache.delete('key1'); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + }); + + describe('edge cases', () => { + it('should handle cache with capacity 1', () => { + const singleCache = new LruCache(1); + singleCache.set('key1', 100); + expect(singleCache['size']).toBe(1); + + singleCache.set('key2', 200); + expect(singleCache['size']).toBe(1); // Size should remain at capacity + + expect(singleCache.get('key1')).toBeUndefined(); + expect(singleCache.get('key2')).toBe(200); + }); + + it('should handle cache with capacity 0', () => { + const zeroCache = new LruCache(0); + zeroCache.set('key1', 100); + expect(zeroCache['size']).toBe(0); // Size 
should remain 0 + + expect(zeroCache.get('key1')).toBeUndefined(); + }); + + it('should handle multiple consecutive gets', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key1')).toBe(100); + }); + + it('should handle setting same key multiple times', () => { + cache.set('key1', 100); + cache.set('key1', 200); + cache.set('key1', 300); + + expect(cache.get('key1')).toBe(300); + }); + }); + + describe('complex scenarios', () => { + it('should maintain correct order after complex operations', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access key2, then key1 + cache.get('key2'); + cache.get('key1'); + + // Add new item, should evict key3 (least recently used) + cache.set('key4', 400); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBeUndefined(); + expect(cache.get('key4')).toBe(400); + }); + + it('should handle rapid set/get operations', () => { + for (let i = 0; i < 10; i++) { + cache.set(`key${i}`, i); + expect(cache['size']).toBeLessThanOrEqual(3); // Size should never exceed capacity + } + + expect(cache['size']).toBe(3); // Final size should be at capacity + + // Should only have the last 3 items + expect(cache.get('key7')).toBe(7); + expect(cache.get('key8')).toBe(8); + expect(cache.get('key9')).toBe(9); + expect(cache.get('key0')).toBeUndefined(); + }); + }); +}); From 38d0423675af93525dcef4b9a8d8549618839396 Mon Sep 17 00:00:00 2001 From: Daniel Carvalho Date: Sun, 13 Jul 2025 12:45:16 -0300 Subject: [PATCH 4/6] feat: add MRU cache strategy --- .../implementations/mru-cache.ts | 37 +++ .../implementations/mru-cache.spec.ts | 230 ++++++++++++++++++ 2 files changed, 267 insertions(+) create mode 100644 src/cache-strategy/implementations/mru-cache.ts create mode 100644 
test/unit/cache-strategy/implementations/mru-cache.spec.ts diff --git a/src/cache-strategy/implementations/mru-cache.ts b/src/cache-strategy/implementations/mru-cache.ts new file mode 100644 index 0000000..50e5f9d --- /dev/null +++ b/src/cache-strategy/implementations/mru-cache.ts @@ -0,0 +1,37 @@ +import { BaseCache } from '../core/base-cache'; +import { Node } from '../utils/linked-list'; + +export class MruCache extends BaseCache< + TResponse, + TKey +> { + get(key: TKey): TResponse | Promise | undefined { + const item = this.map.get(key); + if (item) { + this.linkedList.removeNode(item); + this.linkedList.addNode(item.key, item.value); + return item.value; + } + return undefined; + } + + protected handleExistingItemAccess( + item: Node, + value: TResponse | Promise, + ): void { + this.linkedList.removeNode(item); + item.value = value; + this.linkedList.addNode(item.key, item.value); + } + + protected evictItem(): void { + const nodeToRemove = this.getHead().next; + if (nodeToRemove) { + this.linkedList.removeNode(nodeToRemove); + if (nodeToRemove.key) { + this.map.delete(nodeToRemove.key); + } + } + this.size--; + } +} diff --git a/test/unit/cache-strategy/implementations/mru-cache.spec.ts b/test/unit/cache-strategy/implementations/mru-cache.spec.ts new file mode 100644 index 0000000..76d829b --- /dev/null +++ b/test/unit/cache-strategy/implementations/mru-cache.spec.ts @@ -0,0 +1,230 @@ +import { MruCache } from '../../../../src/cache-strategy/implementations/mru-cache'; + +describe('MruCache', () => { + let cache: MruCache; + + beforeEach(() => { + cache = new MruCache(3); + }); + + describe('constructor', () => { + it('should create an empty cache with specified capacity', () => { + const testCache = new MruCache(5); + expect(testCache.get('key1')).toBeUndefined(); + }); + + it('should throw error for negative capacity', () => { + expect(() => new MruCache(-1)).toThrow( + 'Capacity must be greater than or equal to 0', + ); + }); + + it('should accept 
capacity of 0', () => { + expect(() => new MruCache(0)).not.toThrow(); + }); + + it('should not store items when capacity is 0', () => { + const zeroCache = new MruCache(0); + zeroCache.set('key1', 100); + zeroCache.set('key2', 200); + + expect(zeroCache.get('key1')).toBeUndefined(); + expect(zeroCache.get('key2')).toBeUndefined(); + expect(zeroCache['size']).toBe(0); + }); + }); + + describe('set and get', () => { + it('should store and retrieve values', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + }); + + it('should return undefined for non-existent keys', () => { + expect(cache.get('nonexistent')).toBeUndefined(); + }); + + it('should handle Promise values', async () => { + const promise = Promise.resolve(300); + cache.set('key1', promise); + + const result = await cache.get('key1'); + expect(result).toBe(300); + }); + }); + + describe('MRU behavior', () => { + it('should evict most recently used item when capacity is exceeded', () => { + cache.set('key1', 100); + expect(cache['size']).toBe(1); + + cache.set('key2', 200); + expect(cache['size']).toBe(2); + + cache.set('key3', 300); + expect(cache['size']).toBe(3); + + cache.set('key4', 400); // This should evict key3 (most recently used) + expect(cache['size']).toBe(3); // Size should remain at capacity + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBeUndefined(); // Should be evicted + expect(cache.get('key4')).toBe(400); + }); + + it('should move accessed items to front (most recently used)', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access key1, making it most recently used + cache.get('key1'); + + // Add new item, should evict key1 (most recently used) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + 
expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should update existing items and move them to front', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Update key1, should move it to front + cache.set('key1', 150); + + // Add new item, should evict key1 (most recently used) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + }); + + describe('delete', () => { + it('should remove items from cache', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + cache.delete('key1'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + }); + + it('should handle deleting non-existent keys', () => { + expect(() => cache.delete('nonexistent')).not.toThrow(); + }); + + it('should allow re-adding deleted items', () => { + cache.set('key1', 100); + cache.delete('key1'); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + }); + + describe('edge cases', () => { + it('should handle cache with capacity 1', () => { + const singleCache = new MruCache(1); + singleCache.set('key1', 100); + expect(singleCache['size']).toBe(1); + + singleCache.set('key2', 200); + expect(singleCache['size']).toBe(1); // Size should remain at capacity + + expect(singleCache.get('key1')).toBeUndefined(); + expect(singleCache.get('key2')).toBe(200); + }); + + it('should handle cache with capacity 0', () => { + const zeroCache = new MruCache(0); + zeroCache.set('key1', 100); + expect(zeroCache['size']).toBe(0); // Size should remain 0 + + expect(zeroCache.get('key1')).toBeUndefined(); + }); + + it('should handle multiple consecutive gets', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key1')).toBe(100); + 
expect(cache.get('key2')).toBe(200); + expect(cache.get('key1')).toBe(100); + }); + + it('should handle setting same key multiple times', () => { + cache.set('key1', 100); + cache.set('key1', 200); + cache.set('key1', 300); + + expect(cache.get('key1')).toBe(300); + }); + }); + + describe('complex scenarios', () => { + it('should maintain correct order after complex operations', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access key2, then key1 + cache.get('key2'); + cache.get('key1'); + + // Add new item, should evict key1 (most recently used) + cache.set('key4', 400); + + expect(cache.get('key1')).toBeUndefined(); // Should be evicted + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should handle rapid set/get operations', () => { + for (let i = 0; i < 10; i++) { + cache.set(`key${i}`, i); + expect(cache['size']).toBeLessThanOrEqual(3); // Size should never exceed capacity + } + + expect(cache['size']).toBe(3); // Final size should be at capacity + + // In MRU, the most recently used items are evicted first + // So the oldest items (key0, key1) should remain + expect(cache.get('key0')).toBe(0); + expect(cache.get('key1')).toBe(1); + expect(cache.get('key2')).toBeUndefined(); + expect(cache.get('key9')).toBe(9); + }); + + it('should evict most recently accessed item', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + // Access key2, making it most recently used + cache.get('key2'); + + // Add new item, should evict key2 (most recently used) + cache.set('key4', 400); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBeUndefined(); // Should be evicted + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + }); +}); From 799426e74a1704308cdbcb3956cdd0287df13bb7 Mon Sep 17 00:00:00 2001 From: Daniel Carvalho Date: Sun, 13 Jul 2025 13:07:56 -0300 
Subject: [PATCH 5/6] feat: support configurable evictionPolicy to select cache strategy --- package.json | 3 +- src/cache-strategy/cache-factory.ts | 23 ++ src/cache-strategy/core/cache-factory.ts | 26 +++ src/cache-strategy/implementations/index.ts | 4 + .../implementations/simple-cache.ts | 26 +++ src/cache-strategy/index.ts | 2 + src/remembered-config.ts | 21 +- src/remembered.ts | 33 +-- .../cache-strategy/core/cache-factory.spec.ts | 203 +++++++++++++++++ .../implementations/simple-cache.spec.ts | 215 ++++++++++++++++++ test/unit/index.spec.ts | 186 ++++++++++++++- 11 files changed, 723 insertions(+), 19 deletions(-) create mode 100644 src/cache-strategy/cache-factory.ts create mode 100644 src/cache-strategy/core/cache-factory.ts create mode 100644 src/cache-strategy/implementations/index.ts create mode 100644 src/cache-strategy/implementations/simple-cache.ts create mode 100644 src/cache-strategy/index.ts create mode 100644 test/unit/cache-strategy/core/cache-factory.spec.ts create mode 100644 test/unit/cache-strategy/implementations/simple-cache.spec.ts diff --git a/package.json b/package.json index da87ba4..ccd5a3d 100644 --- a/package.json +++ b/package.json @@ -17,10 +17,11 @@ "lint": "npm run lint:format && npm run lint:style", "lint:fix": "npm run lint:format:fix && npm run lint:style:fix", "build": "tsc -p tsconfig.build.json", - "test": "jest test/unit", + "test": "jest test/unit --runInBand --forceExit", "test:watch": "jest test/unit --watch", "test:coverage": "jest test/unit --coverage", "test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand", + "test:only": "jest --runInBand $(grep -rnwl ./test -e \"test.only\\|it.only\\|describe.only\" --include \\*.ts | tr '\n' ' ') --forceExit", "test:e2e": "jest test/e2e", "clear": "npm run clear:build && npm run clear:modules", "clear:build": "del-cli ./dist", diff --git a/src/cache-strategy/cache-factory.ts b/src/cache-strategy/cache-factory.ts new 
file mode 100644 index 0000000..2ef7c75 --- /dev/null +++ b/src/cache-strategy/cache-factory.ts @@ -0,0 +1,23 @@ +import { + EvictionRememberedConfig, + RememberedConfig, +} from '../remembered-config'; +import { FifoCache, LruCache, MruCache, SimpleCache } from './implementations'; + +export function createCache( + config: RememberedConfig, +) { + switch (config.evictionPolicy) { + case 'LRU': + const lruConfig = config as EvictionRememberedConfig; + return new LruCache(lruConfig.capacity); + case 'FIFO': + const fifoConfig = config as EvictionRememberedConfig; + return new FifoCache(fifoConfig.capacity); + case 'MRU': + const mruConfig = config as EvictionRememberedConfig; + return new MruCache(mruConfig.capacity); + default: + return new SimpleCache(); + } +} diff --git a/src/cache-strategy/core/cache-factory.ts b/src/cache-strategy/core/cache-factory.ts new file mode 100644 index 0000000..7a99302 --- /dev/null +++ b/src/cache-strategy/core/cache-factory.ts @@ -0,0 +1,26 @@ +import { + EvictionRememberedConfig, + RememberedConfig, +} from '../../remembered-config'; +import { FifoCache } from '../implementations/fifo-cache'; +import { LruCache } from '../implementations/lru-cache'; +import { MruCache } from '../implementations/mru-cache'; +import { SimpleCache } from '../implementations/simple-cache'; + +export function createCache( + config: RememberedConfig, +) { + switch (config.evictionPolicy) { + case 'LRU': + const lruConfig = config as EvictionRememberedConfig; + return new LruCache(lruConfig.capacity); + case 'FIFO': + const fifoConfig = config as EvictionRememberedConfig; + return new FifoCache(fifoConfig.capacity); + case 'MRU': + const mruConfig = config as EvictionRememberedConfig; + return new MruCache(mruConfig.capacity); + default: + return new SimpleCache(); + } +} diff --git a/src/cache-strategy/implementations/index.ts b/src/cache-strategy/implementations/index.ts new file mode 100644 index 0000000..42998a3 --- /dev/null +++ 
b/src/cache-strategy/implementations/index.ts @@ -0,0 +1,4 @@ +export { SimpleCache } from './simple-cache'; +export { FifoCache } from './fifo-cache'; +export { LruCache } from './lru-cache'; +export { MruCache } from './mru-cache'; diff --git a/src/cache-strategy/implementations/simple-cache.ts b/src/cache-strategy/implementations/simple-cache.ts new file mode 100644 index 0000000..16f29cd --- /dev/null +++ b/src/cache-strategy/implementations/simple-cache.ts @@ -0,0 +1,26 @@ +import { Cache } from '../core/cache'; + +export class SimpleCache + implements Cache +{ + private map: Map>; + + constructor() { + this.map = new Map>(); + } + + get(key: TKey): TResponse | Promise | undefined { + return this.map.get(key); + } + + set(key: TKey, value: TResponse | Promise) { + this.map.set(key, value); + } + + delete(key: TKey) { + const item = this.map.get(key); + if (item) { + this.map.delete(key); + } + } +} diff --git a/src/cache-strategy/index.ts b/src/cache-strategy/index.ts new file mode 100644 index 0000000..9568562 --- /dev/null +++ b/src/cache-strategy/index.ts @@ -0,0 +1,2 @@ +export { Cache } from './core/cache'; +export { createCache } from './core/cache-factory'; diff --git a/src/remembered-config.ts b/src/remembered-config.ts index bebb5d6..43ae7fe 100644 --- a/src/remembered-config.ts +++ b/src/remembered-config.ts @@ -7,11 +7,30 @@ export type Ttl = | number | TtlFunction; -export interface RememberedConfig { +export type EvictionPolicy = 'LRU' | 'FIFO' | 'MRU'; + +export interface BaseRememberedConfig { ttl: Ttl; /** * Always keep a persistent last result for the cache when there is one, so the cache can be updated in the background */ nonBlocking?: boolean; onReused?: (key: string) => void; + evictionPolicy?: EvictionPolicy; } + +export interface EvictionRememberedConfig + extends BaseRememberedConfig { + evictionPolicy: 'LRU' | 'FIFO' | 'MRU'; + /** + * Maximum number of items to store in the cache + */ + capacity: number; +} + +export interface 
DefaultRememberedConfig + extends BaseRememberedConfig {} + +export type RememberedConfig = + | EvictionRememberedConfig + | DefaultRememberedConfig; diff --git a/src/remembered.ts b/src/remembered.ts index be1b5d1..12727f9 100644 --- a/src/remembered.ts +++ b/src/remembered.ts @@ -1,3 +1,4 @@ +import { createCache } from './cache-strategy/core/cache-factory'; import { dontWait } from './dont-wait'; import { Pacer } from './pacer'; import { RememberedConfig } from './remembered-config'; @@ -9,8 +10,8 @@ const defaultConfig = { ttl: 0 }; * A class that help you remember previous calls for you functions, to avoid new calls while it is not forgotten */ export class Remembered { - private map = new Map>(); - private nonBlockingMap = new Map(); + private cache; + private nonBlockingCache; private pacer: Pacer | undefined; private removeImmediately: boolean; private onReused?: (...args: any[]) => void; @@ -18,9 +19,11 @@ export class Remembered { constructor( private config: RememberedConfig = defaultConfig, ) { + this.cache = createCache(config); + this.nonBlockingCache = createCache(config); this.removeImmediately = !config.ttl; this.onReused = config.onReused; - this.pacer = new Pacer(config, (key: TKey) => this.map.delete(key)); + this.pacer = new Pacer(config, (key: TKey) => this.cache.delete(key)); } /** @@ -38,10 +41,10 @@ export class Remembered { ttl?: number, ): Promise { if (this.config.nonBlocking) { - if (this.nonBlockingMap.has(key)) { + const item = this.nonBlockingCache.get(key); + if (item !== undefined) { dontWait(() => this.blockingGet(key, callback, noCacheIf, ttl)); - - return this.nonBlockingMap.get(key) as R; + return item as R; } } @@ -59,7 +62,7 @@ export class Remembered { } dontWait(() => this.blockingGet(key, callback, noCacheIf, ttl)); - return this.nonBlockingMap.get(key) as R | undefined; + return this.nonBlockingCache.get(key) as R | undefined; } blockingGet( @@ -68,13 +71,13 @@ export class Remembered { noCacheIf?: (result: R) => boolean, 
ttl?: number, ): Promise { - const cached = this.map.get(key); - if (cached) { + const cached = this.cache.get(key); + if (cached !== undefined) { this.onReused?.(key); return cached as Promise; } const value = this.loadValue(key, callback, noCacheIf, ttl); - this.map.set(key, value); + this.cache.set(key, value); return value; } @@ -96,7 +99,7 @@ export class Remembered { } clearCache(key: TKey): void | Promise { - this.map.delete(key); + this.cache.delete(key); } private async loadValue( @@ -109,17 +112,17 @@ export class Remembered { try { result = await load(); if (noCacheIf?.(result)) { - this.map.delete(key); + this.cache.delete(key); } else if (this.config.nonBlocking) { - this.nonBlockingMap.set(key, result); + this.nonBlockingCache.set(key, result); } return result; } catch (err) { - this.map.delete(key); + this.cache.delete(key); throw err; } finally { if (this.removeImmediately) { - this.map.delete(key); + this.cache.delete(key); } else if (result !== Empty) { this.pacer?.schedulePurge(key, ttl, result); } diff --git a/test/unit/cache-strategy/core/cache-factory.spec.ts b/test/unit/cache-strategy/core/cache-factory.spec.ts new file mode 100644 index 0000000..b2a5f27 --- /dev/null +++ b/test/unit/cache-strategy/core/cache-factory.spec.ts @@ -0,0 +1,203 @@ +import { createCache } from '../../../../src/cache-strategy/cache-factory'; +import { LruCache } from '../../../../src/cache-strategy/implementations/lru-cache'; +import { MruCache } from '../../../../src/cache-strategy/implementations/mru-cache'; +import { FifoCache } from '../../../../src/cache-strategy/implementations/fifo-cache'; +import { SimpleCache } from '../../../../src/cache-strategy/implementations/simple-cache'; +import { RememberedConfig } from '../../../../src/remembered-config'; + +describe('createCache', () => { + describe('LRU policy', () => { + it('should create LRU cache with specified capacity', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'LRU', + 
capacity: 5, + }; + + const cache = createCache(config) as LruCache; + + expect(cache).toBeInstanceOf(LruCache); + expect(cache['capacity']).toBe(5); + }); + + it('should create LRU cache with capacity 0', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'LRU', + capacity: 0, + }; + + const cache = createCache(config) as LruCache; + + expect(cache).toBeInstanceOf(LruCache); + expect(cache['capacity']).toBe(0); + }); + }); + + describe('MRU policy', () => { + it('should create MRU cache with specified capacity', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'MRU', + capacity: 3, + }; + + const cache = createCache(config) as MruCache; + + expect(cache).toBeInstanceOf(MruCache); + expect(cache['capacity']).toBe(3); + }); + + it('should create MRU cache with capacity 0', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'MRU', + capacity: 0, + }; + + const cache = createCache(config) as MruCache; + + expect(cache).toBeInstanceOf(MruCache); + expect(cache['capacity']).toBe(0); + }); + }); + + describe('FIFO policy', () => { + it('should create FIFO cache with specified capacity', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'FIFO', + capacity: 4, + }; + + const cache = createCache(config) as FifoCache; + + expect(cache).toBeInstanceOf(FifoCache); + expect(cache['capacity']).toBe(4); + }); + + it('should create FIFO cache with capacity 0', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'FIFO', + capacity: 0, + }; + + const cache = createCache(config) as FifoCache; + + expect(cache).toBeInstanceOf(FifoCache); + expect(cache['capacity']).toBe(0); + }); + }); + + describe('default policy (Simple)', () => { + it('should create Simple cache when no eviction policy is specified', () => { + const config: RememberedConfig = { + ttl: 1000, + }; + + const cache = createCache(config); + + expect(cache).toBeInstanceOf(SimpleCache); + }); + + 
it('should create Simple cache when eviction policy is undefined', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: undefined, + }; + + const cache = createCache(config); + + expect(cache).toBeInstanceOf(SimpleCache); + }); + + it('should create Simple cache when eviction policy is null', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: null as any, + }; + + const cache = createCache(config); + + expect(cache).toBeInstanceOf(SimpleCache); + }); + + it('should create Simple cache when eviction policy is empty string', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: '' as any, + }; + + const cache = createCache(config); + + expect(cache).toBeInstanceOf(SimpleCache); + }); + + it('should create Simple cache when eviction policy is invalid', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'INVALID' as any, + }; + + const cache = createCache(config); + + expect(cache).toBeInstanceOf(SimpleCache); + }); + }); + describe('edge cases', () => { + it('should handle negative capacity gracefully', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'LRU', + capacity: -1, + }; + + // This should throw an error due to capacity validation in BaseCache + expect(() => createCache(config)).toThrow( + 'Capacity must be greater than or equal to 0', + ); + }); + + it('should handle very large capacity', () => { + const config: RememberedConfig = { + ttl: 1000, + evictionPolicy: 'LRU', + capacity: Number.MAX_SAFE_INTEGER, + }; + + const cache = createCache(config) as LruCache; + + expect(cache).toBeInstanceOf(LruCache); + expect(cache['capacity']).toBe(Number.MAX_SAFE_INTEGER); + }); + + it('should handle zero TTL', () => { + const config: RememberedConfig = { + ttl: 0, + evictionPolicy: 'LRU', + capacity: 5, + }; + + const cache = createCache(config) as LruCache; + + expect(cache).toBeInstanceOf(LruCache); + expect(cache['capacity']).toBe(5); + }); + + 
it('should handle function TTL', () => { + const ttlFunction = (key: string, response?: number) => + response ? response * 2 : 100; + const config: RememberedConfig = { + ttl: ttlFunction, + evictionPolicy: 'MRU', + capacity: 3, + }; + + const cache = createCache(config) as MruCache; + + expect(cache).toBeInstanceOf(MruCache); + expect(cache['capacity']).toBe(3); + }); + }); +}); diff --git a/test/unit/cache-strategy/implementations/simple-cache.spec.ts b/test/unit/cache-strategy/implementations/simple-cache.spec.ts new file mode 100644 index 0000000..46d2341 --- /dev/null +++ b/test/unit/cache-strategy/implementations/simple-cache.spec.ts @@ -0,0 +1,215 @@ +import { SimpleCache } from '../../../../src/cache-strategy/implementations/simple-cache'; + +describe('SimpleCache', () => { + let cache: SimpleCache; + + beforeEach(() => { + cache = new SimpleCache(); + }); + + describe('constructor', () => { + it('should create an empty cache', () => { + const testCache = new SimpleCache(); + expect(testCache.get('key1')).toBeUndefined(); + }); + }); + + describe('set and get', () => { + it('should store and retrieve values', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + }); + + it('should return undefined for non-existent keys', () => { + expect(cache.get('nonexistent')).toBeUndefined(); + }); + + it('should handle Promise values', async () => { + const promise = Promise.resolve(300); + cache.set('key1', promise); + + const result = await cache.get('key1'); + expect(result).toBe(300); + }); + + it('should handle different data types', () => { + const stringCache = new SimpleCache(); + const objectCache = new SimpleCache(); + + stringCache.set('key1', 'value1'); + objectCache.set('key1', { prop: 'value' }); + + expect(stringCache.get('key1')).toBe('value1'); + expect(objectCache.get('key1')).toEqual({ prop: 'value' }); + }); + }); + + describe('update behavior', () => { + 
it('should update existing values', () => { + cache.set('key1', 100); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + + it('should handle multiple updates', () => { + cache.set('key1', 100); + cache.set('key1', 200); + cache.set('key1', 300); + cache.set('key1', 400); + + expect(cache.get('key1')).toBe(400); + }); + }); + + describe('delete', () => { + it('should remove items from cache', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + cache.delete('key1'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + }); + + it('should handle deleting non-existent keys', () => { + expect(() => cache.delete('nonexistent')).not.toThrow(); + }); + + it('should allow re-adding deleted items', () => { + cache.set('key1', 100); + cache.delete('key1'); + cache.set('key1', 200); + + expect(cache.get('key1')).toBe(200); + }); + + it('should handle deleting multiple items', () => { + cache.set('key1', 100); + cache.set('key2', 200); + cache.set('key3', 300); + + cache.delete('key1'); + cache.delete('key3'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBeUndefined(); + }); + }); + + describe('edge cases', () => { + it('should handle null and undefined values', () => { + cache.set('key1', null as any); + cache.set('key2', undefined as any); + + expect(cache.get('key1')).toBeNull(); + expect(cache.get('key2')).toBeUndefined(); + }); + + it('should handle empty string keys', () => { + cache.set('', 100); + expect(cache.get('')).toBe(100); + }); + + it('should handle multiple consecutive gets', () => { + cache.set('key1', 100); + cache.set('key2', 200); + + expect(cache.get('key1')).toBe(100); + expect(cache.get('key1')).toBe(100); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key1')).toBe(100); + }); + + it('should handle setting same key multiple times', () => { + cache.set('key1', 100); + cache.set('key1', 200); + 
cache.set('key1', 300); + + expect(cache.get('key1')).toBe(300); + }); + }); + + describe('complex scenarios', () => { + it('should handle large number of items', () => { + for (let i = 0; i < 1000; i++) { + cache.set(`key${i}`, i); + } + + expect(cache.get('key0')).toBe(0); + expect(cache.get('key500')).toBe(500); + expect(cache.get('key999')).toBe(999); + }); + + it('should handle rapid set/get operations', () => { + for (let i = 0; i < 100; i++) { + cache.set(`key${i}`, i); + } + + for (let i = 0; i < 100; i++) { + expect(cache.get(`key${i}`)).toBe(i); + } + }); + + it('should handle concurrent operations', () => { + // Simulate concurrent-like operations + cache.set('key1', 100); + cache.set('key2', 200); + cache.get('key1'); + cache.set('key3', 300); + cache.delete('key1'); + cache.set('key4', 400); + cache.get('key2'); + + expect(cache.get('key1')).toBeUndefined(); + expect(cache.get('key2')).toBe(200); + expect(cache.get('key3')).toBe(300); + expect(cache.get('key4')).toBe(400); + }); + + it('should handle mixed data types', () => { + const mixedCache = new SimpleCache(); + mixedCache.set('string', 'hello'); + mixedCache.set('number', 42); + mixedCache.set('boolean', true); + mixedCache.set('array', [1, 2, 3]); + mixedCache.set('object', { a: 1, b: 2 }); + + expect(mixedCache.get('string')).toBe('hello'); + expect(mixedCache.get('number')).toBe(42); + expect(mixedCache.get('boolean')).toBe(true); + expect(mixedCache.get('array')).toEqual([1, 2, 3]); + expect(mixedCache.get('object')).toEqual({ a: 1, b: 2 }); + }); + }); + + describe('memory behavior', () => { + it('should not have capacity limits', () => { + // Simple cache has no capacity limits + for (let i = 0; i < 1000; i++) { + cache.set(`key${i}`, i); + } + + expect(cache.get('key0')).toBe(0); + expect(cache.get('key500')).toBe(500); + expect(cache.get('key999')).toBe(999); + }); + + it('should track size correctly without limits', () => { + // Simple cache doesn't have a size property, but we can test 
the behavior + for (let i = 0; i < 100; i++) { + cache.set(`key${i}`, i); + } + + // All items should be retrievable + for (let i = 0; i < 100; i++) { + expect(cache.get(`key${i}`)).toBe(i); + } + }); + }); +}); diff --git a/test/unit/index.spec.ts b/test/unit/index.spec.ts index 79f4678..3154faf 100644 --- a/test/unit/index.spec.ts +++ b/test/unit/index.spec.ts @@ -351,12 +351,194 @@ describe(Remembered.name, () => { describe(methods.clearCache, () => { it('should remove key', () => { - target['map'].set('abc', Promise.resolve('def')); + target['cache'].set('abc', Promise.resolve('def')); const result = target.clearCache('abc'); expect(result).toBeUndefined(); - expect(target['map'].has('abc')).toBe(false); + expect(target['cache'].get('abc')).toBeUndefined(); + }); + }); + + describe('eviction policies', () => { + it('should evict least recently used item when using LRU policy', async () => { + const lruTarget = new Remembered({ + ttl: 1000, + evictionPolicy: 'LRU', + capacity: 2, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await lruTarget.get('a', getter); // 1 + await lruTarget.get('b', getter); // 2 + await lruTarget.get('a', getter); // access 'a' again + await lruTarget.get('c', getter); // 3, should evict 'b' + + // 'b' should be evicted, so next access should call getter again + const result = await lruTarget.get('b', getter); + expect(result).toBe(4); + }); + + it('should evict most recently used item when using MRU policy', async () => { + const mruTarget = new Remembered({ + ttl: 1000, + evictionPolicy: 'MRU', + capacity: 2, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await mruTarget.get('a', getter); // 1 + await mruTarget.get('b', getter); // 2 + await mruTarget.get('a', getter); // access 'a' again (making it most recently used) + await mruTarget.get('c', getter); // 3, should evict 'a' (most recently used) + + // 'a' should be evicted, so next 
access should call getter again + const result = await mruTarget.get('a', getter); + expect(result).toBe(4); + }); + + it('should evict first in item when using FIFO policy', async () => { + const fifoTarget = new Remembered({ + ttl: 1000, + evictionPolicy: 'FIFO', + capacity: 2, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await fifoTarget.get('a', getter); // 1 + await fifoTarget.get('b', getter); // 2 + await fifoTarget.get('a', getter); // access 'a' again (shouldn't change order) + await fifoTarget.get('c', getter); // 3, should evict 'a' (first in) + + // 'a' should be evicted, so next access should call getter again + const result = await fifoTarget.get('a', getter); + expect(result).toBe(4); + }); + + it('should never cache when capacity is 0', async () => { + const zeroCapacityTarget = new Remembered({ + ttl: 1000, + evictionPolicy: 'LRU', + capacity: 0, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await zeroCapacityTarget.get('a', getter); + await zeroCapacityTarget.get('a', getter); + expect(count).toBe(2); // Always calls getter + }); + + it('should use Simple cache when no eviction policy is specified', async () => { + const simpleTarget = new Remembered({ ttl: 1000 }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + // Simple cache has no capacity limits, so all items should be cached + for (let i = 0; i < 100; i++) { + await simpleTarget.get(`key${i}`, getter); + } + + // All items should still be cached + for (let i = 0; i < 100; i++) { + await simpleTarget.get(`key${i}`, getter); + } + + expect(count).toBe(100); // Only called once per key + }); + }); + + describe('onReused callback', () => { + it('should call onReused when a cached value is reused', async () => { + const onReused = jest.fn(); + const callbackTarget = new Remembered({ + ttl: 1000, + onReused, + }); + let count = 0; + const getter = 
jest.fn().mockImplementation(async () => ++count); + + await callbackTarget.get('a', getter); + await callbackTarget.get('a', getter); + expect(onReused).toHaveBeenCalledWith('a'); + expect(onReused).toHaveBeenCalledTimes(1); + }); + + it('should call onReused for each cached key', async () => { + const onReused = jest.fn(); + const callbackTarget = new Remembered({ + ttl: 1000, + onReused, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await callbackTarget.get('a', getter); + await callbackTarget.get('b', getter); + await callbackTarget.get('a', getter); + await callbackTarget.get('b', getter); + + expect(onReused).toHaveBeenCalledWith('a'); + expect(onReused).toHaveBeenCalledWith('b'); + expect(onReused).toHaveBeenCalledTimes(2); + }); + + it('should not call onReused for new cache entries', async () => { + const onReused = jest.fn(); + const callbackTarget = new Remembered({ + ttl: 1000, + onReused, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await callbackTarget.get('a', getter); + expect(onReused).not.toHaveBeenCalled(); + }); + + it('should not call onReused when cache is cleared', async () => { + const onReused = jest.fn(); + const callbackTarget = new Remembered({ + ttl: 1000, + onReused, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + await callbackTarget.get('a', getter); + callbackTarget.clearCache('a'); + await callbackTarget.get('a', getter); + expect(onReused).not.toHaveBeenCalled(); + }); + }); + + describe('non-blocking with eviction policies', () => { + it('should use same eviction policy for non-blocking cache', async () => { + const nonBlockingTarget = new Remembered({ + ttl: 1000, + nonBlocking: true, + evictionPolicy: 'LRU', + capacity: 2, + }); + let count = 0; + const getter = jest.fn().mockImplementation(async () => ++count); + + // Fill cache + await nonBlockingTarget.get('a', getter); + await 
nonBlockingTarget.get('b', getter); + await nonBlockingTarget.get('c', getter); // Should evict 'a' + + // Use getSync to test non-blocking cache + const result1 = nonBlockingTarget.getSync('a', getter); + const result2 = nonBlockingTarget.getSync('b', getter); + const result3 = nonBlockingTarget.getSync('c', getter); + + expect(result1).toBeUndefined(); // 'a' was evicted + expect(result2).toBe(2); // 'b' still exists + expect(result3).toBe(3); // 'c' still exists }); }); }); From a77c2d8bf0e7da5f9da412a4f44d65101773e386 Mon Sep 17 00:00:00 2001 From: Daniel Carvalho Date: Sun, 13 Jul 2025 13:19:03 -0300 Subject: [PATCH 6/6] chore: update readme --- README.md | 124 ++++++++++++++++++++++++++++++++++++- docs/classes/remembered.md | 39 ++++++++++-- 2 files changed, 156 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 6eaafec..a56a778 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ [![Packages](https://david-dm.org/Codibre/remembered.svg)](https://david-dm.org/Codibre/remembered) [![npm version](https://badge.fury.io/js/remembered.svg)](https://badge.fury.io/js/remembered) -A module to remember for a given time the promises you made. +A module to remember for a given time the promises you made, with configurable cache strategies and eviction policies. # How to install @@ -16,6 +16,8 @@ npm install remembered # Usage +## Basic Usage + Create a new Remembered instance giving the ttl you want, in ms. ``` ts @@ -36,7 +38,7 @@ const [r1, r2, r3] = await Promise.all([ ]); ``` -In the above example, **r1**, **r2** and **r3** will receive the same exact +promise. +In the above example, **r1**, **r2** and **r3** will receive the same exact promise. Remembered don't "cache" the result of your async operation: it caches the promise itself. This is very useful for concurrent tasks where you have the same heavy call and you want it to happen just once. 
@@ -44,8 +46,102 @@ In this example, the promise is resolved in 200 milliseconds, but the ttl is 1 s If you want for the promise to be remembered just while it is not resolved, you can use **ttl** 0. In this case, while the promise is pending, Remembered will return the same reference, but, after it is resolved, then callback will be called -Another option is to use the **wrap** method: +## Cache Strategies and Eviction Policies + +Remembered supports different cache strategies with configurable eviction policies to manage memory usage: + +### Available Eviction Policies + +- **LRU (Least Recently Used)**: Removes the least recently accessed item when capacity is reached +- **MRU (Most Recently Used)**: Removes the most recently accessed item when capacity is reached +- **FIFO (First In, First Out)**: Removes the oldest item when capacity is reached +- **Simple**: No eviction policy, stores items indefinitely (default) + +### Configuration Options + +```ts +interface RememberedConfig { + ttl: number | TtlFunction; + evictionPolicy?: 'LRU' | 'MRU' | 'FIFO'; + capacity?: number; // Required when using eviction policies + nonBlocking?: boolean; + onReused?: (key: string) => void; +} +``` + +### Examples + +#### LRU Cache with Capacity Limit + +```ts +const remembered = new Remembered({ + ttl: 5000, + evictionPolicy: 'LRU', + capacity: 100 +}); +``` + +#### MRU Cache for Recent Items + +```ts +const remembered = new Remembered({ + ttl: 3000, + evictionPolicy: 'MRU', + capacity: 50 +}); +``` + +#### FIFO Cache for Time-based Eviction + +```ts +const remembered = new Remembered({ + ttl: 10000, + evictionPolicy: 'FIFO', + capacity: 200 +}); +``` + +#### Simple Cache (Default) + +```ts +const remembered = new Remembered({ + ttl: 5000 + // No eviction policy = Simple cache +}); +``` + +### Advanced Configuration + +#### Dynamic TTL Function + +```ts +const remembered = new Remembered({ + ttl: (key: string, response?: any) => { + // Different TTL based on key or response + 
return key.startsWith('user:') ? 30000 : 5000; + }, + evictionPolicy: 'LRU', + capacity: 1000 +}); +``` +#### Non-blocking Mode with Callback + +```ts +const remembered = new Remembered({ + ttl: 5000, + evictionPolicy: 'LRU', + capacity: 100, + nonBlocking: true, + onReused: (key: string) => { + console.log(`Cache hit for key: ${key}`); + } +}); +``` + +## Wrapping Functions + +Another option is to use the **wrap** method: ```ts const callback = () => new Promise((resolve) => { @@ -66,6 +162,28 @@ The wrap method returns a version of your function that receives the exact same The given ttl is meant to be readonly. So, if you change the ttl value of the provided, it will not take effect on the previous Remembered instances. +# Cache Strategy Details + +## LRU (Least Recently Used) +- Best for: Frequently accessed data +- Evicts: Least recently accessed items +- Use case: General purpose caching, user sessions + +## MRU (Most Recently Used) +- Best for: Data that becomes stale quickly +- Evicts: Most recently accessed items +- Use case: Temporary data, rate limiting + +## FIFO (First In, First Out) +- Best for: Time-sensitive data +- Evicts: Oldest items regardless of access +- Use case: Logs, time-series data + +## Simple +- Best for: Small datasets, development +- Evicts: Never (memory grows indefinitely) +- Use case: Testing, small applications + # Saudade There is no proper translation for the word *saudade* in English. diff --git a/docs/classes/remembered.md b/docs/classes/remembered.md index 87f1474..48b9e89 100644 --- a/docs/classes/remembered.md +++ b/docs/classes/remembered.md @@ -2,7 +2,7 @@ # Class: Remembered -A class that help you remember previous calls for you functions, to avoid new calls while it is not forgotten +A class that help you remember previous calls for you functions, to avoid new calls while it is not forgotten. Supports configurable cache strategies with eviction policies to manage memory usage. 
## Table of contents @@ -28,14 +28,45 @@ A class that help you remember previous calls for you functions, to avoid new ca \+ **new Remembered**(`config?`: RememberedConfig): [*Remembered*](remembered.md) +Creates a new Remembered instance with optional cache configuration. + #### Parameters: -Name | Type | -:------ | :------ | -`config` | RememberedConfig | +Name | Type | Description | +:------ | :------ | :------ | +`config` | RememberedConfig | Configuration object with TTL and optional cache strategy settings | **Returns:** [*Remembered*](remembered.md) +#### Configuration Options: + +- `ttl`: Time to live in milliseconds or a function that returns TTL +- `evictionPolicy`: Optional cache eviction policy ('LRU', 'MRU', 'FIFO', or undefined for Simple) +- `capacity`: Maximum number of items to store (required when using eviction policies) +- `nonBlocking`: Whether to keep persistent last result for background updates +- `onReused`: Callback function called when a cached value is reused + +#### Examples: + +```ts +// Simple cache (default) +const remembered = new Remembered({ ttl: 1000 }); + +// LRU cache with capacity limit +const remembered = new Remembered({ + ttl: 5000, + evictionPolicy: 'LRU', + capacity: 100 +}); + +// Dynamic TTL with MRU eviction +const remembered = new Remembered({ + ttl: (key, response) => key.startsWith('user:') ? 30000 : 5000, + evictionPolicy: 'MRU', + capacity: 50 +}); +``` + ## Properties ### map