Add an example of the LRU (Least Recently Used) Cache implementation (#980)

* Add an example of the LRU Cache implementation.

* Promote the node on set() as well.

* Add LRU Cache images.
This commit is contained in:
Oleksii Trekhleb 2023-01-23 18:03:40 +01:00 committed by GitHub
parent 6c335c5d83
commit e4f2ccdbec
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 339 additions and 0 deletions

View File

@ -64,6 +64,7 @@ the data.
* `A` [Graph](src/data-structures/graph) (both directed and undirected)
* `A` [Disjoint Set](src/data-structures/disjoint-set)
* `A` [Bloom Filter](src/data-structures/bloom-filter)
* `A` [LRU Cache](src/data-structures/lru-cache/) - Least Recently Used (LRU) cache
## Algorithms

View File

@ -0,0 +1,134 @@
/* eslint-disable no-param-reassign */
import LinkedListNode from './LinkedListNode';
/**
* Implementation of the LRU (Least Recently Used) Cache
* based on the HashMap and Doubly Linked List data-structures.
*
* The current implementation allows fast (O(1)) read and write operations.
*
* At any moment in time the LRU Cache holds no more than "capacity" items in it.
*/
class LRUCache {
  /**
   * Creates a cache instance of a specific capacity.
   * @param {number} capacity - Maximum number of items the cache may hold.
   */
  constructor(capacity) {
    this.capacity = capacity; // How many items to store in cache at max.
    // Null-prototype object: a plain `{}` would resolve keys like
    // "constructor", "toString" or "__proto__" through Object.prototype,
    // making get()/set() misbehave (or mutate the map's prototype) for
    // such keys even when they were never cached.
    this.nodesMap = Object.create(null); // The quick links to each linked list node in cache.
    this.size = 0; // The number of items that are currently stored in the cache.
    this.head = new LinkedListNode(); // Sentinel Head (first) linked list node.
    this.tail = new LinkedListNode(); // Sentinel Tail (last) linked list node.
  }

  /**
   * Returns the cached value by its key.
   * Time complexity: O(1).
   * @param {string} key
   * @returns {any} The cached value, or undefined when the key is absent.
   */
  get(key) {
    const node = this.nodesMap[key];
    if (node === undefined) return undefined;
    // Reading a key makes it the most recently used one.
    this.promote(node);
    return node.val;
  }

  /**
   * Sets the value to cache by its key.
   * Time complexity: O(1).
   * @param {string} key
   * @param {any} val
   */
  set(key, val) {
    const existingNode = this.nodesMap[key];
    if (existingNode !== undefined) {
      existingNode.val = val;
      // Writing an existing key also makes it the most recently used one.
      this.promote(existingNode);
    } else {
      this.append(new LinkedListNode(key, val));
    }
  }

  /**
   * Promotes the node to the end of the linked list.
   * It means that the node is the most recently used one.
   * It also reduces the chance for such node to get evicted from cache.
   * @param {LinkedListNode} node
   */
  promote(node) {
    this.evict(node);
    this.append(node);
  }

  /**
   * Appends a new node to the end of the cache linked list.
   * Evicts the least recently used node if the capacity gets exceeded.
   * @param {LinkedListNode} node
   */
  append(node) {
    this.nodesMap[node.key] = node;
    if (!this.head.next) {
      // First node to append.
      this.head.next = node;
      this.tail.prev = node;
      node.prev = this.head;
      node.next = this.tail;
    } else {
      // Append to an existing tail.
      const oldTail = this.tail.prev;
      oldTail.next = node;
      node.prev = oldTail;
      node.next = this.tail;
      this.tail.prev = node;
    }
    this.size += 1;
    if (this.size > this.capacity) {
      // The node right after the head is the least recently used one.
      this.evict(this.head.next);
    }
  }

  /**
   * Evicts (removes) the node from cache linked list.
   * @param {LinkedListNode} node
   */
  evict(node) {
    delete this.nodesMap[node.key];
    this.size -= 1;
    const prevNode = node.prev;
    const nextNode = node.next;
    // If it is the one and only node: reset the list to the empty state.
    // (size is already 0 here after the decrement above.)
    if (prevNode === this.head && nextNode === this.tail) {
      this.head.next = null;
      this.tail.prev = null;
      return;
    }
    // If this is a Head node.
    if (prevNode === this.head) {
      nextNode.prev = this.head;
      this.head.next = nextNode;
      return;
    }
    // If this is a Tail node.
    if (nextNode === this.tail) {
      prevNode.next = this.tail;
      this.tail.prev = prevNode;
      return;
    }
    // The node is in the middle.
    prevNode.next = nextNode;
    nextNode.prev = prevNode;
  }
}

export default LRUCache;

View File

@ -0,0 +1,17 @@
/**
 * A node of a doubly-linked list: holds a key/value pair together with
 * the links to its previous and next neighbours.
 */
class LinkedListNode {
  /**
   * Creates a doubly-linked list node.
   * @param {string} key
   * @param {any} val
   * @param {LinkedListNode} prev - Previous node (defaults to null).
   * @param {LinkedListNode} next - Next node (defaults to null).
   */
  constructor(key, val, prev = null, next = null) {
    Object.assign(this, { key, val, prev, next });
  }
}

export default LinkedListNode;

View File

@ -0,0 +1,37 @@
# Least Recently Used (LRU) Cache
A **Least Recently Used (LRU) Cache** organizes items in order of use, allowing you to quickly identify which item hasn't been used for the longest amount of time.
Picture a clothes rack, where clothes are always hung up on one side. To find the least-recently used item, look at the item on the other end of the rack.
## The problem statement
Implement the LRUCache class:
- `LRUCache(int capacity)` Initialize the LRU cache with **positive** size `capacity`.
- `int get(int key)` Return the value of the `key` if the `key` exists, otherwise return `undefined`.
- `void set(int key, int value)` Update the value of the `key` if the `key` exists. Otherwise, add the `key-value` pair to the cache. If the number of keys exceeds the `capacity` from this operation, **evict** the least recently used key.
The functions `get()` and `set()` must each run in `O(1)` average time complexity.
## Implementation
See the `LRUCache` implementation example in [LRUCache.js](./LRUCache.js). The solution uses a `HashMap` for fast `O(1)` cache items access, and a `DoublyLinkedList` for fast `O(1)` cache items promotions and eviction (to keep the maximum allowed cache capacity).
![LRU Cache](./images/lru-cache.jpg)
*Made with [okso.app](https://okso.app)*
## Costs
| | Worst Case |
|---|---|
| Space |`O(n)`|
| Get item | `O(1)` |
| Set item | `O(1)` |
## References
- [LRU Cache on LeetCode](https://leetcode.com/problems/lru-cache/solutions/244744/lru-cache/)
- [LRU Cache on InterviewCake](https://www.interviewcake.com/concept/java/lru-cache)
- [LRU Cache on Wiki](https://en.wikipedia.org/wiki/Cache_replacement_policies)

View File

@ -0,0 +1,150 @@
import LRUCache from '../LRUCache';
describe('LRUCache', () => {
// Basic reads/writes well below capacity: no eviction is expected,
// and overwriting an existing key must replace its value.
it('should set and get values to and from the cache', () => {
const cache = new LRUCache(100);
expect(cache.get('key-1')).toBeUndefined();
cache.set('key-1', 15);
cache.set('key-2', 16);
cache.set('key-3', 17);
expect(cache.get('key-1')).toBe(15);
expect(cache.get('key-2')).toBe(16);
expect(cache.get('key-3')).toBe(17);
// Repeated/reversed-order reads must keep returning the same values.
expect(cache.get('key-3')).toBe(17);
expect(cache.get('key-2')).toBe(16);
expect(cache.get('key-1')).toBe(15);
// Overwrite all three keys and read the new values back.
cache.set('key-1', 5);
cache.set('key-2', 6);
cache.set('key-3', 7);
expect(cache.get('key-1')).toBe(5);
expect(cache.get('key-2')).toBe(6);
expect(cache.get('key-3')).toBe(7);
});
// Degenerate capacity: every new key evicts the previous one.
it('should evict least recently used items from cache with cache size of 1', () => {
const cache = new LRUCache(1);
expect(cache.get('key-1')).toBeUndefined();
cache.set('key-1', 15);
expect(cache.get('key-1')).toBe(15);
cache.set('key-2', 16);
// 'key-2' pushed 'key-1' out.
expect(cache.get('key-1')).toBeUndefined();
expect(cache.get('key-2')).toBe(16);
// Overwriting the only key must not evict it.
cache.set('key-2', 17);
expect(cache.get('key-2')).toBe(17);
cache.set('key-3', 18);
cache.set('key-4', 19);
// Only the latest key survives.
expect(cache.get('key-2')).toBeUndefined();
expect(cache.get('key-3')).toBeUndefined();
expect(cache.get('key-4')).toBe(19);
});
it('should evict least recently used items from cache with cache size of 2', () => {
const cache = new LRUCache(2);
expect(cache.get('key-21')).toBeUndefined();
cache.set('key-21', 15);
expect(cache.get('key-21')).toBe(15);
cache.set('key-22', 16);
expect(cache.get('key-21')).toBe(15);
expect(cache.get('key-22')).toBe(16);
cache.set('key-22', 17);
expect(cache.get('key-22')).toBe(17);
// Adding a third key evicts 'key-21' (the least recently used one:
// 'key-22' was touched more recently by the set() above).
cache.set('key-23', 18);
expect(cache.size).toBe(2);
expect(cache.get('key-21')).toBeUndefined();
expect(cache.get('key-22')).toBe(17);
expect(cache.get('key-23')).toBe(18);
// Adding a fourth key evicts 'key-22' next.
cache.set('key-24', 19);
expect(cache.size).toBe(2);
expect(cache.get('key-21')).toBeUndefined();
expect(cache.get('key-22')).toBeUndefined();
expect(cache.get('key-23')).toBe(18);
expect(cache.get('key-24')).toBe(19);
});
it('should evict least recently used items from cache with cache size of 3', () => {
const cache = new LRUCache(3);
cache.set('key-1', 1);
cache.set('key-2', 2);
cache.set('key-3', 3);
expect(cache.get('key-1')).toBe(1);
expect(cache.get('key-2')).toBe(2);
expect(cache.get('key-3')).toBe(3);
// Overwriting 'key-3' promotes it; recency order is now 1, 2, 3.
cache.set('key-3', 4);
expect(cache.get('key-1')).toBe(1);
expect(cache.get('key-2')).toBe(2);
expect(cache.get('key-3')).toBe(4);
// The gets above promoted keys in order 1, 2, 3 again,
// so adding 'key-4' evicts 'key-1'.
cache.set('key-4', 5);
expect(cache.get('key-1')).toBeUndefined();
expect(cache.get('key-2')).toBe(2);
expect(cache.get('key-3')).toBe(4);
expect(cache.get('key-4')).toBe(5);
});
// Regression test for the second commit message item: set() on an
// existing key must promote the node, not just update its value.
it('should promote the node while calling set() method', () => {
const cache = new LRUCache(2);
cache.set('2', 1);
cache.set('1', 1);
cache.set('2', 3);
// '2' was promoted by the set() above, so '1' is evicted here.
cache.set('4', 1);
expect(cache.get('1')).toBeUndefined();
expect(cache.get('2')).toBe(3);
});
it('should promote the recently accessed item with cache size of 3', () => {
const cache = new LRUCache(3);
cache.set('key-1', 1);
cache.set('key-2', 2);
cache.set('key-3', 3);
// Reading 'key-1' promotes it, making 'key-2' the LRU item.
expect(cache.get('key-1')).toBe(1);
cache.set('key-4', 4);
expect(cache.get('key-1')).toBe(1);
expect(cache.get('key-3')).toBe(3);
expect(cache.get('key-4')).toBe(4);
expect(cache.get('key-2')).toBeUndefined();
});
it('should promote the recently accessed item with cache size of 4', () => {
const cache = new LRUCache(4);
cache.set('key-1', 1);
cache.set('key-2', 2);
cache.set('key-3', 3);
cache.set('key-4', 4);
// Reads in reverse order flip the recency ranking: 'key-4' becomes LRU.
expect(cache.get('key-4')).toBe(4);
expect(cache.get('key-3')).toBe(3);
expect(cache.get('key-2')).toBe(2);
expect(cache.get('key-1')).toBe(1);
cache.set('key-5', 5);
expect(cache.get('key-1')).toBe(1);
expect(cache.get('key-2')).toBe(2);
expect(cache.get('key-3')).toBe(3);
expect(cache.get('key-4')).toBeUndefined();
expect(cache.get('key-5')).toBe(5);
// The gets above promoted 1, 2, 3 and 5; 'key-1' is LRU again.
cache.set('key-6', 6);
expect(cache.get('key-1')).toBeUndefined();
expect(cache.get('key-2')).toBe(2);
expect(cache.get('key-3')).toBe(3);
expect(cache.get('key-4')).toBeUndefined();
expect(cache.get('key-5')).toBe(5);
expect(cache.get('key-6')).toBe(6);
});
});

Binary file not shown.

After

Width:  |  Height:  |  Size: 662 KiB