/**
* Node in the doubly-linked list used for LRU tracking.
* Each node represents a cache entry with bidirectional pointers.
*/ "use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
Object.defineProperty(exports, "LRUCache", {
enumerable: true,
get: function() {
return LRUCache;
}
});
class LRUNode {
    constructor(key, data, size) {
        // Doubly-linked-list pointers; null while the node is disconnected.
        this.prev = null;
        this.next = null;
        this.key = key; // cache key, mirrored from the Map so eviction can delete in O(1)
        this.data = data; // the cached value
        this.size = size; // cost of this entry counted against maxSize
    }
}
/**
 * Sentinel node used for head/tail boundaries.
 * These nodes don't contain actual cache data but simplify list operations.
 */
class SentinelNode {
    constructor() {
        this.prev = null;
        this.next = null;
    }
}
class LRUCache {
    /**
     * Size-aware LRU cache: a Map for O(1) key lookup plus a doubly-linked
     * list (most recent at head, least recent at tail) for O(1) recency updates.
     *
     * @param {number} maxSize - Total size budget; exceeding it triggers eviction.
     * @param {(value: any) => number} [calculateSize] - Optional per-value size
     *     function. When omitted (or when it returns null/undefined) each item
     *     counts as 1, making maxSize an item-count limit.
     */
    constructor(maxSize, calculateSize) {
        this.cache = new Map();
        this.totalSize = 0;
        this.maxSize = maxSize;
        this.calculateSize = calculateSize;
        // Sentinel nodes mean the list is never truly empty, so insert/remove
        // never need null checks: HEAD <-> TAIL represents an empty cache.
        this.head = new SentinelNode();
        this.tail = new SentinelNode();
        this.head.next = this.tail;
        this.tail.prev = this.head;
    }
    /**
     * Adds a node immediately after the head (marks it most recently used).
     * PRECONDITION: node must be disconnected (prev/next are null).
     */
    addToHead(node) {
        node.prev = this.head;
        node.next = this.head.next;
        // head.next is always non-null (points to tail or another node).
        this.head.next.prev = node;
        this.head.next = node;
    }
    /**
     * Unlinks a node from the list, stitching its neighbors together.
     * PRECONDITION: node must be connected (prev/next are non-null).
     */
    removeNode(node) {
        node.prev.next = node.next;
        node.next.prev = node.prev;
    }
    /**
     * Re-links an existing node at the head position — the core LRU
     * operation: accessed items become most recent.
     */
    moveToHead(node) {
        this.removeNode(node);
        this.addToHead(node);
    }
    /**
     * Unlinks and returns the least recently used node (the one before tail).
     * Called during eviction when the cache exceeds capacity.
     * PRECONDITION: cache is not empty (ensured by caller), so tail.prev
     * is always an LRUNode here.
     */
    removeTail() {
        const lastNode = this.tail.prev;
        this.removeNode(lastNode);
        return lastNode;
    }
    /**
     * Sets a key-value pair in the cache.
     * If the key exists, updates the value and moves it to the head.
     * If new, adds at head and evicts from the tail as necessary.
     * If the single item is larger than maxSize it is not stored, and any
     * existing entry under the same key is removed (a stale value must not
     * survive a rejected update).
     *
     * Time Complexity:
     * - O(1) for uniform item sizes
     * - O(k) where k is the number of items evicted (can be O(N) for variable sizes)
     *
     * @param {*} key - Cache key.
     * @param {*} value - Value to store.
     */
    set(key, value) {
        const size = this.calculateSize?.call(this, value) ?? 1;
        if (size > this.maxSize) {
            console.warn('Single item size exceeds maxSize');
            // BUGFIX: also drop any previous entry for this key — keeping it
            // would let a later get(key) return the stale, superseded value.
            this.remove(key);
            return;
        }
        const existing = this.cache.get(key);
        if (existing) {
            // Update in place: adjust the size accounting, then mark as
            // most recently used.
            existing.data = value;
            this.totalSize = this.totalSize - existing.size + size;
            existing.size = size;
            this.moveToHead(existing);
        } else {
            // Insert a fresh node at the most-recent position.
            const newNode = new LRUNode(key, value, size);
            this.cache.set(key, newNode);
            this.addToHead(newNode);
            this.totalSize += size;
        }
        // Evict least recently used entries until back under budget.
        while (this.totalSize > this.maxSize && this.cache.size > 0) {
            const tail = this.removeTail();
            this.cache.delete(tail.key);
            this.totalSize -= tail.size;
        }
    }
    /**
     * Checks if a key exists in the cache.
     * Pure query — does NOT update LRU order.
     *
     * Time Complexity: O(1)
     * @returns {boolean}
     */
    has(key) {
        return this.cache.has(key);
    }
    /**
     * Retrieves a value by key and marks it as most recently used.
     *
     * Time Complexity: O(1)
     * @returns {*} The cached value, or undefined on a miss.
     */
    get(key) {
        const node = this.cache.get(key);
        if (!node) return undefined;
        // Touch: move to head so this entry is evicted last.
        this.moveToHead(node);
        return node.data;
    }
    /**
     * Iterates [key, value] entries from most recently used to least
     * recently used. Iteration does not change recency order.
     */
    *[Symbol.iterator]() {
        let current = this.head.next;
        while (current && current !== this.tail) {
            // Every node strictly between head and tail is an LRUNode.
            const node = current;
            yield [node.key, node.data];
            current = current.next;
        }
    }
    /**
     * Removes a specific key from the cache, updating both the hash map
     * and the linked list. No-op if the key is absent.
     *
     * Time Complexity: O(1)
     */
    remove(key) {
        const node = this.cache.get(key);
        if (!node) return;
        this.removeNode(node);
        this.cache.delete(key);
        this.totalSize -= node.size;
    }
    /**
     * Number of items currently in the cache.
     * @returns {number}
     */
    get size() {
        return this.cache.size;
    }
    /**
     * Current total size of all cached items, per calculateSize if provided.
     * @returns {number}
     */
    get currentSize() {
        return this.totalSize;
    }
}
//# sourceMappingURL=lru-cache.js.map |