Dare2Solve
An LRU cache is designed to store a fixed number of key-value pairs with efficient operations:
get(key): Retrieves the value associated with the key. If the key exists, it marks it as recently used (moves it to the front); otherwise it returns -1.
put(key, value): Updates the value if the key exists or adds a new key-value pair. If adding a new pair exceeds the capacity, it evicts the least recently used key.
The goal is to achieve O(1) average time complexity for both get and put operations.
Data structures:
- cache (hash map): Stores key-value pairs for fast access.
- dll (doubly linked list): Maintains the order of keys based on usage, where the front represents the most recently used and the back represents the least recently used.
- Node: Each node contains key, value, prev, and next pointers.

Initialization (__init__): Sets up the empty cache dictionary and the doubly linked list (dll) with sentinel head and tail nodes.

get(key) operation:
- Look the key up in the cache.
- If present, move its node to the front of the dll to mark it as recently used, and return its value.
- Otherwise, return -1.

put(key, value) operation:
- If the key is already in the cache, update its value and move its node to the front of the dll to mark it as recently used.
- Otherwise, create a new node at the front of the dll and store it in the cache.
- If the capacity is exceeded, evict the least recently used node (from the back of the dll and from the cache).

Utility methods:
- _move_to_front(node): Moves a given node to the front of the dll.
- _remove_node(node): Removes a node from the dll.

Both get and put operations run in O(1) average time due to the use of hash maps (cache) for fast lookups and doubly linked lists (dll) for efficient node management.
O(capacity) for storing key-value pairs and O(capacity) for maintaining the doubly linked list structure, making it linear with respect to the cache capacity.
// LRU cache backed by a doubly linked list of keys (most recently used at
// the front) plus a hash map from key to {value, list iterator}, giving
// O(1) lookup, update, and eviction.
class LRUCache {
public:
    // capacity: maximum number of key-value pairs held at once.
    LRUCache(int capacity) : capacity(capacity) {}

    // Returns the value for `key`, marking it most recently used,
    // or -1 if the key is absent.
    int get(int key) {
        auto it = cache.find(key);
        if (it == cache.end()) {
            return -1;
        }
        // splice moves the key's node to the front of `usage` in O(1).
        usage.splice(usage.begin(), usage, it->second.second);
        return it->second.first;
    }

    // Inserts or updates `key`, marking it most recently used.  Evicts the
    // least recently used entry when inserting into a full cache.
    void put(int key, int value) {
        auto it = cache.find(key);
        if (it != cache.end()) {
            // Existing key: update the value and promote to the front.
            it->second.first = value;
            usage.splice(usage.begin(), usage, it->second.second);
            return;
        }
        // Cast avoids a signed/unsigned comparison between size_t and int.
        if (static_cast<int>(cache.size()) >= capacity) {
            // Guard: with capacity <= 0 the list is empty and the original
            // code called back()/pop_back() on it — undefined behavior.
            if (usage.empty()) {
                return;
            }
            // Evict the least recently used key (back of the list).
            int lruKey = usage.back();
            usage.pop_back();
            cache.erase(lruKey);
        }
        // Insert the new key-value pair at the most-recently-used position.
        usage.push_front(key);
        cache[key] = {value, usage.begin()};
    }

private:
    int capacity;          // Maximum number of entries.
    std::list<int> usage;  // Keys ordered from most to least recently used.
    // key -> {value, iterator into `usage` for O(1) splice/erase}.
    std::unordered_map<int, std::pair<int, std::list<int>::iterator>> cache;
};
class ListNode:
    """A doubly-linked-list node carrying one cached key/value pair."""

    def __init__(self, key=None, value=None):
        # Stored entry; both default to None so the same class can serve
        # as a sentinel (head/tail) node.
        self.key = key
        self.value = value
        # Neighbour links, populated when the node is spliced into a list.
        self.prev = None
        self.next = None
class LRUCache:
    """LRU cache with O(1) average-time get and put.

    A hash map (``self.cache``) maps each key to its node in a doubly
    linked list whose order tracks recency: nodes just after ``head`` are
    the most recently used, nodes just before ``tail`` the least.
    """

    class _Node:
        """Internal doubly-linked-list node for one key/value pair."""

        __slots__ = ("key", "value", "prev", "next")

        def __init__(self, key=None, value=None):
            self.key = key
            self.value = value
            self.prev = None
            self.next = None

    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = {}  # key -> _Node
        # Sentinel nodes avoid None checks when splicing at either end.
        self.head = self._Node()
        self.tail = self._Node()
        self.head.next = self.tail
        self.tail.prev = self.head

    def get(self, key: int) -> int:
        """Return the value for `key` (marking it most recently used) or -1."""
        if key not in self.cache:
            return -1
        node = self.cache[key]
        self._move_to_front(node)
        return node.value

    def put(self, key: int, value: int) -> None:
        """Insert or update `key`; evict the LRU entry when at capacity."""
        if key in self.cache:
            node = self.cache[key]
            node.value = value
            self._move_to_front(node)
            return
        # A non-positive capacity can never store anything.  Without this
        # guard, _evict() no-ops on the empty cache and the insert below
        # grows the cache past capacity == 0.
        if self.capacity <= 0:
            return
        # `>=` (not `==`) keeps the invariant even if the cache somehow
        # exceeded capacity earlier.
        if len(self.cache) >= self.capacity:
            self._evict()
        new_node = self._Node(key, value)
        self.cache[key] = new_node
        self._add_to_front(new_node)

    def _move_to_front(self, node) -> None:
        """Unlink `node` and re-insert it right after the head sentinel."""
        node.prev.next = node.next
        node.next.prev = node.prev
        self._add_to_front(node)

    def _add_to_front(self, node) -> None:
        """Link `node` directly after the head sentinel (most-recent spot)."""
        first = self.head.next
        self.head.next = node
        node.prev = self.head
        node.next = first
        first.prev = node

    def _evict(self) -> None:
        """Drop the least recently used node (the one just before the tail)."""
        if self.cache:
            victim = self.tail.prev
            del self.cache[victim.key]
            victim.prev.next = self.tail
            self.tail.prev = victim.prev
// LRU cache built from a HashMap for key -> value storage and a
// LinkedHashSet whose iteration order tracks recency: the first element
// is the least recently used key, the last the most recently used.
class LRUCache {
    private final int capacity;
    private final HashMap<Integer, Integer> cache;
    private final LinkedHashSet<Integer> usage;

    public LRUCache(int capacity) {
        this.capacity = capacity;
        this.cache = new HashMap<>();
        this.usage = new LinkedHashSet<>();
    }

    // Returns the cached value, promoting the key to most recently used,
    // or -1 when the key is absent.
    public int get(int key) {
        Integer value = cache.get(key);
        if (value == null) {
            // Values are always ints inserted via put, so null means absent.
            return -1;
        }
        touch(key);
        return value;
    }

    // Inserts or updates a mapping, evicting the least recently used key
    // when a brand-new mapping would exceed capacity.
    public void put(int key, int value) {
        if (cache.containsKey(key)) {
            // Drop the stale position so the add below re-appends it.
            usage.remove(key);
        } else if (cache.size() >= capacity) {
            int oldest = usage.iterator().next();
            usage.remove(oldest);
            cache.remove(oldest);
        }
        cache.put(key, value);
        usage.add(key);
    }

    // Re-append the key so it becomes the newest element of the set.
    private void touch(int key) {
        usage.remove(key);
        usage.add(key);
    }
}
/**
 * LRU cache backed by a single Map: JavaScript Maps iterate in insertion
 * order, so the first key is always the least recently used, and deleting
 * then re-setting a key promotes it to most recently used.
 * @param {number} capacity maximum number of entries held at once
 */
var LRUCache = function(capacity) {
    this.queue = new Map();
    this.capacity = capacity;
};

/**
 * Returns the value for `key` (promoting it to most recently used) or -1.
 * @param {number} key
 * @return {number}
 */
LRUCache.prototype.get = function(key) {
    if (!this.queue.has(key)) {
        return -1;
    }
    const value = this.queue.get(key);
    // Delete + set moves the key to the back of the Map's insertion order.
    this.queue.delete(key);
    this.queue.set(key, value);
    return value;
};

/**
 * Inserts or updates `key`, evicting the least recently used entry when a
 * brand-new key would exceed capacity.
 * @param {number} key
 * @param {number} value
 * @return {void}
 */
LRUCache.prototype.put = function(key, value) {
    // A non-positive capacity can never hold an entry; the original code
    // stored one anyway because deleting from an empty Map is a no-op.
    if (this.capacity <= 0) {
        return;
    }
    if (this.queue.has(key)) {
        // Remove so the set below re-inserts at most-recently-used position.
        this.queue.delete(key);
    } else if (this.queue.size >= this.capacity) {
        // First key in iteration order is the least recently used.
        const oldest = this.queue.keys().next().value;
        this.queue.delete(oldest);
    }
    this.queue.set(key, value);
};