
Intuition
An LRU cache is designed to store a fixed number of key-value pairs with efficient operations:
- get(key): Retrieves the value associated with the key. If the key exists, it is marked as recently used (moved to the front) and its value is returned. If not, returns -1.
- put(key, value): Updates the value if the key exists, or adds a new key-value pair. If adding a new pair exceeds the capacity, the least recently used key is evicted.
The goal is to achieve O(1) average time complexity for both get and put operations.
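For example, with a capacity of 2 the cache behaves as follows (a short Python trace, assuming the LRUCache class defined below):

cache = LRUCache(2)
cache.put(1, 1)        # cache holds {1: 1}
cache.put(2, 2)        # cache holds {1: 1, 2: 2}; key 2 is most recent
print(cache.get(1))    # 1; key 1 becomes most recent
cache.put(3, 3)        # capacity exceeded; evicts key 2 (least recently used)
print(cache.get(2))    # -1; key 2 was evicted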
Approach
Data Structures:
- Dictionary (cache): Stores key-value pairs for fast lookups.
- Doubly Linked List (dll): Maintains the order of keys by usage; the front holds the most recently used key and the back holds the least recently used.
- Nodes (ListNode): Each node holds key, value, prev, and next pointers.
Operations:
- Initialization (__init__): Stores the given capacity and sets up the cache dictionary and the doubly linked list with sentinel head and tail nodes.
- get(key): If the key exists, retrieve its node from the cache, move it to the front of the dll to mark it as recently used, and return its value. If the key doesn't exist, return -1.
- put(key, value): If the key exists, update the node's value and move it to the front of the dll. If the key doesn't exist, create a new node, add it to the front of the dll, and store it in the cache; if this exceeds the capacity, first remove the least recently used node from the back of the dll and delete its entry from the cache.
Utility Methods:
- _move_to_front(node): Unlinks a node and reattaches it at the front of the dll.
- _add_to_front(node): Inserts a node immediately after the sentinel head.
- _evict(): Removes the least recently used node (just before the sentinel tail) and deletes its key from the cache.
A step-by-step trace of the list follows below.
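For illustration (this walkthrough is not part of the original solution), here is how the doubly linked list evolves for the earlier trace with capacity 2:

head <-> tail                              # empty cache
put(1, 1)  ->  head <-> 1 <-> tail
put(2, 2)  ->  head <-> 2 <-> 1 <-> tail
get(1)     ->  head <-> 1 <-> 2 <-> tail   # node 1 moved to the front
put(3, 3)  ->  head <-> 3 <-> 1 <-> tail   # node 2 evicted from the back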
Complexity
Time Complexity:
Both get and put run in O(1) average time, thanks to the hash map (cache) for fast lookups and the doubly linked list (dll) for constant-time node reordering.
Space Complexity:
O(capacity) for storing key-value pairs and O(capacity) for maintaining the doubly linked list structure, making it linear with respect to the cache capacity.
Code
C++
#include <list>
#include <unordered_map>
#include <utility>

class LRUCache {
public:
    LRUCache(int capacity) : capacity(capacity) {}

    int get(int key) {
        auto it = cache.find(key);
        if (it == cache.end()) {
            return -1;
        }
        // Move the accessed key to the front of the usage list (O(1) splice)
        usage.splice(usage.begin(), usage, it->second.second);
        return it->second.first;
    }

    void put(int key, int value) {
        auto it = cache.find(key);
        if (it != cache.end()) {
            // Update the value and move the key to the front of the usage list
            it->second.first = value;
            usage.splice(usage.begin(), usage, it->second.second);
        } else {
            if (static_cast<int>(cache.size()) >= capacity) {
                // Evict the least recently used key (back of the usage list)
                int lruKey = usage.back();
                usage.pop_back();
                cache.erase(lruKey);
            }
            // Insert the new key-value pair at the front
            usage.push_front(key);
            cache[key] = {value, usage.begin()};
        }
    }

private:
    int capacity;
    std::list<int> usage; // Keys in order of use; most recent at the front
    std::unordered_map<int, std::pair<int, std::list<int>::iterator>> cache; // key -> (value, iterator into usage)
};

Python
class ListNode:
    def __init__(self, key=None, value=None):
        self.key = key
        self.value = value
        self.prev = None
        self.next = None


class LRUCache:
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.cache = {}
        # Sentinel head and tail nodes keep the linking logic free of null checks
        self.head = ListNode()
        self.tail = ListNode()
        self.head.next = self.tail
        self.tail.prev = self.head

    def get(self, key: int) -> int:
        if key not in self.cache:
            return -1
        node = self.cache[key]
        self._move_to_front(node)
        return node.value

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            node = self.cache[key]
            node.value = value
            self._move_to_front(node)
        else:
            if len(self.cache) == self.capacity:
                self._evict()
            new_node = ListNode(key, value)
            self.cache[key] = new_node
            self._add_to_front(new_node)

    def _move_to_front(self, node: ListNode) -> None:
        # Unlink the node from its current position
        node.prev.next = node.next
        node.next.prev = node.prev
        # Relink it just after the head
        self._add_to_front(node)

    def _add_to_front(self, node: ListNode) -> None:
        next_node = self.head.next
        self.head.next = node
        node.prev = self.head
        node.next = next_node
        next_node.prev = node

    def _evict(self) -> None:
        if len(self.cache) > 0:
            # The node just before the tail sentinel is the least recently used
            evict_node = self.tail.prev
            del self.cache[evict_node.key]
            evict_node.prev.next = self.tail
            self.tail.prev = evict_node.prev
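As an aside (not part of the original solution), Python's collections.OrderedDict makes the same idea much shorter: move_to_end and popitem(last=False) are both O(1). A minimal sketch, using the hypothetical name LRUCacheOD:

from collections import OrderedDict

class LRUCacheOD:
    # Hypothetical alternative; not the solution's hand-rolled linked list
    def __init__(self, capacity: int):
        self.capacity = capacity
        self.data = OrderedDict()

    def get(self, key: int) -> int:
        if key not in self.data:
            return -1
        self.data.move_to_end(key)  # mark as most recently used
        return self.data[key]

    def put(self, key: int, value: int) -> None:
        if key in self.data:
            self.data.move_to_end(key)
        self.data[key] = value
        if len(self.data) > self.capacity:
            self.data.popitem(last=False)  # evict least recently used

The hand-rolled linked list above makes the mechanics explicit, while OrderedDict delegates them to the standard library.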
Java
import java.util.HashMap;
import java.util.LinkedHashSet;

class LRUCache {
    private final int capacity;
    private final HashMap<Integer, Integer> cache;
    private final LinkedHashSet<Integer> usage;

    public LRUCache(int capacity) {
        this.capacity = capacity;
        this.cache = new HashMap<>();
        this.usage = new LinkedHashSet<>();
    }

    public int get(int key) {
        if (!cache.containsKey(key)) {
            return -1;
        }
        // Move the accessed key to the end to mark it as most recently used
        usage.remove(key);
        usage.add(key);
        return cache.get(key);
    }

    public void put(int key, int value) {
        if (cache.containsKey(key)) {
            // Remove the old key so re-adding it refreshes its position
            usage.remove(key);
        } else if (cache.size() >= capacity) {
            // Evict the least recently used key (first in iteration order)
            int lruKey = usage.iterator().next();
            usage.remove(lruKey);
            cache.remove(lruKey);
        }
        cache.put(key, value);
        usage.add(key);
    }
}

JavaScript
/**
 * @param {number} capacity
 */
var LRUCache = function(capacity) {
    // Map preserves insertion order: oldest key first, most recent last
    this.queue = new Map();
    this.capacity = capacity;
};

/**
 * @param {number} key
 * @return {number}
 */
LRUCache.prototype.get = function(key) {
    if (this.queue.has(key)) {
        // Re-insert the key so it moves to the most recently used position
        const value = this.queue.get(key);
        this.queue.delete(key);
        this.queue.set(key, value);
        return value;
    }
    return -1;
};

/**
 * @param {number} key
 * @param {number} value
 * @return {void}
 */
LRUCache.prototype.put = function(key, value) {
    if (this.queue.has(key)) {
        this.queue.delete(key);
        this.queue.set(key, value);
    } else {
        if (this.queue.size >= this.capacity) {
            // Evict the least recently used key (first in insertion order)
            const [firstKey] = this.queue.keys();
            this.queue.delete(firstKey);
        }
        this.queue.set(key, value);
    }
};