We store all (key, value) pairs in a list.
To follow LRU (Least Recently Used) behavior:
Initialization
Create an empty list cache and store the given capacity.
get(key)
Scan the list for key. If found, move that pair to the end (most recently used) and return its value. If not found, return -1.
put(key, value)
If key already exists, update its value and move the pair to the end. Otherwise, if the cache is at capacity, evict the first pair (the least recently used), then append [key, value] to the end.

class LRUCache:
    def __init__(self, capacity: int):
        self.cache = []  # list of [key, value] pairs, oldest first
        self.capacity = capacity

    def get(self, key: int) -> int:
        for i in range(len(self.cache)):
            if self.cache[i][0] == key:
                # Move the pair to the end to mark it as most recently used.
                tmp = self.cache.pop(i)
                self.cache.append(tmp)
                return tmp[1]
        return -1

    def put(self, key: int, value: int) -> None:
        for i in range(len(self.cache)):
            if self.cache[i][0] == key:
                # Update the value and mark the pair as most recently used.
                tmp = self.cache.pop(i)
                tmp[1] = value
                self.cache.append(tmp)
                return
        if self.capacity == len(self.cache):
            # Evict the least recently used pair (front of the list).
            self.cache.pop(0)
        self.cache.append([key, value])
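As a quick sanity check, here is a minimal usage sketch run against the class above; the operation sequence is a made-up example, not taken from the problem statement.

cache = LRUCache(2)
cache.put(1, 1)          # cache holds [1, 1]
cache.put(2, 2)          # cache holds [1, 1], [2, 2]
print(cache.get(1))      # 1, and key 1 becomes most recently used
cache.put(3, 3)          # at capacity: evicts key 2, the least recently used
print(cache.get(2))      # -1, key 2 was evicted

Note that every get and put scans the list, so both operations are O(n); the next approach removes that scan.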
We want all operations to be O(1) while still following LRU (Least Recently Used) rules.
To do that, we combine a hash map with a doubly linked list.
We keep every value in a linked-list node and a hash map from key to node, with nodes ordered from least recently used to most recently used.
Whenever we read or write a key, we move its node to the most recently used end of the list; whenever we exceed capacity, we remove the node at the least recently used end.
Dummy left and right nodes make insert/remove logic cleaner.
Data Structures
A hash map cache that maps key → node.
A left dummy node: sits before the least recently used node.
A right dummy node: sits after the most recently used node.
Helper: remove(node)
Unlink node from the list by connecting its prev and next nodes.
Helper: insert(node)
Link node just before right (mark it as most recently used).
get(key)
If key is not in cache, return -1. Otherwise, move its node next to right (mark it as recently used) and return its value.
put(key, value)
If key already exists, remove the old node cache[key]. Create a new node for (key, value), store it in cache, and insert it just before right. If len(cache) > capacity, remove the node right after left (this is the LRU entry) and delete its key from cache.
This way, both get and put run in O(1) time, and the LRU policy is always maintained.
class Node:
    def __init__(self, key, val):
        self.key, self.val = key, val
        self.prev = self.next = None

class LRUCache:
    def __init__(self, capacity: int):
        self.cap = capacity
        self.cache = {}  # map key to node
        # Dummy nodes: left sits before the LRU node, right sits after the MRU node.
        self.left, self.right = Node(0, 0), Node(0, 0)
        self.left.next, self.right.prev = self.right, self.left

    def remove(self, node):
        # Unlink node by connecting its neighbors to each other.
        prev, nxt = node.prev, node.next
        prev.next, nxt.prev = nxt, prev

    def insert(self, node):
        # Link node just before the right dummy (most recently used position).
        prev, nxt = self.right.prev, self.right
        prev.next = nxt.prev = node
        node.next, node.prev = nxt, prev

    def get(self, key: int) -> int:
        if key in self.cache:
            self.remove(self.cache[key])
            self.insert(self.cache[key])
            return self.cache[key].val
        return -1

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            self.remove(self.cache[key])
        self.cache[key] = Node(key, value)
        self.insert(self.cache[key])

        if len(self.cache) > self.cap:
            # Evict the least recently used node (right after the left dummy).
            lru = self.left.next
            self.remove(lru)
            del self.cache[lru.key]
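To see the ordering the dummy nodes maintain, here is a small hypothetical debug helper (not part of the solution) that walks from the left dummy toward the right dummy and collects keys from least to most recently used, using the class above.

# Hypothetical debug helper: walk left -> right and collect keys in LRU -> MRU order.
def lru_order(cache: "LRUCache") -> list:
    keys = []
    node = cache.left.next
    while node is not cache.right:
        keys.append(node.key)
        node = node.next
    return keys

cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)
cache.get(1)
print(lru_order(cache))  # [2, 1]: key 2 is least recently used, key 1 most recent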
Many languages provide a built-in ordered map / dictionary that remembers insertion order and can move or remove entries efficiently.
This is perfect for an LRU cache: the oldest entry in the map is the least recently used key, and the newest entry is the most recently used one.
So the ordered map itself handles the ordering; we only move a key to the end whenever it is accessed and pop the oldest entry when we exceed capacity.
This gives a clean and concise LRU implementation using library support.
Initialization
Create an empty ordered map cache and store the capacity in cap.
get(key)
If key is not in cache, return -1. Otherwise, move key to the “most recent” position in the ordered map and return its value.
put(key, value)
If key is already in cache, first move key to the “most recent” position. Then set cache[key] = value; a key that is not yet in cache is inserted at the “most recent” position. If the size of cache is now greater than cap, pop the oldest entry (the least recently used key).
This uses the built-in ordered map to achieve LRU behavior with O(1) average time for both get and put.
from collections import OrderedDict

class LRUCache:
    def __init__(self, capacity: int):
        self.cache = OrderedDict()
        self.cap = capacity

    def get(self, key: int) -> int:
        if key not in self.cache:
            return -1
        # Mark key as most recently used.
        self.cache.move_to_end(key)
        return self.cache[key]

    def put(self, key: int, value: int) -> None:
        if key in self.cache:
            # Existing key: mark as most recently used before updating its value.
            self.cache.move_to_end(key)
        self.cache[key] = value
        if len(self.cache) > self.cap:
            # Evict the least recently used key (front of the ordered dict).
            self.cache.popitem(last=False)
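The OrderedDict version behaves the same as the previous two implementations; here is a quick check with the same made-up operation sequence as before.

cache = LRUCache(2)
cache.put(1, 1)
cache.put(2, 2)
print(cache.get(1))   # 1, and key 1 becomes most recently used
cache.put(3, 3)       # evicts key 2, the least recently used
print(cache.get(2))   # -1, key 2 was evicted
print(cache.get(3))   # 3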