" Accessing a node updates its position to the front of the LRU list.\n",
" \"\"\"\n",
" node = self.lookup[query]\n",
" node = self.lookup.get(query)\n",
" if node is None:\n",
" return None\n",
" self.linked_list.move_to_front(node)\n",
...
...
@@ -97,7 +97,7 @@
" If the entry is new and the cache is at capacity, removes the oldest entry\n",
" before the new entry is added.\n",
" \"\"\"\n",
" node = self.lookup[query]\n",
" node = self.lookup.get(query)\n",
" if node is not None:\n",
" # Key exists in cache, update the value\n",
" node.results = results\n",
...
...
%% Cell type:markdown id: tags:
This notebook was prepared by [Donne Martin](https://github.com/donnemartin). Source and license info is on [GitHub](https://github.com/donnemartin/system-design-primer).
%% Cell type:markdown id: tags:
# Design an LRU cache
%% Cell type:markdown id: tags:
## Constraints and assumptions
* What are we caching?
 * We are caching the results of web queries
* Can we assume inputs are valid or do we have to validate them?
* Assume they're valid
* Can we assume this fits memory?
* Yes
%% Cell type:markdown id: tags:
## Solution
%% Cell type:code id: tags:
``` python
%%writefile lru_cache.py
class Node(object):
    """A single cache entry in the doubly linked recency list.

    Stores the cached query results plus the prev/next links used by
    LinkedList to maintain least-recently-used order.
    """

    def __init__(self, results):
        self.results = results  # cached payload for one query
        self.prev = None        # neighbor toward the LRU (tail) end
        self.next = None        # neighbor toward the MRU (head) end
class LinkedList(object):
    """Doubly linked list ordering cache nodes by recency.

    head is the most recently used node, tail the least recently used.
    The original cell left the three operations as `# ...` placeholders
    (which is not valid Python); they are implemented here.
    """

    def __init__(self):
        self.head = None  # most recently used
        self.tail = None  # least recently used

    def move_to_front(self, node):
        """Detach an existing node and re-insert it at the head."""
        if node is self.head:
            return
        # Unlink from current position.
        if node.prev is not None:
            node.prev.next = node.next
        if node.next is not None:
            node.next.prev = node.prev
        if node is self.tail:
            self.tail = node.prev
        # Splice in at the head.
        node.prev = None
        node.next = self.head
        if self.head is not None:
            self.head.prev = node
        self.head = node
        if self.tail is None:
            self.tail = node

    def append_to_front(self, node):
        """Insert a new node at the head (most recently used position)."""
        node.prev = None
        node.next = self.head
        if self.head is not None:
            self.head.prev = node
        self.head = node
        if self.tail is None:
            self.tail = node

    def remove_from_tail(self):
        """Remove and return the least recently used node, or None if empty."""
        node = self.tail
        if node is None:
            return None
        if node.prev is not None:
            node.prev.next = None
        else:
            # node was also the head: list becomes empty.
            self.head = None
        self.tail = node.prev
        node.prev = None
        return node
class Cache(object):
    """LRU cache mapping a query to its results, bounded at MAX_SIZE entries.

    A dict gives O(1) lookup by query; a doubly linked list tracks recency
    so the least recently used entry can be evicted in O(1).
    """

    def __init__(self, MAX_SIZE):
        self.MAX_SIZE = MAX_SIZE
        self.size = 0
        self.lookup = {}  # key: query, value: node
        self.linked_list = LinkedList()

    def get(self, query):
        """Get the stored query result from the cache, or None on a miss.

        Accessing a node updates its position to the front of the LRU list.
        """
        # .get avoids the KeyError that plain subscripting raised on a miss.
        node = self.lookup.get(query)
        if node is None:
            return None
        self.linked_list.move_to_front(node)
        return node.results
defset(self,results,query):
"""Set the result for the given query key in the cache.
When updating an entry, updates its position to the front of the LRU list.
If the entry is new and the cache is at capacity, removes the oldest entry
before the new entry is added.
"""
node=self.lookup[query]
node=self.lookup.get(query)
ifnodeisnotNone:
# Key exists in cache, update the value
node.results=results
self.linked_list.move_to_front(node)
else:
# Key does not exist in cache
ifself.size==self.MAX_SIZE:
# Remove the oldest entry from the linked list and lookup