system-design-primer/solutions/object_oriented_design/lru_cache/lru_cache.ipynb

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"This notebook was prepared by [Donne Martin](https://github.com/donnemartin). Source and license info is on [GitHub](https://github.com/donnemartin/system-design-primer)."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Design an LRU cache"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Constraints and assumptions\n",
"\n",
"* What are we caching?\n",
" * We are caching the results of web queries\n",
"* Can we assume inputs are valid or do we have to validate them?\n",
" * Assume they're valid\n",
"* Can we assume this fits memory?\n",
" * Yes"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Solution"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {
"collapsed": false
},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Overwriting lru_cache.py\n"
]
}
],
"source": [
"%%writefile lru_cache.py\n",
"class Node(object):\n",
"\n",
" def __init__(self, results):\n",
" self.results = results\n",
" self.prev = None\n",
" self.next = None\n",
"\n",
"\n",
"class LinkedList(object):\n",
"\n",
" def __init__(self):\n",
" self.head = None\n",
" self.tail = None\n",
"\n",
" def move_to_front(self, node): # ...\n",
" def append_to_front(self, node): # ...\n",
" def remove_from_tail(self): # ...\n",
"\n",
"\n",
"class Cache(object):\n",
"\n",
" def __init__(self, MAX_SIZE):\n",
" self.MAX_SIZE = MAX_SIZE\n",
" self.size = 0\n",
" self.lookup = {} # key: query, value: node\n",
" self.linked_list = LinkedList()\n",
"\n",
" def get(self, query)\n",
" \"\"\"Get the stored query result from the cache.\n",
" \n",
" Accessing a node updates its position to the front of the LRU list.\n",
" \"\"\"\n",
" node = self.lookup.get(query)\n",
" if node is None:\n",
" return None\n",
" self.linked_list.move_to_front(node)\n",
" return node.results\n",
"\n",
" def set(self, results, query):\n",
" \"\"\"Set the result for the given query key in the cache.\n",
" \n",
" When updating an entry, updates its position to the front of the LRU list.\n",
" If the entry is new and the cache is at capacity, removes the oldest entry\n",
" before the new entry is added.\n",
" \"\"\"\n",
" node = self.lookup.get(query)\n",
" if node is not None:\n",
" # Key exists in cache, update the value\n",
" node.results = results\n",
" self.linked_list.move_to_front(node)\n",
" else:\n",
" # Key does not exist in cache\n",
" if self.size == self.MAX_SIZE:\n",
" # Remove the oldest entry from the linked list and lookup\n",
" self.lookup.pop(self.linked_list.tail.query, None)\n",
" self.linked_list.remove_from_tail()\n",
" else:\n",
" self.size += 1\n",
" # Add the new key and value\n",
" new_node = Node(results)\n",
" self.linked_list.append_to_front(new_node)\n",
" self.lookup[query] = new_node"
]
}
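,
{
"cell_type": "markdown",
"metadata": {},
"source": [
"The linked list methods above are intentionally left as `# ...` stubs. The cell below is one possible sketch of those operations plus a short usage example; the `_unlink` helper, the `cache.linked_list` swap, and the sample queries are illustrative assumptions rather than part of the original solution."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": false
},
"outputs": [],
"source": [
"from lru_cache import Cache\n",
"\n",
"\n",
"class LinkedList(object):\n",
"    \"\"\"Sketch of the doubly linked list elided above (most recent node at the head).\"\"\"\n",
"\n",
"    def __init__(self):\n",
"        self.head = None\n",
"        self.tail = None\n",
"\n",
"    def append_to_front(self, node):\n",
"        node.prev = None\n",
"        node.next = self.head\n",
"        if self.head is not None:\n",
"            self.head.prev = node\n",
"        self.head = node\n",
"        if self.tail is None:\n",
"            self.tail = node\n",
"\n",
"    def move_to_front(self, node):\n",
"        if node is self.head:\n",
"            return\n",
"        self._unlink(node)\n",
"        self.append_to_front(node)\n",
"\n",
"    def remove_from_tail(self):\n",
"        if self.tail is not None:\n",
"            self._unlink(self.tail)\n",
"\n",
"    def _unlink(self, node):\n",
"        # Detach node, patching its neighbors and the head/tail pointers\n",
"        if node.prev is not None:\n",
"            node.prev.next = node.next\n",
"        else:\n",
"            self.head = node.next\n",
"        if node.next is not None:\n",
"            node.next.prev = node.prev\n",
"        else:\n",
"            self.tail = node.prev\n",
"        node.prev = None\n",
"        node.next = None\n",
"\n",
"\n",
"# Usage example (assumed queries/results), with a capacity of two entries\n",
"cache = Cache(MAX_SIZE=2)\n",
"cache.linked_list = LinkedList()  # swap in the implemented list for this demo\n",
"cache.set('results a', 'query a')\n",
"cache.set('results b', 'query b')\n",
"print(cache.get('query a'))  # results a; 'query a' becomes most recently used\n",
"cache.set('results c', 'query c')  # at capacity, evicts 'query b'\n",
"print(cache.get('query b'))  # None\n",
"print(cache.get('query c'))  # results c"
]
}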
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.4.3"
}
},
"nbformat": 4,
"nbformat_minor": 0
}