Skip to content

Added LFU Cache #2151

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 4 commits into from
Jun 25, 2020
Merged
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
186 changes: 186 additions & 0 deletions other/lfu_cache.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,186 @@
import functools
from typing import Callable, Optional


class DoubleLinkedListNode:
    """
    Double Linked List Node built specifically for LFU Cache.

    Holds a key/value pair together with the access frequency used to keep
    the list ordered for least-frequently-used eviction.
    """

    def __init__(self, key: int, val: int):
        self.key = key
        self.val = val
        self.freq = 0  # number of times this entry has been added/accessed
        self.next = None
        self.prev = None


class DoubleLinkedList:
    """
    Double Linked List built specifically for LFU Cache.

    Nodes are kept sorted by ascending frequency, so ``head.next`` is always
    the least frequently used entry (ties broken by least recently used).
    ``head`` and ``rear`` are sentinel nodes whose key is None.
    """

    def __init__(self):
        # Sentinels; real entries live strictly between them.
        self.head = DoubleLinkedListNode(None, None)
        self.rear = DoubleLinkedListNode(None, None)
        self.head.next, self.rear.prev = self.rear, self.head

    def add(self, node: DoubleLinkedListNode) -> None:
        """
        Insert ``node`` just before the rear sentinel, bump its frequency,
        then shift it towards the head until the frequency order holds.
        """

        temp = self.rear.prev

        self.rear.prev, node.next = node, self.rear
        temp.next, node.prev = node, temp
        node.freq += 1
        self._position_node(node)

    def _position_node(self, node: DoubleLinkedListNode) -> None:
        """
        Bubble ``node`` towards the head while its predecessor has a strictly
        higher frequency, keeping the list sorted by ascending frequency.
        """
        # Stop at the head sentinel, identified by ``key is None``.  A plain
        # truthiness test (``node.prev.key``) would wrongly treat a real key
        # of 0 as the sentinel and stop too early.
        while node.prev.key is not None and node.prev.freq > node.freq:
            # Swap ``node`` with its predecessor, relinking all six pointers:
            #   before <-> previous <-> node <-> after
            # becomes
            #   before <-> node <-> previous <-> after
            previous, after = node.prev, node.next
            before = previous.prev
            before.next, node.prev = node, before
            node.next, previous.prev = previous, node
            previous.next, after.prev = after, previous

    def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:
        """
        Unlink the given node from the list and return it.
        """

        temp_last, temp_next = node.prev, node.next
        node.prev, node.next = None, None
        temp_last.next, temp_next.prev = temp_next, temp_last
        return node


class LFUCache:
    """
    LFU Cache to store a given capacity of data. Can be used as a stand-alone object
    or as a function decorator.

    >>> cache = LFUCache(2)
    >>> cache.set(1, 1)
    >>> cache.set(2, 2)
    >>> cache.get(1)
    1
    >>> cache.set(3, 3)
    >>> cache.get(2) # None is returned
    >>> cache.set(4, 4)
    >>> cache.get(1) # None is returned
    >>> cache.get(3)
    3
    >>> cache.get(4)
    4
    >>> cache
    CacheInfo(hits=3, misses=2, capacity=2, current size=2)
    >>> @LFUCache.decorator(100)
    ... def fib(num):
    ...     if num in (1, 2):
    ...         return 1
    ...     return fib(num - 1) + fib(num - 2)

    >>> for i in range(1, 101):
    ...     res = fib(i)

    >>> fib.cache_info()
    CacheInfo(hits=196, misses=100, capacity=100, current size=100)
    """

    # Class variable mapping each decorated function to its own cache
    # instance, so independently decorated functions never share entries.
    decorator_function_to_instance_map = {}

    def __init__(self, capacity: int):
        self.list = DoubleLinkedList()  # nodes kept ordered by frequency
        self.capacity = capacity
        self.num_keys = 0
        self.hits = 0
        self.miss = 0
        self.cache = {}  # key -> DoubleLinkedListNode

    def __repr__(self) -> str:
        """
        Return the details for the cache instance
        [hits, misses, capacity, current_size]
        """

        return (
            f"CacheInfo(hits={self.hits}, misses={self.miss}, "
            f"capacity={self.capacity}, current size={self.num_keys})"
        )

    def __contains__(self, key: int) -> bool:
        """
        >>> cache = LFUCache(1)
        >>> 1 in cache
        False
        >>> cache.set(1, 1)
        >>> 1 in cache
        True
        """
        return key in self.cache

    def get(self, key: int) -> Optional[int]:
        """
        Returns the value for the input key and updates the Double Linked List.
        Returns None if key is not present in cache.

        NOTE: a miss and a stored value of None are indistinguishable to the
        caller; the decorator below relies on values never being None.
        """

        if key in self.cache:
            self.hits += 1
            # Re-inserting the node bumps its frequency and repositions it.
            self.list.add(self.list.remove(self.cache[key]))
            return self.cache[key].val
        self.miss += 1
        return None

    def set(self, key: int, value: int) -> None:
        """
        Sets the value for the input key and updates the Double Linked List
        """

        if key not in self.cache:
            if self.num_keys >= self.capacity:
                # head.next is the least frequently used entry (ties broken
                # by least recently used), so that is the one to evict.
                key_to_delete = self.list.head.next.key
                self.list.remove(self.cache[key_to_delete])
                del self.cache[key_to_delete]
                self.num_keys -= 1
            self.cache[key] = DoubleLinkedListNode(key, value)
            self.list.add(self.cache[key])
            self.num_keys += 1

        else:
            # Existing key: update the value in place and bump its frequency.
            node = self.list.remove(self.cache[key])
            node.val = value
            self.list.add(node)

    @staticmethod
    def decorator(size: int = 128):
        """
        Decorator version of LFU Cache.

        The wrapped function is memoized on its first positional argument
        only, so it is intended for single-argument functions.
        """

        def cache_decorator_inner(func: Callable):
            # Preserve the wrapped function's name/docstring for introspection.
            @functools.wraps(func)
            def cache_decorator_wrapper(*args, **kwargs):
                # Lazily create one cache per decorated function.
                if func not in LFUCache.decorator_function_to_instance_map:
                    LFUCache.decorator_function_to_instance_map[func] = LFUCache(size)

                result = LFUCache.decorator_function_to_instance_map[func].get(args[0])
                if result is None:
                    # Cache miss (or a cached None): compute and store.
                    result = func(*args, **kwargs)
                    LFUCache.decorator_function_to_instance_map[func].set(
                        args[0], result
                    )
                return result

            def cache_info():
                # Expose the underlying cache instance (its repr shows stats).
                return LFUCache.decorator_function_to_instance_map[func]

            cache_decorator_wrapper.cache_info = cache_info

            return cache_decorator_wrapper

        return cache_decorator_inner


if __name__ == "__main__":
    # Run the module's embedded doctests when executed as a script.
    from doctest import testmod

    testmod()