From 50fd5e4ada1b482fc754b2d774be99f6a60e3052 Mon Sep 17 00:00:00 2001
From: Tapajyoti Bose
Date: Sat, 20 Jun 2020 12:39:59 +0530
Subject: [PATCH 1/4] Added LRU Cache

---
 other/lru_cache.py | 127 +++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 127 insertions(+)
 create mode 100644 other/lru_cache.py

diff --git a/other/lru_cache.py b/other/lru_cache.py
new file mode 100644
index 000000000000..e8cef0d3f3d0
--- /dev/null
+++ b/other/lru_cache.py
@@ -0,0 +1,127 @@
+class Double_Linked_List_Node():
+    '''
+    Double Linked List Node for LRU Cache
+    '''
+
+    def __init__(self, key, val):
+        self.key = key
+        self.val = val
+        self.next = None
+        self.prev = None
+
+
+class Double_Linked_List():
+    '''
+    Double Linked List for LRU Cache
+
+    Methods:
+        add: Adds the given node to the end of the list
+        remove: Removes the given node from the list
+    '''
+
+    def __init__(self):
+        self.head = Double_Linked_List_Node(None, None)
+        self.rear = Double_Linked_List_Node(None, None)
+        self.head.next = self.rear
+        self.rear.prev = self.head
+
+    def add(self, node: Double_Linked_List_Node) -> None:
+        temp = self.rear.prev
+        temp.next = node
+        node.prev = temp
+        self.rear.prev = node
+        node.next = self.rear
+
+    def remove(self, node: Double_Linked_List_Node) -> Double_Linked_List_Node:
+        temp_last = node.prev
+        temp_next = node.next
+        node.prev = None
+        node.next = None
+        temp_last.next = temp_next
+        temp_next.prev = temp_last
+
+        return node
+
+
+class Lru_Cache:
+    '''
+    LRU Cache to store a given capacity of data
+
+    Methods:
+        get: Returns the value for the input key. Raises Value Error if key is
+             not present in cache
+        set: Sets the value for the input key
+        has_key: Checks if the input key is present in cache
+
+    >>> cache = Lru_Cache(2)
+
+    >>> cache.set(1, 1)
+
+    >>> cache.set(2, 2)
+
+    >>> cache.get(1)
+    1
+
+    >>> cache.set(3, 3)
+
+    >>> cache.get(2)
+    Traceback (most recent call last):
+    ...
+    ValueError: Key '2' not found in cache
+
+    >>> cache.set(4, 4)
+
+    >>> cache.get(1)
+    Traceback (most recent call last):
+    ...
+    ValueError: Key '1' not found in cache
+
+    >>> cache.get(3)
+    3
+
+    >>> cache.get(4)
+    4
+
+    >>> cache.has_key(1)
+    False
+
+    >>> cache.has_key(4)
+    True
+    '''
+
+    def __init__(self, capacity):
+        self.list = Double_Linked_List()
+        self.capacity = capacity
+        self.num_keys = 0
+        self.cache = {}
+
+    def get(self, key: int) -> int:
+        if key in self.cache:
+            self.list.add(self.list.remove(self.cache[key]))
+            return self.cache[key].val
+        raise ValueError(f"Key '{key}' not found in cache")
+
+    def set(self, key: int, value: int) -> None:
+        if key not in self.cache:
+            if self.num_keys >= self.capacity:
+                key_to_delete = self.list.head.next.key
+                self.list.remove(self.cache[key_to_delete])
+                del self.cache[key_to_delete]
+                self.num_keys -= 1
+            self.cache[key] = Double_Linked_List_Node(key, value)
+            self.list.add(self.cache[key])
+            self.num_keys += 1
+
+        else:
+            node = self.list.remove(self.cache[key])
+            node.val = value
+            self.list.add(node)
+
+    def has_key(self, key: int) -> bool:
+        return key in self.cache
+
+
+if __name__ == "__main__":
+    import doctest
+
+    doctest.testmod()

From d216b9aaeea913ef5fe3813dde82f7c8f3ee87da Mon Sep 17 00:00:00 2001
From: Tapajyoti Bose
Date: Sat, 20 Jun 2020 15:08:53 +0530
Subject: [PATCH 2/4] Optimized the program

---
 other/lru_cache.py | 48 +++++++++++++++++++++++-----------------------
 1 file changed, 24 insertions(+), 24 deletions(-)

diff --git a/other/lru_cache.py b/other/lru_cache.py
index e8cef0d3f3d0..2e5403d9fb2d 100644
--- a/other/lru_cache.py
+++ b/other/lru_cache.py
@@ -1,6 +1,6 @@
 class Double_Linked_List_Node():
     '''
-    Double Linked List Node for LRU Cache
+    Double Linked List Node built specifically for LRU Cache
     '''
 
     def __init__(self, key, val):
@@ -12,33 +12,29 @@ def __init__(self, key, val):
 
 class Double_Linked_List():
     '''
-    Double Linked List for LRU Cache
-
-    Methods:
-        add: Adds the given node to the end of the list
-        remove: Removes the given node from the list
+    Double Linked List built specifically for LRU Cache
     '''
 
     def __init__(self):
         self.head = Double_Linked_List_Node(None, None)
         self.rear = Double_Linked_List_Node(None, None)
-        self.head.next = self.rear
-        self.rear.prev = self.head
+        self.head.next, self.rear.prev = self.rear, self.head
 
     def add(self, node: Double_Linked_List_Node) -> None:
+        '''
+        Adds the given node to the end of the list (before rear)
+        '''
        temp = self.rear.prev
-        temp.next = node
-        node.prev = temp
-        self.rear.prev = node
-        node.next = self.rear
+        temp.next, node.prev = node, temp
+        self.rear.prev, node.next = node, self.rear
 
     def remove(self, node: Double_Linked_List_Node) -> Double_Linked_List_Node:
-        temp_last = node.prev
-        temp_next = node.next
-        node.prev = None
-        node.next = None
-        temp_last.next = temp_next
-        temp_next.prev = temp_last
+        '''
+        Removes and returns the given node from the list
+        '''
+        temp_last, temp_next = node.prev, node.next
+        node.prev, node.next = None, None
+        temp_last.next, temp_next.prev = temp_next, temp_last
 
         return node
 
@@ -47,12 +43,6 @@ class Lru_Cache:
     '''
     LRU Cache to store a given capacity of data
 
-    Methods:
-        get: Returns the value for the input key. Raises Value Error if key is
-             not present in cache
-        set: Sets the value for the input key
-        has_key: Checks if the input key is present in cache
-
     >>> cache = Lru_Cache(2)
 
     >>> cache.set(1, 1)
@@ -96,12 +86,19 @@ def __init__(self, capacity):
         self.cache = {}
 
     def get(self, key: int) -> int:
+        '''
+        Returns the value for the input key and updates the Double Linked List. Raises
+        Value Error if key is not present in cache
+        '''
         if key in self.cache:
             self.list.add(self.list.remove(self.cache[key]))
             return self.cache[key].val
         raise ValueError(f"Key '{key}' not found in cache")
 
     def set(self, key: int, value: int) -> None:
+        '''
+        Sets the value for the input key and updates the Double Linked List
+        '''
         if key not in self.cache:
             if self.num_keys >= self.capacity:
@@ -118,6 +115,9 @@ def set(self, key: int, value: int) -> None:
         self.list.add(node)
 
     def has_key(self, key: int) -> bool:
+        '''
+        Checks if the input key is present in cache
+        '''
         return key in self.cache

From add1749dd8d17f7e339f498f8c0da47b897e5c8f Mon Sep 17 00:00:00 2001
From: Tapajyoti Bose
Date: Sun, 21 Jun 2020 12:01:17 +0530
Subject: [PATCH 3/4] Added Cache as Decorator + Implemented suggestions

---
 other/lru_cache.py | 117 ++++++++++++++++++++++++++++++++-------------
 1 file changed, 85 insertions(+), 32 deletions(-)

diff --git a/other/lru_cache.py b/other/lru_cache.py
index 2e5403d9fb2d..df25bd19f96a 100644
--- a/other/lru_cache.py
+++ b/other/lru_cache.py
@@ -1,37 +1,42 @@
-class Double_Linked_List_Node():
+from typing import Optional, Callable
+
+
+class DoubleLinkedListNode:
     '''
     Double Linked List Node built specifically for LRU Cache
     '''
 
-    def __init__(self, key, val):
+    def __init__(self, key: int, val: int):
         self.key = key
         self.val = val
         self.next = None
         self.prev = None
 
 
-class Double_Linked_List():
+class DoubleLinkedList:
     '''
     Double Linked List built specifically for LRU Cache
     '''
 
     def __init__(self):
-        self.head = Double_Linked_List_Node(None, None)
-        self.rear = Double_Linked_List_Node(None, None)
+        self.head = DoubleLinkedListNode(None, None)
+        self.rear = DoubleLinkedListNode(None, None)
         self.head.next, self.rear.prev = self.rear, self.head
 
-    def add(self, node: Double_Linked_List_Node) -> None:
+    def add(self, node: DoubleLinkedListNode) -> None:
         '''
         Adds the given node to the end of the list (before rear)
         '''
+
         temp = self.rear.prev
         temp.next, node.prev = node, temp
         self.rear.prev, node.next = node, self.rear
 
-    def remove(self, node: Double_Linked_List_Node) -> Double_Linked_List_Node:
+    def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:
         '''
         Removes and returns the given node from the list
         '''
+
         temp_last, temp_next = node.prev, node.next
         node.prev, node.next = None, None
         temp_last.next, temp_next.prev = temp_next, temp_last
@@ -39,11 +44,12 @@ def remove(self, node: Double_Linked_List_Node) -> Double_Linked_List_Node:
         return node
 
 
-class Lru_Cache:
+class LruCache:
     '''
-    LRU Cache to store a given capacity of data
+    LRU Cache to store a given capacity of data. Can be used as a stand-alone object
+    or as a function decorator.
 
-    >>> cache = Lru_Cache(2)
+    >>> cache = LruCache(2)
 
     >>> cache.set(1, 1)
 
@@ -54,17 +60,11 @@ class Lru_Cache:
 
     >>> cache.set(3, 3)
 
-    >>> cache.get(2)
-    Traceback (most recent call last):
-    ...
-    ValueError: Key '2' not found in cache
+    >>> cache.get(2)  # None returned
 
     >>> cache.set(4, 4)
 
-    >>> cache.get(1)
-    Traceback (most recent call last):
-    ...
-    ValueError: Key '1' not found in cache
+    >>> cache.get(1)  # None returned
 
     >>> cache.get(3)
     3
@@ -72,40 +72,58 @@ class Lru_Cache:
     >>> cache.get(4)
     4
 
-    >>> cache.has_key(1)
-    False
+    >>> cache.cache_info()
+    'CacheInfo(hits=3, misses=2, capacity=2, current size=2)'
+
+    >>> @LruCache.decorator(100)
+    ... def fib(num):
+    ...     if num in (1, 2):
+    ...         return 1
+    ...     return fib(num - 1) + fib(num - 2)
 
-    >>> cache.has_key(4)
-    True
+    >>> for i in range(1, 100):
+    ...     res = fib(i)
+
+    >>> fib.cache_info()
+    'CacheInfo(hits=194, misses=99, capacity=100, current size=99)'
     '''
 
-    def __init__(self, capacity):
-        self.list = Double_Linked_List()
+    # class variable to map the decorator functions to their respective instance
+    decorator_function_to_instance_map = {}
+
+    def __init__(self, capacity: int):
+        self.list = DoubleLinkedList()
         self.capacity = capacity
         self.num_keys = 0
+        self.hits = 0
+        self.miss = 0
         self.cache = {}
 
-    def get(self, key: int) -> int:
+    def get(self, key: int) -> Optional[int]:
         '''
-        Returns the value for the input key and updates the Double Linked List. Raises
-        Value Error if key is not present in cache
+        Returns the value for the input key and updates the Double Linked List. Returns
+        None if key is not present in cache
         '''
+
         if key in self.cache:
+            self.hits += 1
             self.list.add(self.list.remove(self.cache[key]))
             return self.cache[key].val
-        raise ValueError(f"Key '{key}' not found in cache")
+        self.miss += 1
+        return None
 
     def set(self, key: int, value: int) -> None:
         '''
         Sets the value for the input key and updates the Double Linked List
         '''
+
         if key not in self.cache:
             if self.num_keys >= self.capacity:
                 key_to_delete = self.list.head.next.key
                 self.list.remove(self.cache[key_to_delete])
                 del self.cache[key_to_delete]
                 self.num_keys -= 1
-            self.cache[key] = Double_Linked_List_Node(key, value)
+            self.cache[key] = DoubleLinkedListNode(key, value)
             self.list.add(self.cache[key])
             self.num_keys += 1
 
@@ -114,11 +132,46 @@ def set(self, key: int, value: int) -> None:
         node.val = value
         self.list.add(node)
 
-    def has_key(self, key: int) -> bool:
+    def cache_info(self) -> str:
         '''
-        Checks if the input key is present in cache
+        Returns the details for the cache instance
+        [hits, misses, capacity, current size]
         '''
-        return key in self.cache
+
+        return f'CacheInfo(hits={self.hits}, misses={self.miss}, \
+capacity={self.capacity}, current size={self.num_keys})'
+
+    @staticmethod
+    def decorator(size: int = 128):
+        '''
+        Decorator version of LRU Cache
+        '''
+
+        def cache_decorator_inner(func: Callable):
+
+            def cache_decorator_wrapper(*args, **kwargs):
+                if func not in LruCache.decorator_function_to_instance_map:
+                    LruCache.decorator_function_to_instance_map[func] = LruCache(size)
+
+                result = LruCache.decorator_function_to_instance_map[func].get(args[0])
+
+                if result is not None:
+                    return result
+
+                result = func(*args, **kwargs)
+                LruCache.decorator_function_to_instance_map[func].set(args[0], result)
+                return result
+
+            def cache_info():
+                if func not in LruCache.decorator_function_to_instance_map:
+                    return "Cache for function not initialized"
+                return LruCache.decorator_function_to_instance_map[func].cache_info()
+
+            cache_decorator_wrapper.cache_info = cache_info
+
+            return cache_decorator_wrapper
+
+        return cache_decorator_inner
 
 
 if __name__ == "__main__":

From 71d366170a15f1c14d7c3cbfab44b904efd75507 Mon Sep 17 00:00:00 2001
From: Tapajyoti Bose
Date: Sun, 21 Jun 2020 15:24:15 +0530
Subject: [PATCH 4/4] Implemented suggestions

---
 other/lru_cache.py | 70 +++++++++++++++++++++++++++-------------------
 1 file changed, 41 insertions(+), 29 deletions(-)

diff --git a/other/lru_cache.py b/other/lru_cache.py
index df25bd19f96a..7b5d16be66e5 100644
--- a/other/lru_cache.py
+++ b/other/lru_cache.py
@@ -1,4 +1,4 @@
-from typing import Optional, Callable
+from typing import Callable, Optional
 
 
 class DoubleLinkedListNode:
@@ -44,12 +44,12 @@ def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode:
         return node
 
 
-class LruCache:
+class LRUCache:
     '''
     LRU Cache to store a given capacity of data. Can be used as a stand-alone object
     or as a function decorator.
 
-    >>> cache = LruCache(2)
+    >>> cache = LRUCache(2)
 
     >>> cache.set(1, 1)
@@ -72,10 +72,10 @@ class LruCache:
     >>> cache.get(4)
     4
 
-    >>> cache.cache_info()
-    'CacheInfo(hits=3, misses=2, capacity=2, current size=2)'
+    >>> cache
+    CacheInfo(hits=3, misses=2, capacity=2, current size=2)
 
-    >>> @LruCache.decorator(100)
+    >>> @LRUCache.decorator(100)
     ... def fib(num):
     ...     if num in (1, 2):
     ...         return 1
@@ -85,7 +85,7 @@ class LruCache:
     ...     res = fib(i)
 
     >>> fib.cache_info()
-    'CacheInfo(hits=194, misses=99, capacity=100, current size=99)'
+    CacheInfo(hits=194, misses=99, capacity=100, current size=99)
     '''
 
     # class variable to map the decorator functions to their respective instance
@@ -99,6 +99,30 @@ def __init__(self, capacity: int):
         self.miss = 0
         self.cache = {}
 
+    def __repr__(self) -> str:
+        '''
+        Return the details for the cache instance
+        [hits, misses, capacity, current_size]
+        '''
+
+        return (f'CacheInfo(hits={self.hits}, misses={self.miss}, '
+                f'capacity={self.capacity}, current size={self.num_keys})')
+
+    def __contains__(self, key: int) -> bool:
+        '''
+        >>> cache = LRUCache(1)
+
+        >>> 1 in cache
+        False
+
+        >>> cache.set(1, 1)
+
+        >>> 1 in cache
+        True
+        '''
+
+        return key in self.cache
+
     def get(self, key: int) -> Optional[int]:
         '''
         Returns the value for the input key and updates the Double Linked List. Returns
@@ -132,15 +156,6 @@ def set(self, key: int, value: int) -> None:
         node.val = value
         self.list.add(node)
 
-    def cache_info(self) -> str:
-        '''
-        Returns the details for the cache instance
-        [hits, misses, capacity, current size]
-        '''
-
-        return f'CacheInfo(hits={self.hits}, misses={self.miss}, \
-capacity={self.capacity}, current size={self.num_keys})'
-
     @staticmethod
     def decorator(size: int = 128):
         '''
@@ -150,22 +165,19 @@ def decorator(size: int = 128):
 
         def cache_decorator_inner(func: Callable):
 
             def cache_decorator_wrapper(*args, **kwargs):
-                if func not in LruCache.decorator_function_to_instance_map:
-                    LruCache.decorator_function_to_instance_map[func] = LruCache(size)
-
-                result = LruCache.decorator_function_to_instance_map[func].get(args[0])
-
-                if result is not None:
-                    return result
-
-                result = func(*args, **kwargs)
-                LruCache.decorator_function_to_instance_map[func].set(args[0], result)
+                if func not in LRUCache.decorator_function_to_instance_map:
+                    LRUCache.decorator_function_to_instance_map[func] = LRUCache(size)
+
+                result = LRUCache.decorator_function_to_instance_map[func].get(args[0])
+                if result is None:
+                    result = func(*args, **kwargs)
+                    LRUCache.decorator_function_to_instance_map[func].set(
+                        args[0], result
+                    )
                 return result
 
             def cache_info():
-                if func not in LruCache.decorator_function_to_instance_map:
-                    return "Cache for function not initialized"
-                return LruCache.decorator_function_to_instance_map[func].cache_info()
+                return LRUCache.decorator_function_to_instance_map[func]
 
             cache_decorator_wrapper.cache_info = cache_info
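
A minimal usage sketch of the LRUCache these patches build up, assuming the final patch is applied and the repository root is on sys.path so the module imports as other.lru_cache:

    from other.lru_cache import LRUCache

    # Stand-alone cache: capacity 2, so inserting a third key evicts the
    # least recently used entry.
    cache = LRUCache(2)
    cache.set(1, 1)
    cache.set(2, 2)
    cache.get(1)          # returns 1 and marks key 1 as most recently used
    cache.set(3, 3)       # capacity reached, evicts key 2
    print(cache.get(2))   # None: key 2 was evicted
    print(2 in cache)     # False, via __contains__
    print(cache)          # CacheInfo(hits=1, misses=1, capacity=2, current size=2)

    # Decorator form: results are memoized per first positional argument.
    @LRUCache.decorator(100)
    def fib(num):
        if num in (1, 2):
            return 1
        return fib(num - 1) + fib(num - 2)

    print(fib(30))           # 832040
    print(fib.cache_info())  # the underlying LRUCache instance (its repr)

Note that the decorator keys its cache on the first positional argument only, which is why the sketch sticks to single-argument functions.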