diff --git a/DIRECTORY.md b/DIRECTORY.md index 984744ad7800..f35f3906bff6 100644 --- a/DIRECTORY.md +++ b/DIRECTORY.md @@ -309,6 +309,7 @@ * [Scoring Functions](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/scoring_functions.py) * [Sequential Minimum Optimization](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/sequential_minimum_optimization.py) * [Support Vector Machines](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/support_vector_machines.py) + * [Word Frequency Functions](https://github.com/TheAlgorithms/Python/blob/master/machine_learning/word_frequency_functions.py) ## Maths * [3N Plus 1](https://github.com/TheAlgorithms/Python/blob/master/maths/3n_plus_1.py) @@ -442,7 +443,9 @@ * [Integeration By Simpson Approx](https://github.com/TheAlgorithms/Python/blob/master/other/integeration_by_simpson_approx.py) * [Largest Subarray Sum](https://github.com/TheAlgorithms/Python/blob/master/other/largest_subarray_sum.py) * [Least Recently Used](https://github.com/TheAlgorithms/Python/blob/master/other/least_recently_used.py) + * [Lfu Cache](https://github.com/TheAlgorithms/Python/blob/master/other/lfu_cache.py) * [Linear Congruential Generator](https://github.com/TheAlgorithms/Python/blob/master/other/linear_congruential_generator.py) + * [Lru Cache](https://github.com/TheAlgorithms/Python/blob/master/other/lru_cache.py) * [Magicdiamondpattern](https://github.com/TheAlgorithms/Python/blob/master/other/magicdiamondpattern.py) * [Markov Chain](https://github.com/TheAlgorithms/Python/blob/master/other/markov_chain.py) * [Nested Brackets](https://github.com/TheAlgorithms/Python/blob/master/other/nested_brackets.py) @@ -566,6 +569,8 @@ * [Sol1](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_40/sol1.py) * Problem 42 * [Solution42](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_42/solution42.py) + * Problem 47 + * [Sol1](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_47/sol1.py) * Problem 48 * [Sol1](https://github.com/TheAlgorithms/Python/blob/master/project_euler/problem_48/sol1.py) * Problem 52 diff --git a/data_structures/binary_tree/avl_tree.py b/data_structures/binary_tree/avl_tree.py index 71dede2ccacc..c6a45f1cbeb7 100644 --- a/data_structures/binary_tree/avl_tree.py +++ b/data_structures/binary_tree/avl_tree.py @@ -204,14 +204,16 @@ def del_node(root, data): if root is None: return root if get_height(root.get_right()) - get_height(root.get_left()) == 2: - if get_height(root.get_right().get_right()) > \ - get_height(root.get_right().get_left()): + if get_height(root.get_right().get_right()) > get_height( + root.get_right().get_left() + ): root = left_rotation(root) else: root = rl_rotation(root) elif get_height(root.get_right()) - get_height(root.get_left()) == -2: - if get_height(root.get_left().get_left()) > \ - get_height(root.get_left().get_right()): + if get_height(root.get_left().get_left()) > get_height( + root.get_left().get_right() + ): root = right_rotation(root) else: root = lr_rotation(root) @@ -253,6 +255,7 @@ class AVLtree: 2 * ************************************* """ + def __init__(self): self.root = None @@ -307,6 +310,7 @@ def __str__(self): # a level traversale, gives a more intuitive look on the tre def _test(): import doctest + doctest.testmod() diff --git a/machine_learning/word_frequency_functions.py b/machine_learning/word_frequency_functions.py index a105e30f5d3b..09c6d269ef0c 100644 --- 
a/machine_learning/word_frequency_functions.py +++ b/machine_learning/word_frequency_functions.py @@ -40,7 +40,7 @@ """ -def term_frequency(term : str, document : str) -> int: +def term_frequency(term: str, document: str) -> int: """ Return the number of times a term occurs within a given document. @@ -58,9 +58,7 @@ def term_frequency(term : str, document : str) -> int: str.maketrans("", "", string.punctuation) ).replace("\n", "") tokenize_document = document_without_punctuation.split(" ") # word tokenization - return len( - [word for word in tokenize_document if word.lower() == term.lower()] - ) + return len([word for word in tokenize_document if word.lower() == term.lower()]) def document_frequency(term: str, corpus: str) -> int: @@ -77,17 +75,18 @@ def document_frequency(term: str, corpus: str) -> int: the third document in the corpus.") (1, 3) """ - corpus_without_punctuation = corpus.translate( + corpus_without_punctuation = corpus.lower().translate( str.maketrans("", "", string.punctuation) ) # strip all punctuation and replace it with '' - documents = corpus_without_punctuation.split("\n") - lowercase_documents = [document.lower() for document in documents] - return len( - [document for document in lowercase_documents if term.lower() in document] - ), len(documents) + docs = corpus_without_punctuation.split("\n") + term = term.lower() + return ( + len([doc for doc in docs if term in doc]), + len(docs), + ) -def inverse_document_frequency(df : int, N: int) -> float: +def inverse_document_frequency(df: int, N: int) -> float: """ Return an integer denoting the importance of a word. This measure of importance is @@ -116,7 +115,7 @@ def inverse_document_frequency(df : int, N: int) -> float: return round(log10(N / df), 3) -def tf_idf(tf : int, idf: int) -> float: +def tf_idf(tf: int, idf: int) -> float: """ Combine the term frequency and inverse document frequency functions to diff --git a/other/lfu_cache.py b/other/lfu_cache.py index 0f128646d7a2..40268242f564 100644 --- a/other/lfu_cache.py +++ b/other/lfu_cache.py @@ -2,9 +2,9 @@ class DoubleLinkedListNode: - ''' + """ Double Linked List Node built specifically for LFU Cache - ''' + """ def __init__(self, key: int, val: int): self.key = key @@ -15,9 +15,9 @@ def __init__(self, key: int, val: int): class DoubleLinkedList: - ''' + """ Double Linked List built specifically for LFU Cache - ''' + """ def __init__(self): self.head = DoubleLinkedListNode(None, None) @@ -25,9 +25,9 @@ def __init__(self): self.head.next, self.rear.prev = self.rear, self.head def add(self, node: DoubleLinkedListNode) -> None: - ''' + """ Adds the given node at the head of the list and shifting it to proper position - ''' + """ temp = self.rear.prev @@ -43,9 +43,9 @@ def _position_node(self, node: DoubleLinkedListNode) -> None: node1.next, node2.prev = node2, node1 def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode: - ''' + """ Removes and returns the given node from the list - ''' + """ temp_last, temp_next = node.prev, node.next node.prev, node.next = None, None @@ -54,7 +54,7 @@ def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode: class LFUCache: - ''' + """ LFU Cache to store a given capacity of data. Can be used as a stand-alone object or as a function decorator. @@ -72,7 +72,7 @@ class LFUCache: >>> cache.get(4) 4 >>> cache - CacheInfo(hits=3, misses=2, capacity=2, current size=2) + CacheInfo(hits=3, misses=2, capacity=2, current_size=2) >>> @LFUCache.decorator(100) ... def fib(num): ... 
if num in (1, 2): @@ -83,8 +83,8 @@ class LFUCache: ... res = fib(i) >>> fib.cache_info() - CacheInfo(hits=196, misses=100, capacity=100, current size=100) - ''' + CacheInfo(hits=196, misses=100, capacity=100, current_size=100) + """ # class variable to map the decorator functions to their respective instance decorator_function_to_instance_map = {} @@ -98,30 +98,32 @@ def __init__(self, capacity: int): self.cache = {} def __repr__(self) -> str: - ''' + """ Return the details for the cache instance [hits, misses, capacity, current_size] - ''' + """ - return (f'CacheInfo(hits={self.hits}, misses={self.miss}, ' - f'capacity={self.capacity}, current size={self.num_keys})') + return ( + f"CacheInfo(hits={self.hits}, misses={self.miss}, " + f"capacity={self.capacity}, current_size={self.num_keys})" + ) def __contains__(self, key: int) -> bool: - ''' + """ >>> cache = LFUCache(1) >>> 1 in cache False >>> cache.set(1, 1) >>> 1 in cache True - ''' + """ return key in self.cache def get(self, key: int) -> Optional[int]: - ''' + """ Returns the value for the input key and updates the Double Linked List. Returns None if key is not present in cache - ''' + """ if key in self.cache: self.hits += 1 @@ -131,9 +133,9 @@ def get(self, key: int) -> Optional[int]: return None def set(self, key: int, value: int) -> None: - ''' + """ Sets the value for the input key and updates the Double Linked List - ''' + """ if key not in self.cache: if self.num_keys >= self.capacity: @@ -152,12 +154,11 @@ def set(self, key: int, value: int) -> None: @staticmethod def decorator(size: int = 128): - ''' + """ Decorator version of LFU Cache - ''' + """ def cache_decorator_inner(func: Callable): - def cache_decorator_wrapper(*args, **kwargs): if func not in LFUCache.decorator_function_to_instance_map: LFUCache.decorator_function_to_instance_map[func] = LFUCache(size) diff --git a/other/lru_cache.py b/other/lru_cache.py index 7b5d16be66e5..2a9d7e49b279 100644 --- a/other/lru_cache.py +++ b/other/lru_cache.py @@ -2,9 +2,9 @@ class DoubleLinkedListNode: - ''' + """ Double Linked List Node built specifically for LRU Cache - ''' + """ def __init__(self, key: int, val: int): self.key = key @@ -14,9 +14,9 @@ def __init__(self, key: int, val: int): class DoubleLinkedList: - ''' + """ Double Linked List built specifically for LRU Cache - ''' + """ def __init__(self): self.head = DoubleLinkedListNode(None, None) @@ -24,18 +24,18 @@ def __init__(self): self.head.next, self.rear.prev = self.rear, self.head def add(self, node: DoubleLinkedListNode) -> None: - ''' + """ Adds the given node to the end of the list (before rear) - ''' + """ temp = self.rear.prev temp.next, node.prev = node, temp self.rear.prev, node.next = node, self.rear def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode: - ''' + """ Removes and returns the given node from the list - ''' + """ temp_last, temp_next = node.prev, node.next node.prev, node.next = None, None @@ -45,7 +45,7 @@ def remove(self, node: DoubleLinkedListNode) -> DoubleLinkedListNode: class LRUCache: - ''' + """ LRU Cache to store a given capacity of data. Can be used as a stand-alone object or as a function decorator. 
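A minimal usage sketch for the LFU cache touched in this diff (other/lfu_cache.py). It relies only on the API visible above — set, get, membership via __contains__, the __repr__ CacheInfo line, and the decorator/cache_info pair from the doctests — and assumes the snippet is run from the repository root so the module is importable; the expected outputs in the comments follow from the doctested behaviour rather than from running this exact script.

from other.lfu_cache import LFUCache

# Stand-alone use: capacity 2, so the least frequently used key is evicted first.
cache = LFUCache(2)
cache.set(1, 1)
cache.set(2, 2)
cache.get(1)       # returns 1, counted as a hit and raising key 1's frequency
cache.set(3, 3)    # evicts key 2, the least frequently used entry
print(2 in cache)  # False
print(cache)       # CacheInfo(hits=1, misses=0, capacity=2, current_size=2)

# Decorator use: memoise a recursive function, keeping at most 64 cached results.
@LFUCache.decorator(64)
def fib(num: int) -> int:
    if num in (1, 2):
        return 1
    return fib(num - 1) + fib(num - 2)

print(fib(30))           # 832040
print(fib.cache_info())  # hits/misses show how often cached values were reused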
@@ -86,7 +86,7 @@ class LRUCache: >>> fib.cache_info() CacheInfo(hits=194, misses=99, capacity=100, current size=99) - ''' + """ # class variable to map the decorator functions to their respective instance decorator_function_to_instance_map = {} @@ -100,16 +100,18 @@ def __init__(self, capacity: int): self.cache = {} def __repr__(self) -> str: - ''' + """ Return the details for the cache instance [hits, misses, capacity, current_size] - ''' + """ - return (f'CacheInfo(hits={self.hits}, misses={self.miss}, ' - f'capacity={self.capacity}, current size={self.num_keys})') + return ( + f"CacheInfo(hits={self.hits}, misses={self.miss}, " + f"capacity={self.capacity}, current size={self.num_keys})" + ) def __contains__(self, key: int) -> bool: - ''' + """ >>> cache = LRUCache(1) >>> 1 in cache @@ -119,15 +121,15 @@ def __contains__(self, key: int) -> bool: >>> 1 in cache True - ''' + """ return key in self.cache def get(self, key: int) -> Optional[int]: - ''' + """ Returns the value for the input key and updates the Double Linked List. Returns None if key is not present in cache - ''' + """ if key in self.cache: self.hits += 1 @@ -137,9 +139,9 @@ def get(self, key: int) -> Optional[int]: return None def set(self, key: int, value: int) -> None: - ''' + """ Sets the value for the input key and updates the Double Linked List - ''' + """ if key not in self.cache: if self.num_keys >= self.capacity: @@ -158,12 +160,11 @@ def set(self, key: int, value: int) -> None: @staticmethod def decorator(size: int = 128): - ''' + """ Decorator version of LRU Cache - ''' + """ def cache_decorator_inner(func: Callable): - def cache_decorator_wrapper(*args, **kwargs): if func not in LRUCache.decorator_function_to_instance_map: LRUCache.decorator_function_to_instance_map[func] = LRUCache(size) diff --git a/project_euler/problem_47/__init__.py b/project_euler/problem_47/__init__.py index 8b137891791f..e69de29bb2d1 100644 --- a/project_euler/problem_47/__init__.py +++ b/project_euler/problem_47/__init__.py @@ -1 +0,0 @@ - diff --git a/project_euler/problem_47/sol1.py b/project_euler/problem_47/sol1.py index fab8ffde9052..1287e0d9e107 100644 --- a/project_euler/problem_47/sol1.py +++ b/project_euler/problem_47/sol1.py @@ -28,9 +28,9 @@ def unique_prime_factors(n: int) -> set: not the order in which it is produced. >>> sorted(set(unique_prime_factors(14))) [2, 7] - >>> set(sorted(unique_prime_factors(644))) + >>> sorted(set(unique_prime_factors(644))) [2, 7, 23] - >>> set(sorted(unique_prime_factors(646))) + >>> sorted(set(unique_prime_factors(646))) [2, 17, 19] """ i = 2 @@ -64,7 +64,7 @@ def equality(iterable: list) -> bool: >>> equality([2, 2, 2, 2]) True >>> equality([1, 2, 3, 2, 1]) - True + False """ return len(set(iterable)) in (0, 1)
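The Problem 47 hunk above only shows the corrected doctests for unique_prime_factors and equality, so as a hedged illustration of how such helpers fit the problem (find the first of four consecutive integers with four distinct prime factors each), here is a small self-contained sketch. The factorisation body is ordinary trial division written to match the doctests above, and first_run_of_four is a hypothetical helper; neither is necessarily identical to the repository's sol1.py.

def unique_prime_factors(n: int) -> set:
    """Distinct prime factors of n, e.g. 644 -> {2, 7, 23} as in the doctests above."""
    i = 2
    factors = set()
    while i * i <= n:
        if n % i:
            i += 1
        else:
            n //= i
            factors.add(i)
    if n > 1:
        factors.add(n)
    return factors


def first_run_of_four(limit: int = 200_000) -> int:
    """Hypothetical helper: smallest n where n, n+1, n+2, n+3 each have exactly
    four distinct prime factors."""
    run = 0  # length of the current streak of qualifying consecutive integers
    for n in range(2, limit):
        run = run + 1 if len(unique_prime_factors(n)) == 4 else 0
        if run == 4:
            return n - 3  # first number of the streak
    raise ValueError("no qualifying run found below the limit")


if __name__ == "__main__":
    print(first_run_of_four())  # 134043, the published Project Euler 47 answer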