From a3b2bf2df93fdb745072f9ddcff4cd90111971ce Mon Sep 17 00:00:00 2001 From: Ashish Khare Date: Wed, 11 Oct 2023 20:03:06 +0530 Subject: [PATCH 01/89] algo: merge sort --- Algorithms/Sorting Techniques/Merge_Sort.py | 42 +++++++++++++++++++++ 1 file changed, 42 insertions(+) create mode 100644 Algorithms/Sorting Techniques/Merge_Sort.py diff --git a/Algorithms/Sorting Techniques/Merge_Sort.py b/Algorithms/Sorting Techniques/Merge_Sort.py new file mode 100644 index 0000000..e37ddde --- /dev/null +++ b/Algorithms/Sorting Techniques/Merge_Sort.py @@ -0,0 +1,42 @@ +# Merge sort is a highly efficient and stable sorting algorithm +# that follows the divide-and-conquer approach. It works by repeatedly +# dividing an unsorted list into two halves until individual elements +# are isolated, sorts them, and then merges them back together to form +# a fully sorted list. + +# For example, consider the list [38, 27, 43, 3, 9, 82, 10]. It's divided into two halves, sorted, and then merged: + +# 1. Divide: [38, 27, 43, 3, 9, 82, 10] → [38, 27, 43] and [3, 9, 82, 10] + +# 2. Conquer (Sort): Sort the sublists: [27, 38, 43] and [3, 9, 10, 82] + +# 3. Merge: Merge the sorted sublists: [27, 38, 43] and [3, 9, 10, 82] → [3, 9, 10, 27, 38, 43, 82] + +# Merge sort has a consistent time complexity of O(n log n), making it +# ideal for large datasets. However, it requires additional memory for +# merging, which can be a drawback in memory-constrained scenarios. + +def merge_sort(array): + if len(array) <= 1: + return array + + mid = len(array) // 2 + + left = merge_sort(array[:mid]) + right = merge_sort(array[mid:]) + + sorted_array = [] + i, j = 0, 0 + + while i < len(left) and j < len(right): + if left[i] < right[j]: + sorted_array.append(left[i]) + i += 1 + else: + sorted_array.append(right[j]) + j += 1 + + sorted_array.extend(left[i:]) + sorted_array.extend(right[j:]) + + return sorted_array \ No newline at end of file From 232a989ffe8ac49cda4267d003c8ae57047e56e0 Mon Sep 17 00:00:00 2001 From: Astha Tripathi <79215705+Astha369@users.noreply.github.com> Date: Wed, 11 Oct 2023 22:04:46 +0530 Subject: [PATCH 02/89] Create Bucket_Sort.py --- Codes/Sorting Techniques/Bucket_Sort.py | 29 +++++++++++++++++++++++++ 1 file changed, 29 insertions(+) create mode 100644 Codes/Sorting Techniques/Bucket_Sort.py diff --git a/Codes/Sorting Techniques/Bucket_Sort.py b/Codes/Sorting Techniques/Bucket_Sort.py new file mode 100644 index 0000000..d901d0c --- /dev/null +++ b/Codes/Sorting Techniques/Bucket_Sort.py @@ -0,0 +1,29 @@ +# Bucket Sort in Python + +def bucketSort(array): + bucket = [] + + # Create empty buckets + for i in range(len(array)): + bucket.append([]) + + for j in array: + index_b = int(10 * j) + bucket[index_b].append(j) + + # Sort the elements of each bucket + for i in range(len(array)): + bucket[i] = sorted(bucket[i]) + + # Get the sorted elements + k = 0 + for i in range(len(array)): + for j in range(len(bucket[i])): + array[k] = bucket[i][j] + k += 1 + return array + + +array = [.42, .32, .33, .52, .37, .47, .51] +print("Sorted Array in descending order is") +print(bucketSort(array)) From 5e65b37b7e0eb9271eb8719f4a7eb53b43c9ea9f Mon Sep 17 00:00:00 2001 From: Prateek Hebsur <54493047+Villain45@users.noreply.github.com> Date: Wed, 11 Oct 2023 22:54:43 +0530 Subject: [PATCH 03/89] Added Cycle Sort Technique --- Codes/Sorting Techniques/Cycle Sort.py | 39 ++++++++++++++++++++++++++ 1 file changed, 39 insertions(+) create mode 100644 Codes/Sorting Techniques/Cycle Sort.py diff --git a/Codes/Sorting 
Techniques/Cycle Sort.py b/Codes/Sorting Techniques/Cycle Sort.py new file mode 100644 index 0000000..484aff6 --- /dev/null +++ b/Codes/Sorting Techniques/Cycle Sort.py @@ -0,0 +1,39 @@ +def cycleSort(arr): + for idx in range(0,len(arr)-1): + cur_ele = arr[idx] + pos = idx + + # Find the pos where the element should be inserted + for j in range(idx+1,len(arr)): + if arr[j] < cur_ele: + pos += 1 + + # If element is in correct position then move to next cycle + if idx == pos: + continue + + # Finding the exact positions if any duplicates + while cur_ele == arr[pos]: + pos+=1 + + # Put the element to it's other position by swapping + arr[pos], cur_ele = cur_ele, arr[pos] + + # Traverse through rest of the cycle to rotate + while pos != idx: + pos = idx + + # Find the pos to insert + for k in range(idx+1,len(arr)): + if arr[k] < cur_ele: + pos += 1 + + # To check out for duplicates + while cur_ele == arr[pos]: + pos += 1 + + arr[pos], cur_ele = cur_ele, arr[pos] + +arr = [5, 2, 2, 7, 8, 9, 5, 1] +cycleSort(arr) +print(arr) From 761ba71a79d5e5c1b32947b2159c35336c296ed3 Mon Sep 17 00:00:00 2001 From: Khushi-Gupta13 <146571682+Khushi-Gupta13@users.noreply.github.com> Date: Wed, 11 Oct 2023 23:41:59 +0530 Subject: [PATCH 04/89] Update README.md made some changes in the description --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 7f7e81f..2a67634 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,13 @@ # Python-Data-Structures -A compilation of Data Structures in Python. It is a collection of Python code examples and implementations of various data structures. This repository aims to provide a comprehensive resource for understanding and utilizing different data structures efficiently in Python. +Here' an amazing repo regarding data structure using python. This is a compilation of python codes examples and implementations of various concepts of data structure. It comprises of various resources for efficient understanding and utilizion of data structure in python. ### Please read our [Contributing Guidelines](CONTRIBUTING.md) before contributing or starting with an issue. -> Make sure you are following all the steps given in the [Guidelines](CONTRIBUTING.md) for faster acceptance of your PR. +> Do not foget to follow these [Guidelines](CONTRIBUTING.md) for faster acceptance of your PR. ## 🪪 License -You may use this project freely at your own risk. See [LICENSE](https://choosealicense.com/licenses/mit/). +To make best use this project do check our [LICENSE](https://choosealicense.com/licenses/mit/). 
Copyright (c) 2022 Himanshu Agarwal From a6cb33a9cc660be82ab2679f1db3922d05af251a Mon Sep 17 00:00:00 2001 From: Prateek Hebsur <54493047+Villain45@users.noreply.github.com> Date: Thu, 12 Oct 2023 08:38:11 +0530 Subject: [PATCH 05/89] dynamic input implemented --- Codes/Sorting Techniques/Cycle Sort.py | 22 +++++++++++++--------- 1 file changed, 13 insertions(+), 9 deletions(-) diff --git a/Codes/Sorting Techniques/Cycle Sort.py b/Codes/Sorting Techniques/Cycle Sort.py index 484aff6..a4a1877 100644 --- a/Codes/Sorting Techniques/Cycle Sort.py +++ b/Codes/Sorting Techniques/Cycle Sort.py @@ -2,38 +2,42 @@ def cycleSort(arr): for idx in range(0,len(arr)-1): cur_ele = arr[idx] pos = idx - + # Find the pos where the element should be inserted for j in range(idx+1,len(arr)): if arr[j] < cur_ele: pos += 1 - + # If element is in correct position then move to next cycle if idx == pos: continue - + # Finding the exact positions if any duplicates while cur_ele == arr[pos]: pos+=1 - + # Put the element to it's other position by swapping arr[pos], cur_ele = cur_ele, arr[pos] - + # Traverse through rest of the cycle to rotate while pos != idx: pos = idx - + # Find the pos to insert for k in range(idx+1,len(arr)): if arr[k] < cur_ele: pos += 1 - + # To check out for duplicates while cur_ele == arr[pos]: pos += 1 - + arr[pos], cur_ele = cur_ele, arr[pos] -arr = [5, 2, 2, 7, 8, 9, 5, 1] +n=int(input("Enter the size of array to be sorted\n")) +arr = [] +for i in range(0,n): + arr.append(int(input())) cycleSort(arr) +print("Sorted Array is") print(arr) From 4f723702ccff916f9c03694a147e78091c0c4880 Mon Sep 17 00:00:00 2001 From: Astha Tripathi <79215705+Astha369@users.noreply.github.com> Date: Thu, 12 Oct 2023 11:32:02 +0530 Subject: [PATCH 06/89] Update Bucket_Sort.py --- Codes/Sorting Techniques/Bucket_Sort.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Codes/Sorting Techniques/Bucket_Sort.py b/Codes/Sorting Techniques/Bucket_Sort.py index d901d0c..9fd8386 100644 --- a/Codes/Sorting Techniques/Bucket_Sort.py +++ b/Codes/Sorting Techniques/Bucket_Sort.py @@ -23,7 +23,7 @@ def bucketSort(array): k += 1 return array - -array = [.42, .32, .33, .52, .37, .47, .51] +user_input = input("Enter a list of elements to be sorted, separated by spaces: ") +array = [int(x) for x in user_input.split()] print("Sorted Array in descending order is") print(bucketSort(array)) From d1439261c3f2bcf59f02ddbd0e2f7e41fa9f5a43 Mon Sep 17 00:00:00 2001 From: sarthak ganure Date: Thu, 12 Oct 2023 12:47:06 +0530 Subject: [PATCH 07/89] folder structure corrected --- Algorithms/graphs/dfs.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 Algorithms/graphs/dfs.py diff --git a/Algorithms/graphs/dfs.py b/Algorithms/graphs/dfs.py new file mode 100644 index 0000000..fb41911 --- /dev/null +++ b/Algorithms/graphs/dfs.py @@ -0,0 +1,24 @@ +def create_graph(): + graph = {} + n = int(input("Enter the number of nodes: ")) + for i in range(n): + node = str(i) + edges = set(input(f"Enter the edges for node {node} (space-separated): ").split()) + graph[node] = edges + return graph + +def dfs(graph, start, visited=None): + if visited is None: + visited = set() + visited.add(start) + + print(start) + + for next_node in graph[start] - visited: + dfs(graph, next_node, visited) + return visited + +graph = create_graph() +start_node = input("Enter the starting node: ") + +dfs(graph, start_node) From d1129dc1641991fab5b3fa815efae98f3b7beeec Mon Sep 17 00:00:00 2001 From: sarthak ganure Date: Thu, 12 
Oct 2023 13:04:44 +0530 Subject: [PATCH 08/89] added dfs in codes section --- {Algorithms => Codes}/graphs/dfs.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {Algorithms => Codes}/graphs/dfs.py (100%) diff --git a/Algorithms/graphs/dfs.py b/Codes/graphs/dfs.py similarity index 100% rename from Algorithms/graphs/dfs.py rename to Codes/graphs/dfs.py From 3e86016b194718bdb70d7a2c5c461a3886f27195 Mon Sep 17 00:00:00 2001 From: KallindSoni Date: Thu, 12 Oct 2023 13:08:58 +0530 Subject: [PATCH 09/89] added gbfs and astar search algorithms added gbfs and astar search algorithms using manhattan as the heuritstic function. Created a folder called graph for the same --- Codes/Graphs/A_star.py | 133 +++++++++++++++++++++++++++++++++++++++++ Codes/Graphs/GBFS.py | 108 +++++++++++++++++++++++++++++++++ 2 files changed, 241 insertions(+) create mode 100644 Codes/Graphs/A_star.py create mode 100644 Codes/Graphs/GBFS.py diff --git a/Codes/Graphs/A_star.py b/Codes/Graphs/A_star.py new file mode 100644 index 0000000..111755a --- /dev/null +++ b/Codes/Graphs/A_star.py @@ -0,0 +1,133 @@ +from collections import defaultdict + +class Graph: + def __init__(self): + self.graph = defaultdict(list) + + def add_edge(self, u, v): + self.graph[u].append(v) + + def bfs(self, start_node): + visited = [False] * (max(self.graph) + 1) + queue = [] + + queue.append(start_node) + visited[start_node] = True + + while queue: + node = queue.pop(0) + print(node, end=" ") + + for neighbor in self.graph[node]: + if not visited[neighbor]: + queue.append(neighbor) + visited[neighbor] = True + +class Node(): + def __init__(self, state, parent, action, heuristic, cost): + self.state = state + self.parent = parent + self.action = action + self.heuristic = heuristic + self.cost = cost + self.combined = heuristic + cost + +class StackFrontier(): + def __init__(self): + self.frontier = [] + + def add(self, node): + self.frontier.append(node) + + def contains_state(self, state): + return any(node.state == state for node in self.frontier) + + def empty(self): + return len(self.frontier) == 0 + + def remove(self): + if self.empty(): + raise Exception("Empty frontier") + else: + node = self.frontier.pop() + return node + +class QueueFrontier(StackFrontier): + def remove(self): + if self.empty(): + raise Exception("Empty frontier") + else: + node = self.frontier.pop(0) + return node + +class gbfsFrontier(StackFrontier): + def remove(self): + if self.empty(): + raise Exception("Empty Frontier") + else: + node = min(self.frontier, key=lambda x: x.heuristic) + self.frontier.remove(node) + return node + +class a_starFrontier(StackFrontier): + def remove(self): + if self.empty(): + raise Exception("Empty Frontier") + else: + node = min(self.frontier, key=lambda x: x.combined) + self.frontier.remove(node) + return node + +# Create a new class that extends the Graph class for A* search +class AStarGraph(Graph): + def a_star(self, start_node, goal_node): + visited = [False] * (max(self.graph) + 1) + frontier = a_starFrontier() + start_state = Node(start_node, None, None, self.manhattan(start_node, goal_node), 0) + frontier.add(start_state) + + while not frontier.empty(): + node = frontier.remove() + state = node.state + + if state == goal_node: + path = [] + while node.parent is not None: + path.append(node.state) + node = node.parent + path.append(start_node) + path.reverse() + return path + + visited[state] = True + + for neighbor in self.graph[state]: + if not visited[neighbor]: + cost = node.cost + 1 # Assuming unit cost for 
simplicity + heuristic = self.manhattan(neighbor, goal_node) + neighbor_state = Node(neighbor, node, None, heuristic, cost) + frontier.add(neighbor_state) + + return None + + def manhattan(self, node, goal_node): + x1, y1 = node % 3, node // 3 + x2, y2 = goal_node % 3, goal_node // 3 + return abs(x1 - x2) + abs(y1 - y2) + +# Example usage: +a_star_graph = AStarGraph() +a_star_graph.add_edge(0, 1) +a_star_graph.add_edge(0, 2) +a_star_graph.add_edge(1, 2) +a_star_graph.add_edge(2, 0) +a_star_graph.add_edge(2, 3) +a_star_graph.add_edge(3, 3) + +goal_node = 3 +print("A* search starting from node 2 to reach node 3:") +path = a_star_graph.a_star(2, goal_node) +if path: + print("Path:", path) +else: + print("No path found") diff --git a/Codes/Graphs/GBFS.py b/Codes/Graphs/GBFS.py new file mode 100644 index 0000000..f90e410 --- /dev/null +++ b/Codes/Graphs/GBFS.py @@ -0,0 +1,108 @@ +import sys +import numpy as np +from collections import defaultdict + +class Node(): + def __init__(self, state, parent, action, heuristic): + self.state = state + self.parent = parent + self.action = action + self.heuristic = heuristic + +class StackFrontier(): + def __init__(self): + self.frontier = [] + + def add(self, node): + self.frontier.append(node) + + def contains_state(self, state): + return any(node.state == state for node in self.frontier) + + def empty(self): + return len(self.frontier) == 0 + + def remove(self): + if self.empty(): + raise Exception("empty frontier") + else: + node = self.frontier[-1] + self.frontier = self.frontier[:-1] + return node + +class QueueFrontier(StackFrontier): + def remove(self): + if self.empty(): + raise Exception("empty frontier") + else: + node = self.frontier[0] + self.frontier = self.frontier[1:] + return node + +class gbfsFrontier(StackFrontier): + def remove(self): + if self.empty(): + raise Exception("Empty Frontier") + else: + node = min(self.frontier, key=lambda x: x.heuristic) + self.frontier.remove(node) + return node + +# Create a new class that extends the Graph class +class GBFSGraph: + def __init__(self): + self.graph = defaultdict(list) + + def add_edge(self, u, v): + self.graph[u].append(v) + + def gbfs(self, start_node, goal_node): + visited = [False] * (max(self.graph) + 1) + frontier = gbfsFrontier() + start_state = Node(start_node, None, None, self.manhattan(start_node, goal_node)) + frontier.add(start_state) + + while not frontier.empty(): + node = frontier.remove() + state = node.state + + if state == goal_node: + path = [] + while node.parent is not None: + path.append(node.state) + node = node.parent + path.append(start_node) + path.reverse() + return path + + visited[state] = True + + for neighbor in self.graph[state]: + if not visited[neighbor]: + heuristic = self.manhattan(neighbor, goal_node) + neighbor_state = Node(neighbor, node, None, heuristic) + frontier.add(neighbor_state) + + return None + + def manhattan(self, node, goal_node): + x1, y1 = node % 3, node // 3 + x2, y2 = goal_node % 3, goal_node // 3 + return abs(x1 - x2) + abs(y1 - y2) + +# Example usage: +gbfs_graph = GBFSGraph() +gbfs_graph.add_edge(0, 1) +gbfs_graph.add_edge(0, 2) +gbfs_graph.add_edge(1, 2) +gbfs_graph.add_edge(2, 0) +gbfs_graph.add_edge(2, 3) +gbfs_graph.add_edge(3, 3) + +goal_node = 3 +print("GBFS starting from node 2 to reach node 3:") +path = gbfs_graph.gbfs(2, goal_node) +if path: + print("Path:", path) +else: + print("No path found") From f1303e62771f6b8d5e35ce4727e788e107428993 Mon Sep 17 00:00:00 2001 From: Preyal Ameta Date: Thu, 12 Oct 2023 19:07:52 +0530 
Subject: [PATCH 10/89] algo/code: 1) Added BFS traversal in graph. 2) Added Topological sort for Directed Acyclic graph --- Codes/Sorting Techniques/Topological_Sort.py | 54 ++++++++++++++++++++ Codes/graphs/bfs.py | 39 ++++++++++++++ 2 files changed, 93 insertions(+) create mode 100644 Codes/Sorting Techniques/Topological_Sort.py create mode 100644 Codes/graphs/bfs.py diff --git a/Codes/Sorting Techniques/Topological_Sort.py b/Codes/Sorting Techniques/Topological_Sort.py new file mode 100644 index 0000000..0edd3e0 --- /dev/null +++ b/Codes/Sorting Techniques/Topological_Sort.py @@ -0,0 +1,54 @@ +#Python program to print topological sorting of a DAG(Directed Acyclic Graph) +from collections import defaultdict + +#Class to represent a graph +class Graph: + def __init__(self,vertices): + self.graph = defaultdict(list) #dictionary containing adjacency List + self.V = vertices #No. of vertices + + # function to add an edge to graph + def addEdge(self,u,v): + self.graph[u].append(v) + + # A recursive function used by topologicalSort + def topologicalSortUtil(self,v,visited,stack): + + # Mark the current node as visited. + visited[v] = True + + # Recur for all the vertices adjacent to this vertex + for i in self.graph[v]: + if visited[i] == False: + self.topologicalSortUtil(i,visited,stack) + + # Push current vertex to stack which stores result + stack.insert(0,v) + + # The function to do Topological Sort. It uses recursive + # topologicalSortUtil() + def topologicalSort(self): + # Mark all the vertices as not visited + visited = [False]*self.V + stack =[] + + # Call the recursive helper function to store Topological + # Sort starting from all vertices one by one + for i in range(self.V): + if visited[i] == False: + self.topologicalSortUtil(i,visited,stack) + + # Print contents of stack + print (stack) + +g= Graph(6) +g.addEdge(5, 2); +g.addEdge(5, 0); +g.addEdge(4, 0); +g.addEdge(4, 1); +g.addEdge(2, 3); +g.addEdge(3, 1); + +print ("Following is a Topological Sort of the given graph") +g.topologicalSort() + diff --git a/Codes/graphs/bfs.py b/Codes/graphs/bfs.py new file mode 100644 index 0000000..dc2d152 --- /dev/null +++ b/Codes/graphs/bfs.py @@ -0,0 +1,39 @@ +# Create a graph given in the above diagram. +graph = { + 'A': ['B', 'C', 'D'], + 'B': ['A'], + 'C': ['A', 'D'], + 'D': ['A', 'C', 'E'], + 'E': ['D'], +} + +# to print a BFS of a graph +def bfs(node): + + # mark vertices as False means not visited + visited = [False] * (len(graph)) + + # make an empty queue for bfs + queue = [] + + # mark gave node as visited and add it to the queue + visited.append(node) + queue.append(node) + + while queue: + # Remove the front vertex or the vertex at the 0th index from the queue and print that vertex. + v = queue.pop(0) + print(v, end=" ") + + # Get all adjacent nodes of the removed node v from the graph hash table. + # If an adjacent node has not been visited yet, + # then mark it as visited and add it to the queue. 
+ for neigh in graph[v]: + if neigh not in visited: + visited.append(neigh) + queue.append(neigh) + + +# Driver Code +if __name__ == "__main__": + bfs('A') From 2150ce215949bc96a19cca269748ce11f0ae61ae Mon Sep 17 00:00:00 2001 From: stanleyedward Date: Thu, 12 Oct 2023 20:36:51 +0530 Subject: [PATCH 11/89] Add Shell Sort --- Codes/Sorting Techniques/Shell_Sort.py | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 Codes/Sorting Techniques/Shell_Sort.py diff --git a/Codes/Sorting Techniques/Shell_Sort.py b/Codes/Sorting Techniques/Shell_Sort.py new file mode 100644 index 0000000..6200f55 --- /dev/null +++ b/Codes/Sorting Techniques/Shell_Sort.py @@ -0,0 +1,26 @@ +def shell_sort(arr): + n = len(arr) + gap = n // 2 # Initial gap size + + while gap > 0: + for i in range(gap, n): + temp = arr[i] + j = i + + while j >= gap and arr[j - gap] > temp: + arr[j] = arr[j - gap] + j -= gap + + arr[j] = temp + + gap //= 2 # Reduce the gap size + +# Input a list of numbers from the user +user_input = input("Enter a list of numbers separated by spaces: ") +user_list = [int(x) for x in user_input.split()] + +# Apply Shell sort to the user's list +shell_sort(user_list) + +# Display the sorted list +print("Sorted list:", user_list) From d355e4569e84f6bfbfd3a339fe8a29a6089495e0 Mon Sep 17 00:00:00 2001 From: KallindSoni Date: Thu, 12 Oct 2023 22:32:48 +0530 Subject: [PATCH 12/89] added dynamic functionality added user input(dynamic funcionailty ) to both gbfs and astar algorithms --- Codes/Graphs/A_star.py | 48 ++++++++++++++++++++++++------------------ Codes/Graphs/GBFS.py | 27 ++++++++++++++---------- 2 files changed, 43 insertions(+), 32 deletions(-) diff --git a/Codes/Graphs/A_star.py b/Codes/Graphs/A_star.py index 111755a..ae13169 100644 --- a/Codes/Graphs/A_star.py +++ b/Codes/Graphs/A_star.py @@ -1,11 +1,11 @@ from collections import defaultdict class Graph: - def __init__(self): + def __init(self): self.graph = defaultdict(list) - def add_edge(self, u, v): - self.graph[u].append(v) + def add_edge(self, u, v, cost=1): + self.graph[u].append((v, cost)) def bfs(self, start_node): visited = [False] * (max(self.graph) + 1) @@ -19,9 +19,10 @@ def bfs(self, start_node): print(node, end=" ") for neighbor in self.graph[node]: - if not visited[neighbor]: - queue.append(neighbor) - visited[neighbor] = True + neighbor_node, _ = neighbor + if not visited[neighbor_node]: + queue.append(neighbor_node) + visited[neighbor_node] = True class Node(): def __init__(self, state, parent, action, heuristic, cost): @@ -78,7 +79,6 @@ def remove(self): self.frontier.remove(node) return node -# Create a new class that extends the Graph class for A* search class AStarGraph(Graph): def a_star(self, start_node, goal_node): visited = [False] * (max(self.graph) + 1) @@ -101,11 +101,11 @@ def a_star(self, start_node, goal_node): visited[state] = True - for neighbor in self.graph[state]: + for neighbor, cost in self.graph[state]: if not visited[neighbor]: - cost = node.cost + 1 # Assuming unit cost for simplicity + new_cost = node.cost + cost heuristic = self.manhattan(neighbor, goal_node) - neighbor_state = Node(neighbor, node, None, heuristic, cost) + neighbor_state = Node(neighbor, node, None, heuristic, new_cost) frontier.add(neighbor_state) return None @@ -115,18 +115,24 @@ def manhattan(self, node, goal_node): x2, y2 = goal_node % 3, goal_node // 3 return abs(x1 - x2) + abs(y1 - y2) -# Example usage: +# Example usage with user input: a_star_graph = AStarGraph() -a_star_graph.add_edge(0, 1) 
-a_star_graph.add_edge(0, 2) -a_star_graph.add_edge(1, 2) -a_star_graph.add_edge(2, 0) -a_star_graph.add_edge(2, 3) -a_star_graph.add_edge(3, 3) - -goal_node = 3 -print("A* search starting from node 2 to reach node 3:") -path = a_star_graph.a_star(2, goal_node) + +# Get user input for defining the graph +while True: + u = int(input("Enter edge source node (or -1 to stop): ")) + if u == -1: + break + v = int(input("Enter edge target node: ")) + cost = int(input("Enter edge cost: ")) + a_star_graph.add_edge(u, v, cost) + +# Get user input for the start and goal nodes +start_node = int(input("Enter the start node: ")) +goal_node = int(input("Enter the goal node: ")) + +print(f"A* search starting from node {start_node} to reach node {goal_node}:") +path = a_star_graph.a_star(start_node, goal_node) if path: print("Path:", path) else: diff --git a/Codes/Graphs/GBFS.py b/Codes/Graphs/GBFS.py index f90e410..f569fae 100644 --- a/Codes/Graphs/GBFS.py +++ b/Codes/Graphs/GBFS.py @@ -90,18 +90,23 @@ def manhattan(self, node, goal_node): x2, y2 = goal_node % 3, goal_node // 3 return abs(x1 - x2) + abs(y1 - y2) -# Example usage: +# Example usage with user input: gbfs_graph = GBFSGraph() -gbfs_graph.add_edge(0, 1) -gbfs_graph.add_edge(0, 2) -gbfs_graph.add_edge(1, 2) -gbfs_graph.add_edge(2, 0) -gbfs_graph.add_edge(2, 3) -gbfs_graph.add_edge(3, 3) - -goal_node = 3 -print("GBFS starting from node 2 to reach node 3:") -path = gbfs_graph.gbfs(2, goal_node) + +# Get user input for defining the graph +while True: + u = int(input("Enter edge source node (or -1 to stop): ")) + if u == -1: + break + v = int(input("Enter edge target node: ")) + gbfs_graph.add_edge(u, v) + +# Get user input for the start and goal nodes +start_node = int(input("Enter the start node: ")) +goal_node = int(input("Enter the goal node: ")) + +print(f"GBFS starting from node {start_node} to reach node {goal_node}:") +path = gbfs_graph.gbfs(start_node, goal_node) if path: print("Path:", path) else: From 6907793ace231eec61cf14ecd305bc96f650b787 Mon Sep 17 00:00:00 2001 From: Avdhesh-Varshney <114330097+Avdhesh-Varshney@users.noreply.github.com> Date: Fri, 13 Oct 2023 12:24:54 +0530 Subject: [PATCH 13/89] =?UTF-8?q?Fibonacci=20Searching=20Algorithm=20?= =?UTF-8?q?=F0=9F=98=8E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Fibonacci_Searching.py | 61 +++++++++++++++++++ 1 file changed, 61 insertions(+) create mode 100644 Codes/Searching Techniques/Fibonacci_Searching.py diff --git a/Codes/Searching Techniques/Fibonacci_Searching.py b/Codes/Searching Techniques/Fibonacci_Searching.py new file mode 100644 index 0000000..6bf65d9 --- /dev/null +++ b/Codes/Searching Techniques/Fibonacci_Searching.py @@ -0,0 +1,61 @@ +# Fibonacci searching algorithm only works on the sorted array with time complexity O(log(n)). + +# Function to find minimum out of two element +def min(x, y): + return x if x <= y else y + +# Returns the index of x if present, else returns -1 +def fibonacciSearch(array, target, n): + # If target is greater than last element of the array or smaller than first element of the array + if target > array[n-1] or target < array[0]: + return -1 + + # Initialize Fibonacci numbers + fiboMMm2 = 0 # (m-2)'th Fibonacci No. + fiboMMm1 = 1 # (m-1)'th Fibonacci No. 
+ fiboM = fiboMMm2 + fiboMMm1 # m'th Fibonacci + + # fiboM is going to store the smallest Fibonacci Number greater than or equal to n + while fiboM < n: + fiboMMm2, fiboMMm1 = fiboMMm1, fiboM + fiboM = fiboMMm2 + fiboMMm1 + + # Marks the eliminated range from the front + offset = -1 + + # While there are elements to be inspected. + # Note that we compare array[fiboMm2] with target. + # When fiboM becomes 1, fiboMm2 becomes 0 + while fiboM > 1: + # Check if fiboMm2 is a valid location + i = min(offset + fiboMMm2, n - 1) + + # If target is greater than the value at index fiboMm2, cut the subarray array from offset to i + if array[i] < target: + fiboM, fiboMMm1, fiboMMm2 = fiboMMm1, fiboMMm2, fiboM - fiboMMm1 + offset = i + + # If target is greater than the value at index fiboMm2, cut the subarray after i+1 + elif array[i] > target: + fiboM, fiboMMm1, fiboMMm2 = fiboMMm2, fiboMMm1 - fiboMMm2, fiboM - fiboMMm1 + + # Element found, return index + else: + return i + + # Comparing the last element with target + if fiboMMm1 and array[offset + 1] == target: + return offset + 1 + + # Element not found, return -1 + return -1 + +if __name__ == "__main__": + array = [5, 6, 7, 8, 17, 19, 20, 21, 23, 34, 67, 97, 675] + n = len(array) + target = 31 + index = fibonacciSearch(array, target, n) + if index != -1: + print(target, "is present at index:", index) + else: + print(target, "isn't present in the array") From bd9625fbbce2f8241d7805d16b67382e638797e0 Mon Sep 17 00:00:00 2001 From: anshul-2010 Date: Fri, 13 Oct 2023 13:41:32 +0530 Subject: [PATCH 14/89] this fixes issue #44 --- Algorithms/Sorting Techniques/Bucket_Sort.py | 31 ++++++++++++++++++++ 1 file changed, 31 insertions(+) create mode 100644 Algorithms/Sorting Techniques/Bucket_Sort.py diff --git a/Algorithms/Sorting Techniques/Bucket_Sort.py b/Algorithms/Sorting Techniques/Bucket_Sort.py new file mode 100644 index 0000000..77904c2 --- /dev/null +++ b/Algorithms/Sorting Techniques/Bucket_Sort.py @@ -0,0 +1,31 @@ +def Insertion_Sort(array): + n = len(array) + for i in range(1,n): + for j in range(i,0,-1): + if(array[j] < array[j-1]): + array[j], array[j-1] = array[j-1], array[j] + else: + break + return array + + +def bucket_sort(array): + bucket_array = [] + slot_number = 10 + for i in range(slot_number): + bucket_array.append([]) + + for j in array: + index_bucket = int(slot_number*j) + bucket_array[index_bucket].append(j) + + for i in range(slot_number): + bucket_array[i] = Insertion_Sort(bucket_array[i]) + + k = 0 + for i in range(slot_number): + for j in range(len(bucket_array[i])): + array[k] = bucket_array[i][j] + k += 1 + + return array \ No newline at end of file From 515a2f2e3f55f23d264832d075aaa42a78b32971 Mon Sep 17 00:00:00 2001 From: Preyal Ameta Date: Fri, 13 Oct 2023 14:03:38 +0530 Subject: [PATCH 15/89] Committed requested changes : Add dynamic user input. 
--- Codes/Sorting Techniques/Topological_Sort.py | 99 ++++++++++---------- Codes/graphs/bfs.py | 68 ++++++++------ 2 files changed, 88 insertions(+), 79 deletions(-) diff --git a/Codes/Sorting Techniques/Topological_Sort.py b/Codes/Sorting Techniques/Topological_Sort.py index 0edd3e0..7e358fe 100644 --- a/Codes/Sorting Techniques/Topological_Sort.py +++ b/Codes/Sorting Techniques/Topological_Sort.py @@ -1,54 +1,53 @@ -#Python program to print topological sorting of a DAG(Directed Acyclic Graph) from collections import defaultdict -#Class to represent a graph +# Class to represent a graph class Graph: - def __init__(self,vertices): - self.graph = defaultdict(list) #dictionary containing adjacency List - self.V = vertices #No. of vertices - - # function to add an edge to graph - def addEdge(self,u,v): - self.graph[u].append(v) - - # A recursive function used by topologicalSort - def topologicalSortUtil(self,v,visited,stack): - - # Mark the current node as visited. - visited[v] = True - - # Recur for all the vertices adjacent to this vertex - for i in self.graph[v]: - if visited[i] == False: - self.topologicalSortUtil(i,visited,stack) - - # Push current vertex to stack which stores result - stack.insert(0,v) - - # The function to do Topological Sort. It uses recursive - # topologicalSortUtil() - def topologicalSort(self): - # Mark all the vertices as not visited - visited = [False]*self.V - stack =[] - - # Call the recursive helper function to store Topological - # Sort starting from all vertices one by one - for i in range(self.V): - if visited[i] == False: - self.topologicalSortUtil(i,visited,stack) - - # Print contents of stack - print (stack) - -g= Graph(6) -g.addEdge(5, 2); -g.addEdge(5, 0); -g.addEdge(4, 0); -g.addEdge(4, 1); -g.addEdge(2, 3); -g.addEdge(3, 1); - -print ("Following is a Topological Sort of the given graph") -g.topologicalSort() + def __init__(self, vertices): + self.graph = defaultdict(list) # Dictionary containing adjacency List + self.V = vertices # No. of vertices + + # Function to add an edge to the graph + def addEdge(self, u, v): + self.graph[u].append(v) + + # A recursive function used by topologicalSort + def topologicalSortUtil(self, v, visited, stack): + # Mark the current node as visited. + visited[v] = True + + # Recur for all the vertices adjacent to this vertex + for i in self.graph[v]: + if visited[i] == False: + self.topologicalSortUtil(i, visited, stack) + + # Push the current vertex to the stack which stores the result + stack.insert(0, v) + + # The function to do Topological Sort. It uses recursive topologicalSortUtil() + def topologicalSort(self): + # Mark all the vertices as not visited + visited = [False] * self.V + stack = [] + + # Call the recursive helper function to store Topological + # Sort starting from all vertices one by one + for i in range(self.V): + if visited[i] == False: + self.topologicalSortUtil(i, visited, stack) + + # Print the contents of the stack + print("Following is a Topological Sort of the given graph:") + print(stack) + +# Input from the user +num_vertices = int(input("Enter the number of vertices: ")) +num_edges = int(input("Enter the number of edges: ")) + +g = Graph(num_vertices) + +print("Enter the edges :") +for _ in range(num_edges): + u, v = map(int, input().split()) + g.addEdge(u, v) +g.topologicalSort() diff --git a/Codes/graphs/bfs.py b/Codes/graphs/bfs.py index dc2d152..065d887 100644 --- a/Codes/graphs/bfs.py +++ b/Codes/graphs/bfs.py @@ -1,39 +1,49 @@ -# Create a graph given in the above diagram. 
-graph = { - 'A': ['B', 'C', 'D'], - 'B': ['A'], - 'C': ['A', 'D'], - 'D': ['A', 'C', 'E'], - 'E': ['D'], -} - -# to print a BFS of a graph -def bfs(node): - - # mark vertices as False means not visited - visited = [False] * (len(graph)) - - # make an empty queue for bfs +# Initialize an empty graph +graph = {} + +# Function to add an edge to the graph +def add_edge(graph, u, v): + if u in graph: + graph[u].append(v) + else: + graph[u] = [v] + +# Function to print a BFS of a graph +def bfs(graph, start_node): + if start_node not in graph: + print("Start node not found in the graph.") + return + + # Mark vertices as False means not visited + visited = {node: False for node in graph} + + # Make an empty queue for BFS queue = [] - # mark gave node as visited and add it to the queue - visited.append(node) - queue.append(node) + # Mark the given start node as visited and add it to the queue + visited[start_node] = True + queue.append(start_node) while queue: - # Remove the front vertex or the vertex at the 0th index from the queue and print that vertex. + # Remove the front vertex (or the vertex at the 0th index) from the queue and print it. v = queue.pop(0) print(v, end=" ") - # Get all adjacent nodes of the removed node v from the graph hash table. - # If an adjacent node has not been visited yet, - # then mark it as visited and add it to the queue. + # Get all adjacent nodes of the removed node v from the graph dictionary. + # If an adjacent node has not been visited yet, mark it as visited and add it to the queue. for neigh in graph[v]: - if neigh not in visited: - visited.append(neigh) + if not visited[neigh]: + visited[neigh] = True queue.append(neigh) - -# Driver Code -if __name__ == "__main__": - bfs('A') +# Input from the user to create the graph +while True: + u = input("Enter the source node (or 'quit' to finish): ") + if u == 'quit': + break + v = input(f"Enter the destination node(s) for node {u} (comma-separated): ") + for dest in v.split(','): + add_edge(graph, u, dest) + +start_node = input("Enter the start node for BFS: ") +bfs(graph, start_node) From 9318b6de30cc922b49781329bae622f4c3d29915 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:51:18 +0530 Subject: [PATCH 16/89] Create Bubble_sort.py Bubble sort --- Algorithms/Sorting Techniques/Bubble_sort.py | 25 ++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 Algorithms/Sorting Techniques/Bubble_sort.py diff --git a/Algorithms/Sorting Techniques/Bubble_sort.py b/Algorithms/Sorting Techniques/Bubble_sort.py new file mode 100644 index 0000000..5a60fdb --- /dev/null +++ b/Algorithms/Sorting Techniques/Bubble_sort.py @@ -0,0 +1,25 @@ +#Bubble Sort is a simple comparison-based sorting algorithm. It repeatedly steps through the list, compares adjacent elements, and swaps them if they are in the wrong order. +#This process continues until no swaps are needed, indicating the list is sorted. +#The algorithm iterates through the list for 'n' elements (length of the list). +#Within each iteration, it compares adjacent elements from the beginning of the list to the (n - i - 1)-th element. +#If an element is greater than the one next to it, a swap is performed. +#This process is repeated until the largest unsorted element "bubbles up" to its correct position at the end of the list. +#The next iteration is then performed on the remaining unsorted portion. +#The process repeats until no more swaps are needed, indicating the list is sorted. 
+#It has a time complexity of O(n^2) in the worst case, making it impractical for large datasets. + +def bubble_sort(arr): + """ + Sort a list using Bubble Sort. + + >>> bubble_sort([4, 2, 7, 1, 9, 3]) + [1, 2, 3, 4, 7, 9] + + >>> bubble_sort([5, 4, 3, 2, 1]) + [1, 2, 3, 4, 5] + """ + n = len(arr) + for i in range(n): + for j in range(0, n - i - 1): + if arr[j] > arr[j + 1]: + arr[j], arr[j + 1] = arr[j + 1], arr[j] From 4cf3891dbd6ad15b6607c497b3840e34e7cee93b Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:54:24 +0530 Subject: [PATCH 17/89] Create Selection_sort.py selection sort --- .../Sorting Techniques/Selection_sort.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 Algorithms/Sorting Techniques/Selection_sort.py diff --git a/Algorithms/Sorting Techniques/Selection_sort.py b/Algorithms/Sorting Techniques/Selection_sort.py new file mode 100644 index 0000000..45b1577 --- /dev/null +++ b/Algorithms/Sorting Techniques/Selection_sort.py @@ -0,0 +1,27 @@ +#The selection_sort function is an in-place comparison-based sorting algorithm. +#It takes an input list `arr` and determines its length `n`. +#It iterates through the list using an outer loop with `i` ranging from 0 to `n-1`. +#Inside the outer loop, it initializes `min_index` to `i`, assuming the current element is the minimum. +#It then uses an inner loop with `j` ranging from `i+1` to `n-1` to find the index of the minimum element in the unsorted part of the list. +#If it finds an element at index `j` that is smaller than the element at `min_index`, it updates `min_index` to `j`. +#After the inner loop completes, it swaps the element at index `i` with the element at `min_index`, effectively moving the minimum element to its correct position in the sorted part of the list. +#It repeats this process for each element in the list until the entire list is sorted. +#The sorted list is returned as the result, and the original list is now sorted in ascending order. + +def selection_sort(arr): + """ + Sort a list using Selection Sort. + + >>> selection_sort([4, 2, 7, 1, 9, 3]) + [1, 2, 3, 4, 7, 9] + + >>> selection_sort([5, 4, 3, 2, 1]) + [1, 2, 3, 4, 5] + """ + n = len(arr) + for i in range(n): + min_index = i + for j in range(i + 1, n): + if arr[j] < arr[min_index]: + min_index = j + arr[i], arr[min_index] = arr[min_index], arr[i] From 3c7efe7120973876f8414609d3c421e951926ab6 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:56:05 +0530 Subject: [PATCH 18/89] Create Insertion_sort.py insertion sort --- .../Sorting Techniques/Insertion_sort.py | 27 +++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 Algorithms/Sorting Techniques/Insertion_sort.py diff --git a/Algorithms/Sorting Techniques/Insertion_sort.py b/Algorithms/Sorting Techniques/Insertion_sort.py new file mode 100644 index 0000000..c7afb05 --- /dev/null +++ b/Algorithms/Sorting Techniques/Insertion_sort.py @@ -0,0 +1,27 @@ +#Insertion Sort is a simple comparison-based sorting algorithm that builds the final sorted list one element at a time. +#The algorithm starts with the second element (index 1) and iterates through the list (of length 'n'). +#It selects the current element as the 'key' and compares it with the elements to its left in the sorted portion of the list (elements before the current position). 
+#The algorithm moves elements in the sorted portion to the right to create space for the 'key' if the 'key' is smaller than the element being compared. +#This process continues until the 'key' is in its correct position in the sorted portion. +#The algorithm repeats this process for each element in the list, gradually expanding the sorted portion from left to right. +#Insertion Sort is efficient for small lists or lists that are already partially sorted. +#It has a time complexity of O(n^2) in the worst case, similar to Bubble and Selection Sort, making it less efficient for large datasets. +#It minimizes the number of comparisons and swaps, making it a suitable choice for small datasets or nearly sorted lists. + +def insertion_sort(arr): + """ + Sort a list using Insertion Sort. + + >>> insertion_sort([4, 2, 7, 1, 9, 3]) + [1, 2, 3, 4, 7, 9] + + >>> insertion_sort([5, 4, 3, 2, 1]) + [1, 2, 3, 4, 5] + """ + for i in range(1, len(arr)): + key = arr[i] + j = i - 1 + while j >= 0 and key < arr[j]: + arr[j + 1] = arr[j] + j -= 1 + arr[j + 1] = key From 225a0e35c453b7c9743a8cf34f5f7174e4bdfeab Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Fri, 13 Oct 2023 20:09:27 +0530 Subject: [PATCH 19/89] [MODIFIED] Directory Structure --- Codes/Sorting Techniques/Bucket_Sort.py | 29 ---------- Codes/Sorting Techniques/Merge_Sort.py | 58 ------------------- Codes/Graphs/A_star.py => Graphs/A_Star.py | 0 .../Breadth_First_Search (BFS).py | 0 .../Depth_First_Search (DFS).py | 0 .../Greedy_Best_First_Search (GBFS).py | 0 ...ven_Code_for_Circular_Doubly_LinkedList.py | 0 ...enu_Driven_Code_for_Circular_LinkedList.py | 0 .../Menu_Driven_Code_for_Doubly_LinkedList.py | 0 ...r_Dynamic_Linear_Queue_using_LinkedList.py | 0 ...Code_for_Dynamic_Stack_using_LinkedList.py | 0 .../Menu_Driven_Code_for_Linear_LinkedList.py | 0 .../Menu_Driven_Code_for_Circular_Queue.py | 0 ...r_Dynamic_Linear_Queue_using_LinkedList.py | 0 .../Menu_Driven_Code_for_Linear_Queue.py | 0 .../Menu_Driven_Code_for_Priority_Queue.py | 0 .../Binary_Search_Iterative.py | 0 .../Binary_Search_Recursive.py | 0 .../Exponetial_Search.py | 0 .../Linear_Search.py | 0 .../Sequential_Search.py | 0 .../Bubble_Sort.py | 0 .../Bucket_Sort.py | 0 .../Counting_Sort.py | 56 +++++++++--------- .../Cycle_Sort.py | 0 .../Heap_Sort.py | 0 .../Insertion_Sort.py | 0 .../Merge_Sort.py | 0 .../Quick_Sort.py | 0 .../Radix_Sort.py | 0 .../Selection_Sort.py | 0 .../Shell_Sort.py | 0 .../Topological_Sort.py | 0 .../Wave_Sort.py | 0 ...Code_for_Dynamic_Stack_using_LinkedList.py | 0 .../Menu_Driven_Code_for_Stack.py | 0 36 files changed, 28 insertions(+), 115 deletions(-) delete mode 100644 Codes/Sorting Techniques/Bucket_Sort.py delete mode 100644 Codes/Sorting Techniques/Merge_Sort.py rename Codes/Graphs/A_star.py => Graphs/A_Star.py (100%) rename Codes/graphs/bfs.py => Graphs/Breadth_First_Search (BFS).py (100%) rename Codes/graphs/dfs.py => Graphs/Depth_First_Search (DFS).py (100%) rename Codes/Graphs/GBFS.py => Graphs/Greedy_Best_First_Search (GBFS).py (100%) rename {Codes/Linked List => Linked List}/Menu_Driven_Code_for_Circular_Doubly_LinkedList.py (100%) rename {Codes/Linked List => Linked List}/Menu_Driven_Code_for_Circular_LinkedList.py (100%) rename {Codes/Linked List => Linked List}/Menu_Driven_Code_for_Doubly_LinkedList.py (100%) rename {Codes/Linked List => Linked List}/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py (100%) rename {Codes/Linked List => Linked List}/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py (100%) rename 
{Codes/Linked List => Linked List}/Menu_Driven_Code_for_Linear_LinkedList.py (100%) rename {Codes/Queue => Queue}/Menu_Driven_Code_for_Circular_Queue.py (100%) rename {Codes/Queue => Queue}/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py (100%) rename {Codes/Queue => Queue}/Menu_Driven_Code_for_Linear_Queue.py (100%) rename {Codes/Queue => Queue}/Menu_Driven_Code_for_Priority_Queue.py (100%) rename {Codes/Searching Techniques => Searching Techniques}/Binary_Search_Iterative.py (100%) rename {Codes/Searching Techniques => Searching Techniques}/Binary_Search_Recursive.py (100%) rename Codes/Searching Techniques/Exponetial__search_program.py => Searching Techniques/Exponetial_Search.py (100%) rename {Codes/Searching Techniques => Searching Techniques}/Linear_Search.py (100%) rename {Codes/Searching Techniques => Searching Techniques}/Sequential_Search.py (100%) rename {Codes/Sorting Techniques => Sorting Techniques}/Bubble_Sort.py (100%) rename {Algorithms/Sorting Techniques => Sorting Techniques}/Bucket_Sort.py (100%) rename Codes/Sorting Techniques/counting_sort.py => Sorting Techniques/Counting_Sort.py (96%) rename Codes/Sorting Techniques/Cycle Sort.py => Sorting Techniques/Cycle_Sort.py (100%) rename Codes/Sorting Techniques/Heapsort.py => Sorting Techniques/Heap_Sort.py (100%) rename {Codes/Sorting Techniques => Sorting Techniques}/Insertion_Sort.py (100%) rename {Algorithms/Sorting Techniques => Sorting Techniques}/Merge_Sort.py (100%) rename {Codes/Sorting Techniques => Sorting Techniques}/Quick_Sort.py (100%) rename {Algorithms/Sorting Techniques => Sorting Techniques}/Radix_Sort.py (100%) rename {Codes/Sorting Techniques => Sorting Techniques}/Selection_Sort.py (100%) rename {Codes/Sorting Techniques => Sorting Techniques}/Shell_Sort.py (100%) rename {Codes/Sorting Techniques => Sorting Techniques}/Topological_Sort.py (100%) rename Codes/Sorting Techniques/wave_sort.py => Sorting Techniques/Wave_Sort.py (100%) rename {Codes/Stack => Stack}/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py (100%) rename {Codes/Stack => Stack}/Menu_Driven_Code_for_Stack.py (100%) diff --git a/Codes/Sorting Techniques/Bucket_Sort.py b/Codes/Sorting Techniques/Bucket_Sort.py deleted file mode 100644 index 9fd8386..0000000 --- a/Codes/Sorting Techniques/Bucket_Sort.py +++ /dev/null @@ -1,29 +0,0 @@ -# Bucket Sort in Python - -def bucketSort(array): - bucket = [] - - # Create empty buckets - for i in range(len(array)): - bucket.append([]) - - for j in array: - index_b = int(10 * j) - bucket[index_b].append(j) - - # Sort the elements of each bucket - for i in range(len(array)): - bucket[i] = sorted(bucket[i]) - - # Get the sorted elements - k = 0 - for i in range(len(array)): - for j in range(len(bucket[i])): - array[k] = bucket[i][j] - k += 1 - return array - -user_input = input("Enter a list of elements to be sorted, separated by spaces: ") -array = [int(x) for x in user_input.split()] -print("Sorted Array in descending order is") -print(bucketSort(array)) diff --git a/Codes/Sorting Techniques/Merge_Sort.py b/Codes/Sorting Techniques/Merge_Sort.py deleted file mode 100644 index cad46f0..0000000 --- a/Codes/Sorting Techniques/Merge_Sort.py +++ /dev/null @@ -1,58 +0,0 @@ -# -*- coding: utf-8 -*- -"""Untitled0.ipynb - -Automatically generated by Colaboratory. 
- -Original file is located at - https://colab.research.google.com/drive/1r_ESNZfmsamM3vf802CW01GUvfLfgmjs - -# Merge Sort -""" - -def merger(a,start,mid,end): - temp=[] - for i in range(len(a)): - temp.append(0) - i = start - j = mid + 1 - ti = start - while i <= mid and j <= end: - if a[i] < a[j]: - temp[ti] = a[i] - ti+=1 - i+=1 - else: - temp[ti] = a[j] - ti+=1 - j+=1 - while i <= mid: - temp[ti] = a[i] - ti+=1 - i+=1 - while j <= end: - temp[ti] = a[j] - ti+=1 - j+=1 - - for i in range(start,end+1): - a[i] = temp[i] - -def mergesort(a,start,end): - if start < end: - mid = (start+end)//2 - mergesort(a,start,mid) - mergesort(a,mid+1,end) - merger(a,start,mid,end) - - -a = [] -size = int(input('Enter size of array: ')) -print('') -for i in range(size): - data = int(input('Enter element: ')) - a.append(data) -print('') -print('Elements are: ',a) - -mergesort(a,0,len(a)-1) -print('After sorting elements are: ',a) \ No newline at end of file diff --git a/Codes/Graphs/A_star.py b/Graphs/A_Star.py similarity index 100% rename from Codes/Graphs/A_star.py rename to Graphs/A_Star.py diff --git a/Codes/graphs/bfs.py b/Graphs/Breadth_First_Search (BFS).py similarity index 100% rename from Codes/graphs/bfs.py rename to Graphs/Breadth_First_Search (BFS).py diff --git a/Codes/graphs/dfs.py b/Graphs/Depth_First_Search (DFS).py similarity index 100% rename from Codes/graphs/dfs.py rename to Graphs/Depth_First_Search (DFS).py diff --git a/Codes/Graphs/GBFS.py b/Graphs/Greedy_Best_First_Search (GBFS).py similarity index 100% rename from Codes/Graphs/GBFS.py rename to Graphs/Greedy_Best_First_Search (GBFS).py diff --git a/Codes/Linked List/Menu_Driven_Code_for_Circular_Doubly_LinkedList.py b/Linked List/Menu_Driven_Code_for_Circular_Doubly_LinkedList.py similarity index 100% rename from Codes/Linked List/Menu_Driven_Code_for_Circular_Doubly_LinkedList.py rename to Linked List/Menu_Driven_Code_for_Circular_Doubly_LinkedList.py diff --git a/Codes/Linked List/Menu_Driven_Code_for_Circular_LinkedList.py b/Linked List/Menu_Driven_Code_for_Circular_LinkedList.py similarity index 100% rename from Codes/Linked List/Menu_Driven_Code_for_Circular_LinkedList.py rename to Linked List/Menu_Driven_Code_for_Circular_LinkedList.py diff --git a/Codes/Linked List/Menu_Driven_Code_for_Doubly_LinkedList.py b/Linked List/Menu_Driven_Code_for_Doubly_LinkedList.py similarity index 100% rename from Codes/Linked List/Menu_Driven_Code_for_Doubly_LinkedList.py rename to Linked List/Menu_Driven_Code_for_Doubly_LinkedList.py diff --git a/Codes/Linked List/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py b/Linked List/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py similarity index 100% rename from Codes/Linked List/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py rename to Linked List/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py diff --git a/Codes/Linked List/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py b/Linked List/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py similarity index 100% rename from Codes/Linked List/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py rename to Linked List/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py diff --git a/Codes/Linked List/Menu_Driven_Code_for_Linear_LinkedList.py b/Linked List/Menu_Driven_Code_for_Linear_LinkedList.py similarity index 100% rename from Codes/Linked List/Menu_Driven_Code_for_Linear_LinkedList.py rename to Linked List/Menu_Driven_Code_for_Linear_LinkedList.py diff --git 
a/Codes/Queue/Menu_Driven_Code_for_Circular_Queue.py b/Queue/Menu_Driven_Code_for_Circular_Queue.py similarity index 100% rename from Codes/Queue/Menu_Driven_Code_for_Circular_Queue.py rename to Queue/Menu_Driven_Code_for_Circular_Queue.py diff --git a/Codes/Queue/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py b/Queue/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py similarity index 100% rename from Codes/Queue/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py rename to Queue/Menu_Driven_Code_for_Dynamic_Linear_Queue_using_LinkedList.py diff --git a/Codes/Queue/Menu_Driven_Code_for_Linear_Queue.py b/Queue/Menu_Driven_Code_for_Linear_Queue.py similarity index 100% rename from Codes/Queue/Menu_Driven_Code_for_Linear_Queue.py rename to Queue/Menu_Driven_Code_for_Linear_Queue.py diff --git a/Codes/Queue/Menu_Driven_Code_for_Priority_Queue.py b/Queue/Menu_Driven_Code_for_Priority_Queue.py similarity index 100% rename from Codes/Queue/Menu_Driven_Code_for_Priority_Queue.py rename to Queue/Menu_Driven_Code_for_Priority_Queue.py diff --git a/Codes/Searching Techniques/Binary_Search_Iterative.py b/Searching Techniques/Binary_Search_Iterative.py similarity index 100% rename from Codes/Searching Techniques/Binary_Search_Iterative.py rename to Searching Techniques/Binary_Search_Iterative.py diff --git a/Codes/Searching Techniques/Binary_Search_Recursive.py b/Searching Techniques/Binary_Search_Recursive.py similarity index 100% rename from Codes/Searching Techniques/Binary_Search_Recursive.py rename to Searching Techniques/Binary_Search_Recursive.py diff --git a/Codes/Searching Techniques/Exponetial__search_program.py b/Searching Techniques/Exponetial_Search.py similarity index 100% rename from Codes/Searching Techniques/Exponetial__search_program.py rename to Searching Techniques/Exponetial_Search.py diff --git a/Codes/Searching Techniques/Linear_Search.py b/Searching Techniques/Linear_Search.py similarity index 100% rename from Codes/Searching Techniques/Linear_Search.py rename to Searching Techniques/Linear_Search.py diff --git a/Codes/Searching Techniques/Sequential_Search.py b/Searching Techniques/Sequential_Search.py similarity index 100% rename from Codes/Searching Techniques/Sequential_Search.py rename to Searching Techniques/Sequential_Search.py diff --git a/Codes/Sorting Techniques/Bubble_Sort.py b/Sorting Techniques/Bubble_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Bubble_Sort.py rename to Sorting Techniques/Bubble_Sort.py diff --git a/Algorithms/Sorting Techniques/Bucket_Sort.py b/Sorting Techniques/Bucket_Sort.py similarity index 100% rename from Algorithms/Sorting Techniques/Bucket_Sort.py rename to Sorting Techniques/Bucket_Sort.py diff --git a/Codes/Sorting Techniques/counting_sort.py b/Sorting Techniques/Counting_Sort.py similarity index 96% rename from Codes/Sorting Techniques/counting_sort.py rename to Sorting Techniques/Counting_Sort.py index 56138b6..2ad28b5 100644 --- a/Codes/Sorting Techniques/counting_sort.py +++ b/Sorting Techniques/Counting_Sort.py @@ -1,28 +1,28 @@ -def counting_sort(arr): - # Find the maximum and minimum values in the input array - max_val = max(arr) - min_val = min(arr) - - # Create a counting array with a size equal to the range of values - count_array = [0] * (max_val - min_val + 1) - - # Count the occurrences of each element in the input array - for num in arr: - count_array[num - min_val] += 1 - - # Reconstruct the sorted array from the counting array - sorted_array = [] - for i in 
range(len(count_array)): - sorted_array.extend([i + min_val] * count_array[i]) - - return sorted_array - -try: - input_str = input("Enter a list of numbers separated by spaces: ") - arr = list(map(int, input_str.split())) - - sorted_arr = counting_sort(arr) - print("Original array:", arr) - print("Sorted array:", sorted_arr) -except ValueError: - print("Please enter valid integers separated by spaces.") +def counting_sort(arr): + # Find the maximum and minimum values in the input array + max_val = max(arr) + min_val = min(arr) + + # Create a counting array with a size equal to the range of values + count_array = [0] * (max_val - min_val + 1) + + # Count the occurrences of each element in the input array + for num in arr: + count_array[num - min_val] += 1 + + # Reconstruct the sorted array from the counting array + sorted_array = [] + for i in range(len(count_array)): + sorted_array.extend([i + min_val] * count_array[i]) + + return sorted_array + +try: + input_str = input("Enter a list of numbers separated by spaces: ") + arr = list(map(int, input_str.split())) + + sorted_arr = counting_sort(arr) + print("Original array:", arr) + print("Sorted array:", sorted_arr) +except ValueError: + print("Please enter valid integers separated by spaces.") diff --git a/Codes/Sorting Techniques/Cycle Sort.py b/Sorting Techniques/Cycle_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Cycle Sort.py rename to Sorting Techniques/Cycle_Sort.py diff --git a/Codes/Sorting Techniques/Heapsort.py b/Sorting Techniques/Heap_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Heapsort.py rename to Sorting Techniques/Heap_Sort.py diff --git a/Codes/Sorting Techniques/Insertion_Sort.py b/Sorting Techniques/Insertion_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Insertion_Sort.py rename to Sorting Techniques/Insertion_Sort.py diff --git a/Algorithms/Sorting Techniques/Merge_Sort.py b/Sorting Techniques/Merge_Sort.py similarity index 100% rename from Algorithms/Sorting Techniques/Merge_Sort.py rename to Sorting Techniques/Merge_Sort.py diff --git a/Codes/Sorting Techniques/Quick_Sort.py b/Sorting Techniques/Quick_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Quick_Sort.py rename to Sorting Techniques/Quick_Sort.py diff --git a/Algorithms/Sorting Techniques/Radix_Sort.py b/Sorting Techniques/Radix_Sort.py similarity index 100% rename from Algorithms/Sorting Techniques/Radix_Sort.py rename to Sorting Techniques/Radix_Sort.py diff --git a/Codes/Sorting Techniques/Selection_Sort.py b/Sorting Techniques/Selection_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Selection_Sort.py rename to Sorting Techniques/Selection_Sort.py diff --git a/Codes/Sorting Techniques/Shell_Sort.py b/Sorting Techniques/Shell_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Shell_Sort.py rename to Sorting Techniques/Shell_Sort.py diff --git a/Codes/Sorting Techniques/Topological_Sort.py b/Sorting Techniques/Topological_Sort.py similarity index 100% rename from Codes/Sorting Techniques/Topological_Sort.py rename to Sorting Techniques/Topological_Sort.py diff --git a/Codes/Sorting Techniques/wave_sort.py b/Sorting Techniques/Wave_Sort.py similarity index 100% rename from Codes/Sorting Techniques/wave_sort.py rename to Sorting Techniques/Wave_Sort.py diff --git a/Codes/Stack/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py b/Stack/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py similarity index 100% rename from 
Codes/Stack/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py rename to Stack/Menu_Driven_Code_for_Dynamic_Stack_using_LinkedList.py diff --git a/Codes/Stack/Menu_Driven_Code_for_Stack.py b/Stack/Menu_Driven_Code_for_Stack.py similarity index 100% rename from Codes/Stack/Menu_Driven_Code_for_Stack.py rename to Stack/Menu_Driven_Code_for_Stack.py From df30e947490546715f2a79573780c8dcb2c45e3f Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Fri, 13 Oct 2023 20:10:54 +0530 Subject: [PATCH 20/89] [UPDATED] README.md --- CONTRIBUTING.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 8c470af..f5acf43 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -67,7 +67,7 @@ You can contribute by: - Create `.py` file for a particular algorithm or code
- > If data structure folder already exists inside subfolder, kindly add your code in the respective folder + > If data structure folder already exists in the repo, kindly add your code in the respective folder. 6. Add all the changes that you have made From 61465ef0c2bd2980b6b6302fdcabc7dbb6be5a1f Mon Sep 17 00:00:00 2001 From: Avdhesh-Varshney <114330097+Avdhesh-Varshney@users.noreply.github.com> Date: Fri, 13 Oct 2023 20:11:44 +0530 Subject: [PATCH 21/89] Changes done --- .../Fibonacci_Searching.py | 21 ++++++++++++++----- 1 file changed, 16 insertions(+), 5 deletions(-) diff --git a/Codes/Searching Techniques/Fibonacci_Searching.py b/Codes/Searching Techniques/Fibonacci_Searching.py index 6bf65d9..cef5305 100644 --- a/Codes/Searching Techniques/Fibonacci_Searching.py +++ b/Codes/Searching Techniques/Fibonacci_Searching.py @@ -51,11 +51,22 @@ def fibonacciSearch(array, target, n): return -1 if __name__ == "__main__": - array = [5, 6, 7, 8, 17, 19, 20, 21, 23, 34, 67, 97, 675] - n = len(array) - target = 31 + n = int(input("\nEnter number of elements in the array: ")) + array = [] + print('\n') + for i in range(n): + array.append(int(input(f"Enter element {i+1}: "))) + array.sort() + target = int(input("\nEnter target element: ")) + index = fibonacciSearch(array, target, n) + + print('\nEntered elements are: ', end='') + for i in range(n): + print(array[i], end=' ') + print('\n') + if index != -1: - print(target, "is present at index:", index) + print(f"\n{target} is present at index: {index}\n") else: - print(target, "isn't present in the array") + print(f"\n{target} isn't present in the array.\n") From fdc7afbd67fe6963064371e772b0ea2548558577 Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Fri, 13 Oct 2023 20:16:31 +0530 Subject: [PATCH 22/89] [UPDATED] Contributing Guidelines --- README.md | 101 +++++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 100 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2a67634..9af0899 100644 --- a/README.md +++ b/README.md @@ -1,9 +1,108 @@ # Python-Data-Structures Here' an amazing repo regarding data structure using python. This is a compilation of python codes examples and implementations of various concepts of data structure. It comprises of various resources for efficient understanding and utilizion of data structure in python. +
+ +## Contrubuting Guidelines + ### Please read our [Contributing Guidelines](CONTRIBUTING.md) before contributing or starting with an issue. -> Do not foget to follow these [Guidelines](CONTRIBUTING.md) for faster acceptance of your PR. +## Mandatory ‼️ + +> 1. You need to create an issue first and wait till you are assigned the issue before creating a Pull Request. +> +> 2. Give a proper description about your proposed and implemented changes in your issues and pull requests. + +## Issues + +```bash +add: Description # if you want to add any algorithm/code +update: Description # if an update is required for a algorithm/code +suggestion: Description # if you want to suggest a better way to implement a algorithm/code +``` + +## Commit Messages + +The commit messages should follow the following pattern: + +```bash +algo/code: Description # if a new algorithm/code is added +docs: Description # if documentation is added +update: Description # if documentation is updated +``` + +## Pull Requests + +Make sure to document the contributions well in the pull request. +Pull requests should have: + +- A concise commit message. +- A description of what was changed/added. + +Others will give constructive feedback. +This is a time for discussion and improvements, +and making the necessary changes will be required before we can +merge the contribution. + +## How Can I Contribute + +You can contribute by: + +- Reporting Bugs +- Suggesting Enhancements +- Code Contribution +- Pull Requests + + +## Getting started 🤟: + +1. Fork this repo (button on top). + +2. Clone on your local machine. + + ``` + git clone https://github.com/himanshu-03/Python-Data-Structures.git + ``` + +3. Navigate to the project directory. + + ``` + cd Python-Data-Structures + ``` + +4. Create a new *branch* + + ``` + git checkout -b + ``` + +5. Adding New Data Strcture + + - Create a folder named by the Data Structure + - Create `.py` file for a particular algorithm or code +
+ + > If data structure folder already exists in the repo, kindly add your code in the respective folder. + +6. Add all the changes that you have made + + ``` + git add . + ``` + +7. Commit your changes + + ``` + git commit -m "{Message}" + ``` + +8. Then push + + ``` + git push -u origin + ``` + +9. Submit a pull request :sunglasses: ## 🪪 License From 326178168e2ab0fc8c44b903b9d979c2a4a4172d Mon Sep 17 00:00:00 2001 From: Avdhesh-Varshney <114330097+Avdhesh-Varshney@users.noreply.github.com> Date: Fri, 13 Oct 2023 20:29:53 +0530 Subject: [PATCH 23/89] =?UTF-8?q?Fibonacci=20searching=20algorithm=20?= =?UTF-8?q?=F0=9F=98=8E?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../Fibonacci_Searching.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {Codes/Searching Techniques => Searching Techniques}/Fibonacci_Searching.py (100%) diff --git a/Codes/Searching Techniques/Fibonacci_Searching.py b/Searching Techniques/Fibonacci_Searching.py similarity index 100% rename from Codes/Searching Techniques/Fibonacci_Searching.py rename to Searching Techniques/Fibonacci_Searching.py From d3f038c51813e2f72c908e645d4b7d15bd5e4abc Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Fri, 13 Oct 2023 20:53:01 +0530 Subject: [PATCH 24/89] Update README.md --- README.md | 88 ++++++++++++------------------------------------------- 1 file changed, 19 insertions(+), 69 deletions(-) diff --git a/README.md b/README.md index 9af0899..706aafe 100644 --- a/README.md +++ b/README.md @@ -1,81 +1,32 @@ +
+ # Python-Data-Structures Here' an amazing repo regarding data structure using python. This is a compilation of python codes examples and implementations of various concepts of data structure. It comprises of various resources for efficient understanding and utilizion of data structure in python. +
-
- -## Contrubuting Guidelines - -### Please read our [Contributing Guidelines](CONTRIBUTING.md) before contributing or starting with an issue. - -## Mandatory ‼️ - -> 1. You need to create an issue first and wait till you are assigned the issue before creating a Pull Request. -> -> 2. Give a proper description about your proposed and implemented changes in your issues and pull requests. - -## Issues - -```bash -add: Description # if you want to add any algorithm/code -update: Description # if an update is required for a algorithm/code -suggestion: Description # if you want to suggest a better way to implement a algorithm/code -``` - -## Commit Messages - -The commit messages should follow the following pattern: - -```bash -algo/code: Description # if a new algorithm/code is added -docs: Description # if documentation is added -update: Description # if documentation is updated -``` - -## Pull Requests - -Make sure to document the contributions well in the pull request. -Pull requests should have: - -- A concise commit message. -- A description of what was changed/added. - -Others will give constructive feedback. -This is a time for discussion and improvements, -and making the necessary changes will be required before we can -merge the contribution. - -## How Can I Contribute - -You can contribute by: +## Mandatory‼️ -- Reporting Bugs -- Suggesting Enhancements -- Code Contribution -- Pull Requests + 1. You need to create an issue first and wait till you are assigned the issue before creating a Pull Request. + 2. Give a proper description about your proposed and implemented changes in your issues and pull requests. +## Contributing Guidelines +You can find our Contributing Guidelines [here](CONTRIBUTING.md). -## Getting started 🤟: - +
+

Getting started 🤟

1. Fork this repo (button on top). - 2. Clone on your local machine. - ``` git clone https://github.com/himanshu-03/Python-Data-Structures.git ``` - 3. Navigate to the project directory. - ``` cd Python-Data-Structures ``` - 4. Create a new *branch* - ``` git checkout -b ``` - 5. Adding New Data Strcture - Create a folder named by the Data Structure @@ -83,41 +34,40 @@ You can contribute by:
> If data structure folder already exists in the repo, kindly add your code in the respective folder. - 6. Add all the changes that you have made - ``` git add . ``` - 7. Commit your changes - ``` git commit -m "{Message}" ``` - 8. Then push - ``` git push -u origin ``` - 9. Submit a pull request :sunglasses: +
## 🪪 License +This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/). + +## Contributors ✨ -To make best use this project do check our [LICENSE](https://choosealicense.com/licenses/mit/). + + + - Copyright (c) 2022 Himanshu Agarwal +

(Back to top)

Connect with me

+ Github     LinkedIn     Instagram     Facebook     Gmail    - Whatsapp

From 7d800c9621c6c1704e869d5f999cc1dd10a4ce7c Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Fri, 13 Oct 2023 20:56:08 +0530 Subject: [PATCH 25/89] Update README.md --- README.md | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/README.md b/README.md index 706aafe..006abfc 100644 --- a/README.md +++ b/README.md @@ -58,8 +58,6 @@ This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/) -

(Back to top)

-

Connect with me

@@ -69,5 +67,5 @@ This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/) Instagram     Facebook     Gmail    - +

(Back to top)

From aaf023e75a6ac5cd6260379335b53b3d368f5970 Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Fri, 13 Oct 2023 20:57:29 +0530 Subject: [PATCH 26/89] Update README.md --- README.md | 66 ++++++++++++++++++++++++++++--------------------------- 1 file changed, 34 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index 006abfc..0c6d18c 100644 --- a/README.md +++ b/README.md @@ -14,39 +14,41 @@ You can find our Contributing Guidelines [here](CONTRIBUTING.md).

Getting started 🤟

-1. Fork this repo (button on top). -2. Clone on your local machine. - ``` - git clone https://github.com/himanshu-03/Python-Data-Structures.git - ``` -3. Navigate to the project directory. - ``` - cd Python-Data-Structures - ``` -4. Create a new *branch* - ``` - git checkout -b - ``` -5. Adding New Data Strcture + + 1. Fork this repo (button on top). - - Create a folder named by the Data Structure - - Create `.py` file for a particular algorithm or code -
- - > If data structure folder already exists in the repo, kindly add your code in the respective folder. -6. Add all the changes that you have made - ``` - git add . - ``` -7. Commit your changes - ``` - git commit -m "{Message}" - ``` -8. Then push - ``` - git push -u origin - ``` -9. Submit a pull request :sunglasses: + 2. Clone on your local machine. + ``` + git clone https://github.com/himanshu-03/Python-Data-Structures.git + ``` + 3. Navigate to the project directory. + ``` + cd Python-Data-Structures + ``` + 4. Create a new *branch* + ``` + git checkout -b + ``` + 5. Adding New Data Strcture + + - Create a folder named by the Data Structure + - Create `.py` file for a particular algorithm or code +
+ + > If data structure folder already exists in the repo, kindly add your code in the respective folder. + 6. Add all the changes that you have made + ``` + git add . + ``` + 7. Commit your changes + ``` + git commit -m "{Message}" + ``` + 8. Then push + ``` + git push -u origin + ``` + 9. Submit a pull request :sunglasses:
## 🪪 License From 1eb71822f92ddb8cfaba515aa5e44b3801f051ee Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Fri, 13 Oct 2023 23:53:12 +0530 Subject: [PATCH 27/89] Create tsp_solver.py The Traveling Salesman Problem (TSP) is a classic optimization challenge, where a salesperson seeks the shortest path to visit a set of cities once, returning to the starting point. --- Traveling_salesman_algorithm/tsp_solver.py | 150 +++++++++++++++++++++ 1 file changed, 150 insertions(+) create mode 100644 Traveling_salesman_algorithm/tsp_solver.py diff --git a/Traveling_salesman_algorithm/tsp_solver.py b/Traveling_salesman_algorithm/tsp_solver.py new file mode 100644 index 0000000..db79c29 --- /dev/null +++ b/Traveling_salesman_algorithm/tsp_solver.py @@ -0,0 +1,150 @@ +from itertools import permutations + +def calculate_total_distance(path, distances): + """ + Calculate the total distance of a path through cities. + + Args: + path (list of int): A list representing the order of cities to visit. + distances (list of lists of int): A distance matrix between cities. + + Returns: + int: The total distance of the path. + + Example: + >>> distances = [[0, 2, 9, 10], [1, 0, 6, 4], [15, 7, 0, 8], [6, 3, 12, 0]] + >>> calculate_total_distance([0, 1, 2, 3, 0], distances) + 21 + """ + total_distance = 0 + for i in range(len(path) - 1): + total_distance += distances[path[i]][path[i + 1]] + total_distance += distances[path[-1]][path[0]] + return total_distance + +def traveling_salesman_bruteforce(distances): + """ + Find the shortest path through cities using brute force. + + Args: + distances (list of lists of int): A distance matrix between cities. + + Returns: + tuple: A tuple containing the shortest path (list of int) and the shortest distance (int). + + Example: + >>> distances = [[0, 2, 9, 10], [1, 0, 6, 4], [15, 7, 0, 8], [6, 3, 12, 0]] + >>> traveling_salesman_bruteforce(distances) + ([0, 1, 3, 2], 21) + """ + num_cities = len(distances) + all_cities = list(range(num_cities)) + shortest_path = None + shortest_distance = float('inf') + + for path in permutations(all_cities): + distance = calculate_total_distance(path, distances) + if distance < shortest_distance: + shortest_distance = distance + shortest_path = path + + return shortest_path, shortest_distance + +def create_distance_matrix(num_cities): + """ + Create a distance matrix for a given number of cities. + + Args: + num_cities (int): The number of cities. + + Returns: + list of lists of int: A distance matrix between cities. + + Example: + >>> create_distance_matrix(4) + [[0, 2, 9, 10], [2, 0, 6, 4], [9, 6, 0, 8], [10, 4, 8, 0]] + """ + distances = [] + for i in range(num_cities): + row = [] + for j in range(num_cities): + if i == j: + row.append(0) + elif j > i: + distance = int(input(f"Enter distance between City {i + 1} and City {j + 1}: ")) + row.append(distance) + else: + row.append(distances[j][i]) + distances.append(row) + return distances + +def print_distance_matrix(distances): + """ + Print the distance matrix. + + Args: + distances (list of lists of int): A distance matrix between cities. + + Example: + >>> distances = [[0, 2, 9, 10], [1, 0, 6, 4], [15, 7, 0, 8], [6, 3, 12, 0]] + >>> print_distance_matrix(distances) + Distance Matrix: + [0, 2, 9, 10] + [2, 0, 6, 4] + [9, 6, 0, 8] + [10, 4, 8, 0] + """ + print("Distance Matrix:") + for row in distances: + print(row) + +def print_city_path(path): + """ + Print the shortest path through cities. 
+ + Args: + path (list of int): A list representing the order of cities to visit. + + Example: + >>> print_city_path([0, 1, 3, 2]) + Shortest Path: City 1 -> City 2 -> City 4 -> City 3 + """ + city_path = [f"City {city + 1}" for city in path] + print("Shortest Path:", " -> ".join(city_path) + +def main_menu(): + """ + Display a menu for the traveling salesman problem and handle user input. + + Example: + >>> # This function interacts with the user, so no specific doctests are provided. + >>> # You can run this function manually to test the menu functionality. + >>> main_menu() + """ + while True: + print("\nMenu:") + print("1. Create a distance matrix") + print("2. Find the shortest path") + print("3. Exit") + + choice = input("Enter your choice: ") + + if choice == '1': + num_cities = int(input("Enter the number of cities: ")) + distances = create_distance_matrix(num_cities) + print_distance_matrix(distances) + elif choice == '2': + if 'distances' in locals(): + shortest_path, shortest_distance = traveling_salesman_bruteforce(distances) + print_city_path(shortest_path) + print("Shortest Distance:", shortest_distance) + else: + print("Please create a distance matrix first.") + elif choice == '3': + print("Goodbye!") + break + else: + print("Invalid choice. Please select a valid option.") + +if __name__ == "__main__": + main_menu() From 1cb6d4be4b76a587dcafe2536aadd7935680ebdf Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:00:01 +0530 Subject: [PATCH 28/89] Create README.md readme.md --- Traveling_salesman_algorithm/README.md | 37 ++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 Traveling_salesman_algorithm/README.md diff --git a/Traveling_salesman_algorithm/README.md b/Traveling_salesman_algorithm/README.md new file mode 100644 index 0000000..c2074aa --- /dev/null +++ b/Traveling_salesman_algorithm/README.md @@ -0,0 +1,37 @@ +# Traveling Salesman Problem Solver 📬🏃 + +This Python program solves the Traveling Salesman Problem (TSP), where a salesman needs to find the shortest path to visit a list of cities exactly once and return to the starting city. + +## Features + +- Create a distance matrix by inputting distances between cities. 🏙⬅🏙 +- Find the shortest path using the brute-force TSP algorithm. 🛣 +- Display the result with city numbers in the path. 📍 +- Menu-driven interface for user interaction. 🔢 + +## Usage + +1. Clone this repository to your local machine: + +```git clone https://github.com/your-username/traveling-salesman-solver.git``` + + +2. Change the working directory to the project folder: + +```cd traveling-salesman-solver``` + + +3. Run the program: + +```python tsp_solver.py``` + + +4. Follow the menu options to create a distance matrix and find the shortest path. 
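For a quick, non-interactive check of the brute-force idea used by `tsp_solver.py`, a minimal sketch along the following lines can be used. The 4-city distance matrix and the helper name `tour_length` are illustrative only (they are not taken from the patch above); the sketch fixes city 0 as the start, enumerates every ordering of the remaining cities with `itertools.permutations`, and keeps the cheapest closed tour.

```python
from itertools import permutations

# Illustrative symmetric distance matrix for 4 cities (not taken from tsp_solver.py).
distances = [
    [0, 2, 9, 10],
    [2, 0, 6, 4],
    [9, 6, 0, 8],
    [10, 4, 8, 0],
]

def tour_length(order, dist):
    # Sum the consecutive legs, then close the loop back to the starting city.
    total = sum(dist[order[i]][order[i + 1]] for i in range(len(order) - 1))
    return total + dist[order[-1]][order[0]]

# Fix city 0 as the start and try every ordering of the remaining cities.
candidates = ((0,) + rest for rest in permutations(range(1, len(distances))))
best = min(candidates, key=lambda order: tour_length(order, distances))

print("Best tour:", best, "Length:", tour_length(best, distances))
# For this matrix the sketch prints: Best tour: (0, 1, 3, 2) Length: 23
```

Because every permutation is examined, the running time grows factorially with the number of cities, which is why this brute-force solver is only practical for small inputs.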
+ +## Requirements + +- Python 3.x + +## Author + +[Aarya Nale](github.com/https://github.com/AaryaNale) From 3176c5907204817d4866428820d3c0f1e14d4a74 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:04:06 +0530 Subject: [PATCH 29/89] Delete Algorithms/Sorting Techniques/Bubble_sort.py removed file --- Algorithms/Sorting Techniques/Bubble_sort.py | 25 -------------------- 1 file changed, 25 deletions(-) delete mode 100644 Algorithms/Sorting Techniques/Bubble_sort.py diff --git a/Algorithms/Sorting Techniques/Bubble_sort.py b/Algorithms/Sorting Techniques/Bubble_sort.py deleted file mode 100644 index 5a60fdb..0000000 --- a/Algorithms/Sorting Techniques/Bubble_sort.py +++ /dev/null @@ -1,25 +0,0 @@ -#Bubble Sort is a simple comparison-based sorting algorithm. It repeatedly steps through the list, compares adjacent elements, and swaps them if they are in the wrong order. -#This process continues until no swaps are needed, indicating the list is sorted. -#The algorithm iterates through the list for 'n' elements (length of the list). -#Within each iteration, it compares adjacent elements from the beginning of the list to the (n - i - 1)-th element. -#If an element is greater than the one next to it, a swap is performed. -#This process is repeated until the largest unsorted element "bubbles up" to its correct position at the end of the list. -#The next iteration is then performed on the remaining unsorted portion. -#The process repeats until no more swaps are needed, indicating the list is sorted. -#It has a time complexity of O(n^2) in the worst case, making it impractical for large datasets. - -def bubble_sort(arr): - """ - Sort a list using Bubble Sort. - - >>> bubble_sort([4, 2, 7, 1, 9, 3]) - [1, 2, 3, 4, 7, 9] - - >>> bubble_sort([5, 4, 3, 2, 1]) - [1, 2, 3, 4, 5] - """ - n = len(arr) - for i in range(n): - for j in range(0, n - i - 1): - if arr[j] > arr[j + 1]: - arr[j], arr[j + 1] = arr[j + 1], arr[j] From 08508e19b0b90b2bfcfa7d74d982bc0d020a376a Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:04:29 +0530 Subject: [PATCH 30/89] Delete Algorithms/Sorting Techniques/Insertion_sort.py removed file --- .../Sorting Techniques/Insertion_sort.py | 27 ------------------- 1 file changed, 27 deletions(-) delete mode 100644 Algorithms/Sorting Techniques/Insertion_sort.py diff --git a/Algorithms/Sorting Techniques/Insertion_sort.py b/Algorithms/Sorting Techniques/Insertion_sort.py deleted file mode 100644 index c7afb05..0000000 --- a/Algorithms/Sorting Techniques/Insertion_sort.py +++ /dev/null @@ -1,27 +0,0 @@ -#Insertion Sort is a simple comparison-based sorting algorithm that builds the final sorted list one element at a time. -#The algorithm starts with the second element (index 1) and iterates through the list (of length 'n'). -#It selects the current element as the 'key' and compares it with the elements to its left in the sorted portion of the list (elements before the current position). -#The algorithm moves elements in the sorted portion to the right to create space for the 'key' if the 'key' is smaller than the element being compared. -#This process continues until the 'key' is in its correct position in the sorted portion. -#The algorithm repeats this process for each element in the list, gradually expanding the sorted portion from left to right. -#Insertion Sort is efficient for small lists or lists that are already partially sorted. 
-#It has a time complexity of O(n^2) in the worst case, similar to Bubble and Selection Sort, making it less efficient for large datasets. -#It minimizes the number of comparisons and swaps, making it a suitable choice for small datasets or nearly sorted lists. - -def insertion_sort(arr): - """ - Sort a list using Insertion Sort. - - >>> insertion_sort([4, 2, 7, 1, 9, 3]) - [1, 2, 3, 4, 7, 9] - - >>> insertion_sort([5, 4, 3, 2, 1]) - [1, 2, 3, 4, 5] - """ - for i in range(1, len(arr)): - key = arr[i] - j = i - 1 - while j >= 0 and key < arr[j]: - arr[j + 1] = arr[j] - j -= 1 - arr[j + 1] = key From 40d1b084fa9cffb1617a4879677b4e4c7e6d4365 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:04:55 +0530 Subject: [PATCH 31/89] Delete Algorithms/Sorting Techniques/Selection_sort.py removed file --- .../Sorting Techniques/Selection_sort.py | 27 ------------------- 1 file changed, 27 deletions(-) delete mode 100644 Algorithms/Sorting Techniques/Selection_sort.py diff --git a/Algorithms/Sorting Techniques/Selection_sort.py b/Algorithms/Sorting Techniques/Selection_sort.py deleted file mode 100644 index 45b1577..0000000 --- a/Algorithms/Sorting Techniques/Selection_sort.py +++ /dev/null @@ -1,27 +0,0 @@ -#The selection_sort function is an in-place comparison-based sorting algorithm. -#It takes an input list `arr` and determines its length `n`. -#It iterates through the list using an outer loop with `i` ranging from 0 to `n-1`. -#Inside the outer loop, it initializes `min_index` to `i`, assuming the current element is the minimum. -#It then uses an inner loop with `j` ranging from `i+1` to `n-1` to find the index of the minimum element in the unsorted part of the list. -#If it finds an element at index `j` that is smaller than the element at `min_index`, it updates `min_index` to `j`. -#After the inner loop completes, it swaps the element at index `i` with the element at `min_index`, effectively moving the minimum element to its correct position in the sorted part of the list. -#It repeats this process for each element in the list until the entire list is sorted. -#The sorted list is returned as the result, and the original list is now sorted in ascending order. - -def selection_sort(arr): - """ - Sort a list using Selection Sort. - - >>> selection_sort([4, 2, 7, 1, 9, 3]) - [1, 2, 3, 4, 7, 9] - - >>> selection_sort([5, 4, 3, 2, 1]) - [1, 2, 3, 4, 5] - """ - n = len(arr) - for i in range(n): - min_index = i - for j in range(i + 1, n): - if arr[j] < arr[min_index]: - min_index = j - arr[i], arr[min_index] = arr[min_index], arr[i] From d6a5dd3ef9e3297c1ed0c92a3db8e221a5464a72 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:50:48 +0530 Subject: [PATCH 32/89] Delete Traveling_salesman_algorithm/README.md --- Traveling_salesman_algorithm/README.md | 37 -------------------------- 1 file changed, 37 deletions(-) delete mode 100644 Traveling_salesman_algorithm/README.md diff --git a/Traveling_salesman_algorithm/README.md b/Traveling_salesman_algorithm/README.md deleted file mode 100644 index c2074aa..0000000 --- a/Traveling_salesman_algorithm/README.md +++ /dev/null @@ -1,37 +0,0 @@ -# Traveling Salesman Problem Solver 📬🏃 - -This Python program solves the Traveling Salesman Problem (TSP), where a salesman needs to find the shortest path to visit a list of cities exactly once and return to the starting city. 
- -## Features - -- Create a distance matrix by inputting distances between cities. 🏙⬅🏙 -- Find the shortest path using the brute-force TSP algorithm. 🛣 -- Display the result with city numbers in the path. 📍 -- Menu-driven interface for user interaction. 🔢 - -## Usage - -1. Clone this repository to your local machine: - -```git clone https://github.com/your-username/traveling-salesman-solver.git``` - - -2. Change the working directory to the project folder: - -```cd traveling-salesman-solver``` - - -3. Run the program: - -```python tsp_solver.py``` - - -4. Follow the menu options to create a distance matrix and find the shortest path. - -## Requirements - -- Python 3.x - -## Author - -[Aarya Nale](github.com/https://github.com/AaryaNale) From 59cad6168c017f57fa73fa18ddc2fac484909d70 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sat, 14 Oct 2023 00:52:30 +0530 Subject: [PATCH 33/89] Rename tsp_solver.py to travelling_salesman_problem_solver.py moved to graphs folder. --- .../tsp_solver.py => Graphs/travelling_salesman_problem_solver.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename Traveling_salesman_algorithm/tsp_solver.py => Graphs/travelling_salesman_problem_solver.py (100%) diff --git a/Traveling_salesman_algorithm/tsp_solver.py b/Graphs/travelling_salesman_problem_solver.py similarity index 100% rename from Traveling_salesman_algorithm/tsp_solver.py rename to Graphs/travelling_salesman_problem_solver.py From 91e1706d0c4aeb9593b31e50ed0591859761ee59 Mon Sep 17 00:00:00 2001 From: Prateek Date: Sat, 14 Oct 2023 12:40:22 +0530 Subject: [PATCH 34/89] algo: Added cocktail Sort --- Sorting Techniques/Cocktail_Sort.py | 50 +++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 Sorting Techniques/Cocktail_Sort.py diff --git a/Sorting Techniques/Cocktail_Sort.py b/Sorting Techniques/Cocktail_Sort.py new file mode 100644 index 0000000..6ee34be --- /dev/null +++ b/Sorting Techniques/Cocktail_Sort.py @@ -0,0 +1,50 @@ +# Cocktail Shaker Sort, also known as Bidirectional Bubble Sort +# is a variation of the Bubble Sort algorithm. It works by repeatedly +# sorting the list in both directions, from left to right and then +# from right to left, which reduces the number of passes required +# for sorting. Elements "bubble" up and down until the list becomes +# fully sorted. + + +def cocktailSort(a): + n = len(a) + swapped = True + start = 0 + end = n-1 + while (swapped == True): + + # reset the swapped flag on entering the loop, + # because it might be true from a previous + # iteration. + swapped = False + + # loop from left to right same as the bubble + # sort + for i in range(start, end): + if (a[i] > a[i + 1]): + a[i], a[i + 1] = a[i + 1], a[i] + swapped = True + + # if nothing moved, then array is sorted. + if (swapped == False): + break + + # otherwise, reset the swapped flag so that it + # can be used in the next stage + swapped = False + + # move the end point back by one, because + # item at the end is in its rightful spot + end = end-1 + + # from right to left, doing the same + # comparison as in the previous stage + for i in range(end-1, start-1, -1): + if (a[i] > a[i + 1]): + a[i], a[i + 1] = a[i + 1], a[i] + swapped = True + + # increase the starting point, because + # the last stage would have moved the next + # smallest number to its rightful spot. 
+ start = start + 1 From 74b747bf56cf4d70062a66fb35ad6784538ab356 Mon Sep 17 00:00:00 2001 From: Prateek Date: Sat, 14 Oct 2023 18:02:11 +0530 Subject: [PATCH 35/89] algo: Added Jump Search --- Searching Techniques/Jump_Search.py | 33 +++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) create mode 100644 Searching Techniques/Jump_Search.py diff --git a/Searching Techniques/Jump_Search.py b/Searching Techniques/Jump_Search.py new file mode 100644 index 0000000..a5f9cb1 --- /dev/null +++ b/Searching Techniques/Jump_Search.py @@ -0,0 +1,33 @@ +# Jump Search is an efficient searching algorithm for ordered lists. It divides the list into smaller blocks and jumps ahead by fixed steps to quickly reach a block that might contain the target value. It then performs a linear search within that block, providing a balance between the efficiency of binary search and the simplicity of linear search. Jump Search is especially useful for large datasets where binary search might be less efficient due to its logarithmic time complexity. + +import math + +def jumpSearch(arr, x, n): + + # Finding block size to be jumped + step = math.sqrt(n) + + # Finding the block where element is + # present (if it is present) + prev = 0 + while arr[int(min(step, n)-1)] < x: + prev = step + step += math.sqrt(n) + if prev >= n: + return -1 + + # Doing a linear search for x in + # block beginning with prev. + while arr[int(prev)] < x: + prev += 1 + + # If we reached next block or end + # of array, element is not present. + if prev == min(step, n): + return -1 + + # If element is found + if arr[int(prev)] == x: + return prev + + return -1 From 3743b23673dce0ad18fb06a0c4bc322f415b9c74 Mon Sep 17 00:00:00 2001 From: sbdkdlalit0112 Date: Sat, 14 Oct 2023 23:05:04 +0530 Subject: [PATCH 36/89] algo: Added a menu driven implementation of Binary Search Tree --- .DS_Store | Bin 0 -> 6148 bytes Tree/Meny_Driven_Binary_Search_Tree.py | 140 +++++++++++++++++++++++++ 2 files changed, 140 insertions(+) create mode 100644 .DS_Store create mode 100644 Tree/Meny_Driven_Binary_Search_Tree.py diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000000000000000000000000000000000000..8ed769433262ae859c0ded4d522cc7f6db118b3b GIT binary patch literal 6148 zcmeHK-D(p-6h4zCW@8Y8ROpSc;BBdCs|E2UZW5%R6}wR{RAM$8+9lhan%$U-5XiN@ zgRkJNPvX0HrQeyE!fx7vHx-pRaORtzbI$C1^S47p!kdQML=7Txpp50~NG=d=XFVq+ zQ^S5j#W`Abu?%3z$HZgk z)=wqK5mo6ac6*z;h~QIq#ro@tbsg3bB{ZfHh2*MIN?FlM+*$Fe&J27uxYUy}4$ks};8ir5^&9JL znPh_~8BLT!6pmo>_H~qma^}ft5~j-6*9}h1sr4Hh^ZCwRbJN{@u)WxH=MQ&~zrVY; zSk#=`ckewq?2qG_K*G~iDhzxTK1@6zPgx?4m|^2FjwLS ztnzcUju{%!fF?AgV>J&)n1debxSXKogW@?YvRvXG^Kot=jzpB~_hYDa^Ch=#Du|Kc zTZS|)_C~yB@|GxCxdH~vs&ndB5Zi{d3|I#KZ3g&!aG{JXgG-I-=s=>L0KhV;l_1YQ z3GCwY{SDUI3-dw|s&~Y9l{tx)Mw?m& zECWRb)^xGK_kZ{D_kYpJmMjC7f&YpDQSSI14?{9{>$$=4UF$;MLs{5vsZqHgG1sxm f@KwA4Rf0B`Gr*R?rAE|1?2mw=!6uf0Kgz%_OnbR5 literal 0 HcmV?d00001 diff --git a/Tree/Meny_Driven_Binary_Search_Tree.py b/Tree/Meny_Driven_Binary_Search_Tree.py new file mode 100644 index 0000000..0d89c18 --- /dev/null +++ b/Tree/Meny_Driven_Binary_Search_Tree.py @@ -0,0 +1,140 @@ +class Node: + def __init__(self,data): + self.data=data + self.left=None + self.right=None + +def insert(node,data): + if node is None: + return Node(data) + else: + if data",end=" ") + printInorder(node.right) + +def printPreorder(node): + if node is not None: + print(node.data,"->",end=" ") + printPreorder(node.left) + printPreorder(node.right) + +def printPostorder(node): + if node is not None: + printPostorder(node.left) + 
printPostorder(node.right) + print(node.data,"->",end=" ") + +def search(node,data): + while node!=None: + if data==node.data: + return "FOUND" + else: + if data Date: Sun, 15 Oct 2023 12:59:54 +0530 Subject: [PATCH 37/89] Create Kruskals_Algorithm.py Added code for kruskal's algorithm. Kruskal's algorithm is a minimum spanning tree algorithm that finds the smallest set of edges that connects all vertices in a graph without forming cycles. Adding this to the "Graphs" folder. --- Graphs/Kruskals_Algorithm.py | 104 +++++++++++++++++++++++++++++++++++ 1 file changed, 104 insertions(+) create mode 100644 Graphs/Kruskals_Algorithm.py diff --git a/Graphs/Kruskals_Algorithm.py b/Graphs/Kruskals_Algorithm.py new file mode 100644 index 0000000..34a2f14 --- /dev/null +++ b/Graphs/Kruskals_Algorithm.py @@ -0,0 +1,104 @@ +#Kruskal's Algorithm seeks a minimum spanning tree using a greedy approach. +#Greedy Approach: Selects edges in ascending order of weight, adding them to the MST if they avoid cycles. +#Edge Sorting: Begins by sorting edges by weight in non-decreasing order. +#Disjoint Set Data Structure: Utilizes Union-Find to efficiently manage connected components and prevent cycles. +#Iterative Process: Adds edges to the MST iteratively, starting with the smallest weight edges, until V-1 edges are included (V is the number of vertices). +#Safe Edge Selection: Ensures edges don't create cycles before adding them to the MST. +#Efficiency: Kruskal's Algorithm has O(E log E) time complexity, making it suitable for sparse graphs. +#Applications: Widely used in network design for road networks, electrical circuits, data center connections, and also in clustering and image segmentation. + +class KruskalMST: + def __init__(self, vertices): + """ + Initialize a KruskalMST object with the given number of vertices. + + Args: + vertices (int): The number of vertices in the graph. + """ + self.V = vertices + self.graph = [] + + def add_edge(self, u, v, w): + """ + Add an edge to the graph. + + Args: + u (int): The source vertex. + v (int): The destination vertex. + w (int): The weight of the edge. + """ + self.graph.append([u, v, w]) + + def find(self, parent, i): + """ + Find the parent of a vertex using the union-find algorithm. + + Args: + parent (list): A list representing the parent of each vertex. + i (int): The vertex to find the parent of. + + Returns: + int: The parent of the vertex. + """ + if parent[i] == i: + return i + return self.find(parent, parent[i]) + + def union(self, parent, rank, x, y): + """ + Union operation to merge two subsets into one. + + Args: + parent (list): A list representing the parent of each vertex. + rank (list): A list representing the rank of each subset. + x (int): The root of the first subset. + y (int): The root of the second subset. + """ + root_x = self.find(parent, x) + root_y = self.find(parent, y) + + if rank[root_x] < rank[root_y]: + parent[root_x] = root_y + elif rank[root_x] > rank[root_y]: + parent[root_y] = root_x + else: + parent[root_x] = root_y + rank[root_y] += 1 + + def kruskal(self): + """ + Find the minimum spanning tree using Kruskal's algorithm. + + Returns: + list: A list of edges in the minimum spanning tree, represented as [u, v, w], where u and v are vertices + and w is the edge weight. 
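        A small worked example (the 3-vertex graph below is chosen purely for
        illustration; the output shown is what this implementation returns):

        >>> g = KruskalMST(3)
        >>> g.add_edge(0, 1, 1)
        >>> g.add_edge(1, 2, 2)
        >>> g.add_edge(0, 2, 3)
        >>> g.kruskal()
        [[0, 1, 1], [1, 2, 2]]

        The edge (0, 2) with weight 3 is rejected because vertices 0 and 2 are
        already connected through vertex 1, so adding it would create a cycle.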
+ """ + result = [] + self.graph = sorted(self.graph, key=lambda item: item[2]) + parent = [i for i in range(self.V)] + rank = [0] * self.V + i = 0 + e = 0 + while e < self.V - 1: + u, v, w = self.graph[i] + i += 1 + x = self.find(parent, u) + y = self.find(parent, v) + if x != y: + e += 1 + result.append([u, v, w]) + self.union(parent, rank, x, y) + return result + + +# Example usage: +g = KruskalMST(4) +g.add_edge(0, 1, 10) +g.add_edge(0, 2, 6) +g.add_edge(0, 3, 5) +g.add_edge(1, 3, 15) +g.add_edge(2, 3, 4) +mst = g.kruskal() +print("Edges in Minimum Spanning Tree:") +for u, v, w in mst: + print(f"{u} - {v}: {w}") From 3efe589f8c5b5a1516e737ffe8178e3a220d3d14 Mon Sep 17 00:00:00 2001 From: Aaryadotpy <91911418+AaryaNale@users.noreply.github.com> Date: Sun, 15 Oct 2023 18:57:19 +0530 Subject: [PATCH 38/89] Update Kruskals_Algorithm.py User input is now available --- Graphs/Kruskals_Algorithm.py | 36 +++++++++++++++++++++++++----------- 1 file changed, 25 insertions(+), 11 deletions(-) diff --git a/Graphs/Kruskals_Algorithm.py b/Graphs/Kruskals_Algorithm.py index 34a2f14..7a3bd34 100644 --- a/Graphs/Kruskals_Algorithm.py +++ b/Graphs/Kruskals_Algorithm.py @@ -91,14 +91,28 @@ def kruskal(self): return result -# Example usage: -g = KruskalMST(4) -g.add_edge(0, 1, 10) -g.add_edge(0, 2, 6) -g.add_edge(0, 3, 5) -g.add_edge(1, 3, 15) -g.add_edge(2, 3, 4) -mst = g.kruskal() -print("Edges in Minimum Spanning Tree:") -for u, v, w in mst: - print(f"{u} - {v}: {w}") +def main(): + """ + Main function to execute the Kruskal's algorithm for finding the Minimum Spanning Tree. + + The function prompts the user for input regarding the number of vertices, number of edges, and edge details. + It then prints the edges of the Minimum Spanning Tree. + """ + num_vertices = int(input("Enter the number of vertices: ")) + g = KruskalMST(num_vertices) + + num_edges = int(input("Enter the number of edges: ")) + for _ in range(num_edges): + u, v, w = map(int, input("Enter edge (u v w): ").split()) + g.add_edge(u, v, w) + + mst = g.kruskal() + + print("Edges in Minimum Spanning Tree:") + for u, v, w in mst: + print(f"{u} - {v}: {w}") + + +if __name__ == "__main__": + main() + From b816fd3227dce5bad6a68e10fb8c21e9919b7ff8 Mon Sep 17 00:00:00 2001 From: Amad Siddiqui <80278397+Amad-Ahmed@users.noreply.github.com> Date: Sun, 15 Oct 2023 21:17:04 +0500 Subject: [PATCH 39/89] Add kmeans algorithm --- Clustering Techniques/kmeans.py | 79 +++++++++++++++++++++++++++++++++ 1 file changed, 79 insertions(+) create mode 100644 Clustering Techniques/kmeans.py diff --git a/Clustering Techniques/kmeans.py b/Clustering Techniques/kmeans.py new file mode 100644 index 0000000..06a6911 --- /dev/null +++ b/Clustering Techniques/kmeans.py @@ -0,0 +1,79 @@ +import numpy as np +import matplotlib.pyplot as plt + +# Function to accept data from the user + + +def get_user_data(): + num_points = int(input("Enter the number of data points: ")) + data = [] + for i in range(num_points): + x = float(input(f"Enter x-coordinate for data point {i+1}: ")) + y = float(input(f"Enter y-coordinate for data point {i+1}: ")) + data.append([x, y]) + return np.array(data) + +# Function to plot data points and centroids + + +def plot(data, centroids, cluster): + plt.scatter(data[:, 0], data[:, 1], c=cluster) + plt.scatter(centroids[:, 0], centroids[:, 1], c='r') + plt.show() + +# Function to assign each data point to the closest centroid + + +def assign_cluster(data, centroids): + cluster = [] + # Looping through each data point + for i in range(len(data)): + 
distances = [] + # Looping through each centroid + for j in range(len(centroids)): + # Calculating the distance between the data point and the centroid + distances.append(np.linalg.norm(data[i] - centroids[j])) + # Assigning the data point to the closest centroid + cluster.append(np.argmin(distances)) + return cluster + +# Function to update centroids based on the assigned clusters + + +def update_centroids(data, cluster, k): + centroids = [] + # Looping through each cluster + for i in range(k): + # Calculating the new centroid + centroids.append(np.mean(data[np.array(cluster) == i], axis=0)) + return np.array(centroids) + + +# Get user input for data points +data = get_user_data() + +# Plot the initial data points +plt.scatter(data[:, 0], data[:, 1]) + +# Get user input for the number of clusters (k) +k = int(input("Enter the number of clusters (k): ")) + +# Get user input for initial centroids +centroids = [] +for i in range(k): + x = float(input(f"Enter initial x-coordinate for centroid {i+1}: ")) + y = float(input(f"Enter initial y-coordinate for centroid {i+1}: ")) + centroids.append([x, y]) +centroids = np.array(centroids) + +# Run the k-means algorithm for a specified number of iterations +num_iterations = 10 +for i in range(num_iterations): + cluster = assign_cluster(data, centroids) + centroids = update_centroids(data, cluster, k) + +# Plot the final clusters and centroids +plot(data, centroids, cluster) + +# Display the final clusters +print("Final clusters:", cluster) From c36398c1e83803d84bb03f5f6c44260f7806ffec Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Sun, 15 Oct 2023 22:29:23 +0530 Subject: [PATCH 40/89] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 0c6d18c..9d35d27 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -
+
# Python-Data-Structures Here' an amazing repo regarding data structure using python. This is a compilation of python codes examples and implementations of various concepts of data structure. It comprises of various resources for efficient understanding and utilizion of data structure in python. From 02ba3a2906466b0b091038ca26b93f524c5d94ef Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Sun, 15 Oct 2023 22:35:32 +0530 Subject: [PATCH 41/89] Update README.md --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 9d35d27..f1927d1 100644 --- a/README.md +++ b/README.md @@ -57,7 +57,7 @@ This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/) ## Contributors ✨ - +
From 6f162dc6bdd473fb785e53539b876d6a46f3ac36 Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Sun, 15 Oct 2023 22:50:59 +0530 Subject: [PATCH 42/89] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f1927d1..7ca53f4 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,10 @@
-# Python-Data-Structures +# Python Data Structures Here' an amazing repo regarding data structure using python. This is a compilation of python codes examples and implementations of various concepts of data structure. It comprises of various resources for efficient understanding and utilizion of data structure in python.
-## Mandatory‼️ +## Mandatory ‼️ 1. You need to create an issue first and wait till you are assigned the issue before creating a Pull Request. 2. Give a proper description about your proposed and implemented changes in your issues and pull requests. From bd9ee82e3c65e59cac8cb86f5fa26d50b27078eb Mon Sep 17 00:00:00 2001 From: Surya <101094019+surya-mu@users.noreply.github.com> Date: Mon, 16 Oct 2023 11:44:23 +0530 Subject: [PATCH 43/89] [Algo/Code] Added Prim's Algorithm to Find MST. Added a file called Prims_Algorithm_Minimum_Spanning_Tree.py to the Graphs section. --- .../Prims_Algorithm_Minimum_Spanning_Tree.py | 59 +++++++++++++++++++ 1 file changed, 59 insertions(+) create mode 100644 Graphs/Prims_Algorithm_Minimum_Spanning_Tree.py diff --git a/Graphs/Prims_Algorithm_Minimum_Spanning_Tree.py b/Graphs/Prims_Algorithm_Minimum_Spanning_Tree.py new file mode 100644 index 0000000..3063708 --- /dev/null +++ b/Graphs/Prims_Algorithm_Minimum_Spanning_Tree.py @@ -0,0 +1,59 @@ +//Prim's Algo to calculate the minimum spanning tree with user-input, using the Fibonacci Heap Method. + +import sys +import heapq + +class UniqueGraph: + def __init(self, vertices_count): + self.V = vertices_count + self.edges = [[] for _ in range(vertices_count)] + + def find_min_edge(self, key_values, mst_set): + min_value = sys.maxsize + min_index = 0 + + for vertex in range(self.V): + if key_values[vertex] < min_value and not mst_set[vertex]: + min_value = key_values[vertex] + min_index = vertex + + return min_index + + def find_minimum_spanning_tree(self): + parents = [None] * self.V + key_values = [sys.maxsize] * self.V + key_values[0] = 0 + mst_set = [False] * self.V + + parents[0] = -1 + min_heap = [(0, 0)] + + while min_heap: + current_value, current_vertex = heapq.heappop(min_heap) + mst_set[current_vertex] = True + + for edge in self.edges[current_vertex]: + adjacent_vertex, weight = edge + if not mst_set[adjacent_vertex] and key_values[adjacent_vertex] > weight: + key_values[adjacent_vertex] = weight + parents[adjacent_vertex] = current_vertex + heapq.heappush(min_heap, (key_values[adjacent_vertex], adjacent_vertex)) + + self.print_minimum_spanning_tree(parents, key_values) + + def print_minimum_spanning_tree(self, parents, key_values): + print("Edge \tWeight") + for vertex in range(1, self.V): + print(f"{parents[vertex]} - {vertex}\t{key_values[vertex]}") + +# Input the graph from the user +V = int(input("Enter the number of vertices: ")) +g = UniqueGraph(V) +print("Enter the edges and their weights (e.g., '1 2 3' means an edge from vertex 1 to vertex 2 with weight 3):") + +for _ in range(V - 1): + u, v, w = map(int, input().split()) + g.edges[u].append((v, w)) + g.edges[v].append((u, w)) + +g.find_minimum_spanning_tree() From 37919462f672a3d51bd2f0e9a54c8f7a4371ac97 Mon Sep 17 00:00:00 2001 From: Jyoti Shinde Date: Mon, 16 Oct 2023 10:10:43 -0400 Subject: [PATCH 44/89] added trie data structure --- Trie/Menu_Driven_Code_for_Tries.py | 50 ++++++++++++++++++++++++++++++ 1 file changed, 50 insertions(+) create mode 100644 Trie/Menu_Driven_Code_for_Tries.py diff --git a/Trie/Menu_Driven_Code_for_Tries.py b/Trie/Menu_Driven_Code_for_Tries.py new file mode 100644 index 0000000..d66e28c --- /dev/null +++ b/Trie/Menu_Driven_Code_for_Tries.py @@ -0,0 +1,50 @@ +""" +# Menu Driven Code for Tries also known as a prefix tree, a tree-like data structure +used for efficient retrieval of a key in a dataset of strings. 
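Each node keeps a dictionary of child nodes keyed by character plus an end-of-word flag,
so inserting or searching a word of length L takes O(L) steps regardless of how many
words the trie already holds.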
+""" + +class TrieNode: + def __init__(self): + self.children = {} + self.is_end_of_word = False + +class Trie: + def __init__(self): + self.root = TrieNode() + + def insert(self, word): + node = self.root + for char in word: + if char not in node.children: + node.children[char] = TrieNode() + node = node.children[char] + node.is_end_of_word = True + + def search(self, word): + node = self.root + for char in word: + if char not in node.children: + return False + node = node.children[char] + return node.is_end_of_word + + def starts_with(self, prefix): + node = self.root + for char in prefix: + if char not in node.children: + return False + node = node.children[char] + return True + +# Example usage +trie = Trie() +words = ["apple", "banana", "orange", "app", "ban", "or"] +for word in words: + trie.insert(word) + +print(trie.search("apple")) # True +print(trie.search("appl")) # False +print(trie.starts_with("app")) # True +print(trie.search("ban")) # True +print(trie.search("banana")) # True +print(trie.search("band")) # False From 5c925445a7c1a202be49a481b2c71ee5675b0fae Mon Sep 17 00:00:00 2001 From: ConradKash Date: Mon, 16 Oct 2023 20:20:55 +0300 Subject: [PATCH 45/89] naive_search_pattern algorithm --- .../Naive_Pattern_Searching.py | 36 +++++++++++++++++++ 1 file changed, 36 insertions(+) create mode 100644 Pattern Searching Algorithm/Naive_Pattern_Searching.py diff --git a/Pattern Searching Algorithm/Naive_Pattern_Searching.py b/Pattern Searching Algorithm/Naive_Pattern_Searching.py new file mode 100644 index 0000000..16e3dea --- /dev/null +++ b/Pattern Searching Algorithm/Naive_Pattern_Searching.py @@ -0,0 +1,36 @@ +# Python3 program for Naive Pattern +# Searching algorithm + + +def search(pat, txt): + M = len(pat) + N = len(txt) + + # A loop to slide pat[] one by one */ + for i in range(N - M + 1): + j = 0 + + # For current index i, check + # for pattern match */ + while(j < M): + if (txt[i + j] != pat[j]): + break + j += 1 + + if (j == M): + print("Pattern found at index ", i) + + +# Driver's Code +if __name__ == '__main__': + txt = "AABAACAADAABAAABAA" + pat = "AABA" + + # Function call + search(pat, txt) + #try it yourself + txt = input("Enter the text: ") + pat = input("Enter the pattern: ") + + + search(pat, txt) From 1ce90b6f0c16045a3ec2eb3cee81ed868fa25baf Mon Sep 17 00:00:00 2001 From: pankaj kumar Date: Mon, 16 Oct 2023 23:13:59 +0530 Subject: [PATCH 46/89] I have written full code of Tarjan's Algorithm inside graph folder with proper explanations. 
--- Graphs/Tarjan's_Algorithm.py | 82 ++++++++++++++++++++++++++++++++++++ 1 file changed, 82 insertions(+) create mode 100644 Graphs/Tarjan's_Algorithm.py diff --git a/Graphs/Tarjan's_Algorithm.py b/Graphs/Tarjan's_Algorithm.py new file mode 100644 index 0000000..cf9a356 --- /dev/null +++ b/Graphs/Tarjan's_Algorithm.py @@ -0,0 +1,82 @@ +""" "defaultdict" in Python + is a dictionary-like container from the collections +module that provides a default value for keys that do not exist.""" + +from collections import defaultdict + +# Function to run Tarjan's algorithm +def tarjan(graph): + + index = 0 + stack = [] + components = [] + + # Track visited and index for each node + indexes = {} + lowlinks = {} + + def strongconnect(node): + + # Set the depth index for this node to the smallest unused index + nonlocal index + indexes[node] = index + lowlinks[node] = index + index += 1 + stack.append(node) + + # Consider successors of `node` + try: + successors = graph[node] + except: + + successors = [] + for successor in successors: + if successor not in indexes: + # Successor has not yet been visited; recurse on it + strongconnect(successor) + lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + elif successor in stack: + # Successor is in stack, hence in current SCC + lowlinks[node] = min(lowlinks[node],indexes[successor]) + + # If `node` is a root node, pop the stack and generate an SCC + if lowlinks[node] == indexes[node]: + connected_component = [] + + while True: + successor = stack.pop() + connected_component.append(successor) + if successor == node: break + components.append(connected_component) + + for node in graph: + if node not in indexes: + strongconnect(node) + + return components + +# Sample graph +graph = { + 0: [1], + 1: [2], + 2: [0, 3], + 3: [4], + 4: [5], + 5: [3, 6], + 6: [] +} + +print(tarjan(graph)) + + +""" Explanation:-> + +1) Tarjan's algorithm performs a DFS on the graph to find strongly connected components. + +2) It maintains an index (incremented for each visited node), a stack of visited nodes, and a lowlink value for each node (lowest index reachable from that node). + +3) When visiting a node, if any successor is in the stack, the lowlink is updated to be the minimum of its current value and the successor's index. + +4) If the lowlink of a node equals its own index, it is a root node and the current stack represents an SCC. This SCC is popped from the stack and added to the final components list. + +5) After Tarjan's finishes, the components list contains all the SCCs in the graph.""" \ No newline at end of file From ac25c2589ae2c01f408b57a8aef798b91a730567 Mon Sep 17 00:00:00 2001 From: Jyoti Shinde Date: Mon, 16 Oct 2023 13:44:57 -0400 Subject: [PATCH 47/89] updated code to accept dynamic input --- Trie/Menu_Driven_Code_for_Tries.py | 30 +++++++++++++++++++----------- 1 file changed, 19 insertions(+), 11 deletions(-) diff --git a/Trie/Menu_Driven_Code_for_Tries.py b/Trie/Menu_Driven_Code_for_Tries.py index d66e28c..15210c5 100644 --- a/Trie/Menu_Driven_Code_for_Tries.py +++ b/Trie/Menu_Driven_Code_for_Tries.py @@ -2,7 +2,6 @@ # Menu Driven Code for Tries also known as a prefix tree, a tree-like data structure used for efficient retrieval of a key in a dataset of strings. 
""" - class TrieNode: def __init__(self): self.children = {} @@ -38,13 +37,22 @@ def starts_with(self, prefix): # Example usage trie = Trie() -words = ["apple", "banana", "orange", "app", "ban", "or"] -for word in words: - trie.insert(word) - -print(trie.search("apple")) # True -print(trie.search("appl")) # False -print(trie.starts_with("app")) # True -print(trie.search("ban")) # True -print(trie.search("banana")) # True -print(trie.search("band")) # False + +# Dynamic input for words +while True: + user_input = raw_input("Enter a word (or 'q' to quit): ") # Use raw_input for Python 2.x + if user_input == 'q': + break + trie.insert(user_input) + +# Dynamic input for searching +while True: + user_input = raw_input("Enter a word to search (or 'q' to quit): ") # Use raw_input for Python 2.x + if user_input == 'q': + break + if trie.search(user_input): + print("'{0}' is found in the trie.".format(user_input)) + else: + print("'{0}' is not found in the trie.".format(user_input)) + + From 8fa7a8e14dcde637d70f042dd5bd908d7292519e Mon Sep 17 00:00:00 2001 From: Achintya Bhat Date: Tue, 17 Oct 2023 11:38:13 +0530 Subject: [PATCH 48/89] algo/code: Priority queue using max heap --- ..._for_Priority_Queue_Heap_implementation.py | 117 ++++++++++++++++++ 1 file changed, 117 insertions(+) create mode 100644 Queue/Menu_Driven_Code_for_Priority_Queue_Heap_implementation.py diff --git a/Queue/Menu_Driven_Code_for_Priority_Queue_Heap_implementation.py b/Queue/Menu_Driven_Code_for_Priority_Queue_Heap_implementation.py new file mode 100644 index 0000000..c6e19d3 --- /dev/null +++ b/Queue/Menu_Driven_Code_for_Priority_Queue_Heap_implementation.py @@ -0,0 +1,117 @@ +class Q: + queue = [] + MaxSize = 0 + currSize = 0 + + def createQueue(self, size): + Q.MaxSize = size + Q.currSize = 0 + for i in range(0, Q.MaxSize): + Q.queue.append(0) + print('\nQueue created of size: ', len(Q.queue)) + print(Q.queue) + + def enqueue(self, e): + Q.currSize += 1 + Q.queue[Q.currSize-1] = e + Q.shiftUp(Q.currSize-1) + print(e, 'enqueued in Queue') + print('') + + def dequeue(self): + temp = Q.queue[0] + Q.currSize -= 1 + Q.queue[0] = Q.queue[Q.currSize] + Q.shiftDown(0) + print(temp, 'dequeued from Queue') + print('') + + def isFull(self): + if Q.currSize == Q.MaxSize: + return True + else: + return False + + def isEmpty(self): + if Q.currSize == 0: + return True + else: + return False + + def printQueue(self): + print('Position', '\tData') + for i in range(Q.currSize): + print(i+1,'\t\t',Q.queue[i]) + + def shiftUp(i) : + parent = (i - 1) // 2 + while (i > 0 and Q.queue[parent] < Q.queue[i]) : + + # Swap parent and current node + (Q.queue[i], Q.queue[parent]) = (Q.queue[parent], Q.queue[i]) # swap + + # Update i to parent of i + i = parent + parent = (i - 1) // 2 + + def shiftDown(i): + largest = i # Initialize largest as root + l = 2 * i + 1 # left = 2*i + 1 + r = 2 * i + 2 # right = 2*i + 2 + + # See if left child of root exists and is + # greater than root + + if l < Q.currSize and Q.queue[i] < Q.queue[l]: + largest = l + + # See if right child of root exists and is + # greater than root + + if r < Q.currSize and Q.queue[largest] < Q.queue[r]: + largest = r + + # Change root, if needed + + if largest != i: + (Q.queue[i], Q.queue[largest]) = (Q.queue[largest], Q.queue[i]) # swap + Q.shiftDown(largest) + + + +# Main Code: + +o = Q() +o.createQueue(int(input('Enter size of the queue: '))) + +while True: + print('------------') + print('1.Enqueue\n2.Dequeue\n3.Print\n0.Exit') + print('------------') + + ch = int(input('\nEnter 
your choice: ')) + + if ch == 1: + if o.isFull() != True: + data = int(input('\nEnter data to be enqueued: ')) + o.enqueue(data) + else: + print('\nQueue is full..\n') + + elif ch == 2: + if o.isEmpty() != True: + o.dequeue() + else: + print('\nQueue is empty..\n') + + elif ch == 3: + if o.isEmpty() != True: + o.printQueue() + else: + print('\nQueue is empty..\n') + + elif ch == 0: + break + + else: + print('\nWrong Input..\nEnter the correct choice..!!\n') \ No newline at end of file From 71f7b22b70df7f0bda1b02b01115a33e0bcf0df4 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 15:52:52 +0530 Subject: [PATCH 49/89] added Gaussain-Mixture-Model.py --- .../Gaussain-Mixture-Model.py | 37 +++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 Clustering Techniques/Gaussain-Mixture-Model.py diff --git a/Clustering Techniques/Gaussain-Mixture-Model.py b/Clustering Techniques/Gaussain-Mixture-Model.py new file mode 100644 index 0000000..edfcd26 --- /dev/null +++ b/Clustering Techniques/Gaussain-Mixture-Model.py @@ -0,0 +1,37 @@ +from numpy import unique +from numpy import where +from matplotlib import pyplot +from sklearn.datasets import make_classification +from sklearn.mixture import GaussianMixture + +# initialize the data set we'll work with +training_data, _ = make_classification( + n_samples=1000, + n_features=2, + n_informative=2, + n_redundant=0, + n_clusters_per_class=1, + random_state=4 +) + +# define the model +gaussian_model = GaussianMixture(n_components=2) + +# train the model +gaussian_model.fit(training_data) + +# assign each data point to a cluster +gaussian_result = gaussian_model.predict(training_data) + +# get all of the unique clusters +gaussian_clusters = unique(gaussian_result) + +# plot Gaussian Mixture the clusters +for gaussian_cluster in gaussian_clusters: + # get data points that fall in this cluster + index = where(gaussian_result == gaussian_clusters) + # make the plot + pyplot.scatter(training_data[index, 0], training_data[index, 1]) + +# show the Gaussian Mixture plot +pyplot.show() From c7ca7c810f14cebd70322a657987b0e112e182e5 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 16:04:23 +0530 Subject: [PATCH 50/89] added DB-Scan-Model.py --- Clustering Techniques/DBSCAN-Model.py | 37 +++++++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 Clustering Techniques/DBSCAN-Model.py diff --git a/Clustering Techniques/DBSCAN-Model.py b/Clustering Techniques/DBSCAN-Model.py new file mode 100644 index 0000000..06b9af2 --- /dev/null +++ b/Clustering Techniques/DBSCAN-Model.py @@ -0,0 +1,37 @@ +from numpy import unique +from numpy import where +from matplotlib import pyplot +from sklearn.datasets import make_classification +from sklearn.cluster import DBSCAN + +# initialize the data set we'll work with +training_data, _ = make_classification( + n_samples=1000, + n_features=2, + n_informative=2, + n_redundant=0, + n_clusters_per_class=1, + random_state=4 +) + +# define the model +dbscan_model = DBSCAN(eps=0.25, min_samples=9) + +# train the model +dbscan_model.fit(training_data) + +# assign each data point to a cluster +dbscan_result = dbscan_model.predict(training_data) + +# get all of the unique clusters +dbscan_cluster = unique(dbscan_result) + +# plot the DBSCAN clusters +for dbscan_cluster in dbscan_clusters: + # get data points that fall in this cluster + index = where(dbscan_result == dbscan_clusters) + # make the plot + pyplot.scatter(training_data[index, 0], training_data[index, 1]) + +# show the DBSCAN 
plot
+pyplot.show()

From d9030315d0a90202dca82b02a84bf60844d2368f Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 16:15:06 +0530
Subject: [PATCH 51/89] added Birch-Model.py

---
 Clustering Techniques/BIRCH-Algorithm.py | 37 ++++++++++++++++++++
 1 file changed, 37 insertions(+)
 create mode 100644 Clustering Techniques/BIRCH-Algorithm.py

diff --git a/Clustering Techniques/BIRCH-Algorithm.py b/Clustering Techniques/Birch_Algorithm.py
new file mode 100644
index 0000000..c209678
--- /dev/null
+++ b/Clustering Techniques/BIRCH-Algorithm.py
@@ -0,0 +1,37 @@
+from numpy import unique
+from numpy import where
+from matplotlib import pyplot
+from sklearn.datasets import make_classification
+from sklearn.cluster import Birch
+
+# initialize the data set we'll work with
+training_data, _ = make_classification(
+    n_samples=1000,
+    n_features=2,
+    n_informative=2,
+    n_redundant=0,
+    n_clusters_per_class=1,
+    random_state=4
+)
+
+# define the model
+birch_model = Birch(threshold=0.03, n_clusters=2)
+
+# train the model
+birch_model.fit(training_data)
+
+# assign each data point to a cluster
+birch_result = birch_model.predict(training_data)
+
+# get all of the unique clusters
+birch_clusters = unique(birch_result)
+
+# plot the BIRCH clusters
+for birch_cluster in birch_clusters:
+    # get data points that fall in this cluster
+    index = where(birch_result == birch_cluster)
+    # make the plot
+    pyplot.scatter(training_data[index, 0], training_data[index, 1])
+
+# show the BIRCH plot
+pyplot.show()

From 327205948907bde7f28f9279c1c840c1d225db91 Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 17:47:33 +0530
Subject: [PATCH 52/89] added Affinity-Propagation-Clustering-Algorithm.py

---
 ...finity-Propagation-Clustering-ALgorithm.py | 37 +++++++++++++++++++
 1 file changed, 37 insertions(+)
 create mode 100644 Clustering Techniques/Affinity-Propagation-Clustering-ALgorithm.py

diff --git a/Clustering Techniques/Affinity-Propagation-Clustering-ALgorithm.py b/Clustering Techniques/Affinity-Propagation-Clustering-ALgorithm.py
new file mode 100644
index 0000000..f111ff9
--- /dev/null
+++ b/Clustering Techniques/Affinity-Propagation-Clustering-ALgorithm.py
@@ -0,0 +1,37 @@
+from numpy import unique
+from numpy import where
+from matplotlib import pyplot
+from sklearn.datasets import make_classification
+from sklearn.cluster import AffinityPropagation
+
+# initialize the data set we'll work with
+training_data, _ = make_classification(
+    n_samples=1000,
+    n_features=2,
+    n_informative=2,
+    n_redundant=0,
+    n_clusters_per_class=1,
+    random_state=4
+)
+
+# define the model
+model = AffinityPropagation(damping=0.7)
+
+# train the model
+model.fit(training_data)
+
+# assign each data point to a cluster
+result = model.predict(training_data)
+
+# get all of the unique clusters
+clusters = unique(result)
+
+# plot the clusters
+for cluster in clusters:
+    # get data points that fall in this cluster
+    index = where(result == cluster)
+    # make the plot
+    pyplot.scatter(training_data[index, 0], training_data[index, 1])
+
+# show the plot
+pyplot.show()

From 7cbc2f3260498cb0ef48e47f6b17453384d6a56a Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 18:09:33 +0530
Subject: [PATCH 53/89] added Mean-Shift-Clustering-Algorithm.py

---
 .../Mean-Shift-Clustering-algorithm.py | 34 +++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 Clustering Techniques/Mean-Shift-Clustering-algorithm.py

diff --git a/Clustering Techniques/Mean-Shift-Clustering-algorithm.py b/Clustering
Techniques/Mean-Shift-Clustering-algorithm.py
new file mode 100644
index 0000000..230c67b
--- /dev/null
+++ b/Clustering Techniques/Mean-Shift-Clustering-algorithm.py
@@ -0,0 +1,34 @@
+from numpy import unique
+from numpy import where
+from matplotlib import pyplot
+from sklearn.datasets import make_classification
+from sklearn.cluster import MeanShift
+
+# initialize the data set we'll work with
+training_data, _ = make_classification(
+    n_samples=1000,
+    n_features=2,
+    n_informative=2,
+    n_redundant=0,
+    n_clusters_per_class=1,
+    random_state=4
+)
+
+# define the model
+mean_model = MeanShift()
+
+# assign each data point to a cluster
+mean_result = mean_model.fit_predict(training_data)
+
+# get all of the unique clusters
+mean_clusters = unique(mean_result)
+
+# plot the Mean-Shift clusters
+for mean_cluster in mean_clusters:
+    # get data points that fall in this cluster
+    index = where(mean_result == mean_cluster)
+    # make the plot
+    pyplot.scatter(training_data[index, 0], training_data[index, 1])
+
+# show the Mean-Shift plot
+pyplot.show()

From b4dba196dffe26780ad17fba304bcbe8d99f942c Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 18:24:41 +0530
Subject: [PATCH 54/89] added OPTICS-algorithm.py

---
 Clustering Techniques/OPTICS-algorithm.py | 34 +++++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 Clustering Techniques/OPTICS-algorithm.py

diff --git a/Clustering Techniques/OPTICS-algorithm.py b/Clustering Techniques/OPTICS-algorithm.py
new file mode 100644
index 0000000..c198fa3
--- /dev/null
+++ b/Clustering Techniques/OPTICS-algorithm.py
@@ -0,0 +1,34 @@
+from numpy import unique
+from numpy import where
+from matplotlib import pyplot
+from sklearn.datasets import make_classification
+from sklearn.cluster import OPTICS
+
+# initialize the data set we'll work with
+training_data, _ = make_classification(
+    n_samples=1000,
+    n_features=2,
+    n_informative=2,
+    n_redundant=0,
+    n_clusters_per_class=1,
+    random_state=4
+)
+
+# define the model
+optics_model = OPTICS(eps=0.75, min_samples=10)
+
+# assign each data point to a cluster
+optics_result = optics_model.fit_predict(training_data)
+
+# get all of the unique clusters
+optics_clusters = unique(optics_result)
+
+# plot the OPTICS clusters
+for optics_cluster in optics_clusters:
+    # get data points that fall in this cluster
+    index = where(optics_result == optics_cluster)
+    # make the plot
+    pyplot.scatter(training_data[index, 0], training_data[index, 1])
+
+# show the OPTICS plot
+pyplot.show()

From 56d3e9eed7fbb5c71b7f50b6c50a4db3b3370888 Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 18:56:39 +0530
Subject: [PATCH 55/89] added Agglomerative-Clustering-Algorithm.py

---
 .../Agglomerative-Clustering-Algorithm.py | 34 +++++++++++++++++++
 1 file changed, 34 insertions(+)
 create mode 100644 Clustering Techniques/Agglomerative-Clustering-Algorithm.py

diff --git a/Clustering Techniques/Agglomerative-Clustering-Algorithm.py b/Clustering Techniques/Agglomerative-Clustering-Algorithm.py
new file mode 100644
index 0000000..9786ab7
--- /dev/null
+++ b/Clustering Techniques/Agglomerative-Clustering-Algorithm.py
@@ -0,0 +1,34 @@
+from numpy import unique
+from numpy import where
+from matplotlib import pyplot
+from sklearn.datasets import make_classification
+from sklearn.cluster import AgglomerativeClustering
+
+# initialize the data set we'll work with
+training_data, _ = make_classification(
+    n_samples=1000,
+    n_features=2,
+    n_informative=2,
+    n_redundant=0,
+    n_clusters_per_class=1,
+    random_state=4
+)
+
+# define the model
+agglomerative_model = AgglomerativeClustering(n_clusters=2)
+
+# assign each data point to a cluster
+agglomerative_result = agglomerative_model.fit_predict(training_data)
+
+# get all of the unique clusters
+agglomerative_clusters = unique(agglomerative_result)
+
+# plot the clusters
+for agglomerative_cluster in agglomerative_clusters:
+    # get data points that fall in this cluster
+    index = where(agglomerative_result == agglomerative_cluster)
+    # make the plot
+    pyplot.scatter(training_data[index, 0], training_data[index, 1])
+
+# show the Agglomerative Hierarchy plot
+pyplot.show()

From 46b945e67887776d9a5bc2e5938523f410e5d413 Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 19:08:18 +0530
Subject: [PATCH 56/89] added Regression folder and added Simple Linear Regression

---
 .../simple-linear-regression.py | 54 +++++++++++++++++++
 1 file changed, 54 insertions(+)
 create mode 100644 Regression-Techniques/simple-linear-regression.py

diff --git a/Regression-Techniques/simple-linear-regression.py b/Regression-Techniques/simple-linear-regression.py
new file mode 100644
index 0000000..0f6da43
--- /dev/null
+++ b/Regression-Techniques/simple-linear-regression.py
@@ -0,0 +1,54 @@
+import numpy as np
+import matplotlib.pyplot as plt
+
+def estimate_coef(x, y):
+    # number of observations/points
+    n = np.size(x)
+
+    # mean of x and y vector
+    m_x = np.mean(x)
+    m_y = np.mean(y)
+
+    # calculating cross-deviation and deviation about x
+    SS_xy = np.sum(y*x) - n*m_y*m_x
+    SS_xx = np.sum(x*x) - n*m_x*m_x
+
+    # calculating regression coefficients
+    b_1 = SS_xy / SS_xx
+    b_0 = m_y - b_1*m_x
+
+    return (b_0, b_1)
+
+def plot_regression_line(x, y, b):
+    # plotting the actual points as scatter plot
+    plt.scatter(x, y, color = "m",
+                marker = "o", s = 30)
+
+    # predicted response vector
+    y_pred = b[0] + b[1]*x
+
+    # plotting the regression line
+    plt.plot(x, y_pred, color = "g")
+
+    # putting labels
+    plt.xlabel('x')
+    plt.ylabel('y')
+
+    # function to show plot
+    plt.show()
+
+def main():
+    # observations / data
+    x = np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])
+    y = np.array([1, 3, 2, 5, 7, 8, 8, 9, 10, 12])
+
+    # estimating coefficients
+    b = estimate_coef(x, y)
+    print("Estimated coefficients:\nb_0 = {} \
+          \nb_1 = {}".format(b[0], b[1]))
+
+    # plotting regression line
+    plot_regression_line(x, y, b)
+
+if __name__ == "__main__":
+    main()

From f53094a1f4d7dfad2b8d40e011060335d2fcb638 Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 19:18:03 +0530
Subject: [PATCH 57/89] added Regression folder and added Multiple Linear Regression

---
 .../Multiple-Linear-Regression.py | 49 +++++++++++++++++++
 1 file changed, 49 insertions(+)
 create mode 100644 Regression-Techniques/Multiple-Linear-Regression.py

diff --git a/Regression-Techniques/Multiple-Linear-Regression.py b/Regression-Techniques/Multiple-Linear-Regression.py
new file mode 100644
index 0000000..e2904fe
--- /dev/null
+++ b/Regression-Techniques/Multiple-Linear-Regression.py
@@ -0,0 +1,49 @@
+import numpy as np
+import matplotlib.pyplot as plt
+
+# Illustrative training data (an assumed example, since the snippet does not define
+# its inputs): x carries a leading column of ones for the intercept term, y is the target.
+rng = np.random.RandomState(0)
+x = np.hstack((np.ones((50, 1)), rng.rand(50, 2)))
+y = 2 + 3 * x[:, 1] + 5 * x[:, 2] + 0.1 * rng.randn(50)
+
+def mse(coef, x, y):
+    return np.mean((np.dot(x, coef) - y)**2)/2
+
+
+def gradients(coef, x, y):
+    return np.mean(x.transpose()*(np.dot(x, coef) - y), axis=1)
+
+
+def multilinear_regression(coef, x, y, lr, b1=0.9, b2=0.999, epsilon=1e-8):
+    prev_error = 0
+    m_coef = np.zeros(coef.shape)
+    v_coef = np.zeros(coef.shape)
+    moment_m_coef = np.zeros(coef.shape)
+    moment_v_coef = np.zeros(coef.shape)
+    t = 0
+
+    while True:
+        error = mse(coef, x, y)
+        if abs(error - prev_error) <= epsilon:
+            break
+
prev_error = error + grad = gradients(coef, x, y) + t += 1 + m_coef = b1 * m_coef + (1-b1)*grad + v_coef = b2 * v_coef + (1-b2)*grad**2 + moment_m_coef = m_coef / (1-b1**t) + moment_v_coef = v_coef / (1-b2**t) + + delta = ((lr / moment_v_coef**0.5 + 1e-8) * + (b1 * moment_m_coef + (1-b1)*grad/(1-b1**t))) + + coef = np.subtract(coef, delta) + return coef + + +coef = np.array([0, 0, 0]) +c = multilinear_regression(coef, x, y, 1e-1) +fig = plt.figure() +ax = fig.add_subplot(projection='3d') + +ax.scatter(x[:, 1], x[:, 2], y, label='y', + s=5, color="dodgerblue") + +ax.scatter(x[:, 1], x[:, 2], c[0] + c[1]*x[:, 1] + c[2]*x[:, 2], + label='regression', s=5, color="orange") + +ax.view_init(45, 0) +ax.legend() +plt.show() From 0e0e6fa54e01e033df6ca7be29934d80727888fa Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 19:23:44 +0530 Subject: [PATCH 58/89] added Regression folder and added Polynomial Regression --- .../Polynomial-Regression.py | 137 ++++++++++++++++++ 1 file changed, 137 insertions(+) create mode 100644 Regression-Techniques/Polynomial-Regression.py diff --git a/Regression-Techniques/Polynomial-Regression.py b/Regression-Techniques/Polynomial-Regression.py new file mode 100644 index 0000000..d56f243 --- /dev/null +++ b/Regression-Techniques/Polynomial-Regression.py @@ -0,0 +1,137 @@ +# Importing libraries + +import numpy as np + +import math + +import matplotlib.pyplot as plt + +# Univariate Polynomial Regression + +class PolynomailRegression() : + + def __init__( self, degree, learning_rate, iterations ) : + + self.degree = degree + + self.learning_rate = learning_rate + + self.iterations = iterations + + # function to transform X + + def transform( self, X ) : + + # initialize X_transform + + X_transform = np.ones( ( self.m, 1 ) ) + + j = 0 + + for j in range( self.degree + 1 ) : + + if j != 0 : + + x_pow = np.power( X, j ) + + # append x_pow to X_transform + + X_transform = np.append( X_transform, x_pow.reshape( -1, 1 ), axis = 1 ) + + return X_transform + + # function to normalize X_transform + + def normalize( self, X ) : + + X[:, 1:] = ( X[:, 1:] - np.mean( X[:, 1:], axis = 0 ) ) / np.std( X[:, 1:], axis = 0 ) + + return X + + # model training + + def fit( self, X, Y ) : + + self.X = X + + self.Y = Y + + self.m, self.n = self.X.shape + + # weight initialization + + self.W = np.zeros( self.degree + 1 ) + + # transform X for polynomial h( x ) = w0 * x^0 + w1 * x^1 + w2 * x^2 + ........+ wn * x^n + + X_transform = self.transform( self.X ) + + # normalize X_transform + + X_normalize = self.normalize( X_transform ) + + # gradient descent learning + + for i in range( self.iterations ) : + + h = self.predict( self.X ) + + error = h - self.Y + + # update weights + + self.W = self.W - self.learning_rate * ( 1 / self.m ) * np.dot( X_normalize.T, error ) + + return self + + # predict + + def predict( self, X ) : + + # transform X for polynomial h( x ) = w0 * x^0 + w1 * x^1 + w2 * x^2 + ........+ wn * x^n + + X_transform = self.transform( X ) + + X_normalize = self.normalize( X_transform ) + + return np.dot( X_transform, self.W ) + + +# Driver code + +def main() : + + # Create dataset + + X = np.array( [ [1], [2], [3], [4], [5], [6], [7] ] ) + + Y = np.array( [ 45000, 50000, 60000, 80000, 110000, 150000, 200000 ] ) + + # model training + + model = PolynomailRegression( degree = 2, learning_rate = 0.01, iterations = 500 ) + + model.fit( X, Y ) + + # Prediction on training set + + Y_pred = model.predict( X ) + + # Visualization + + plt.scatter( X, Y, color = 'blue' ) + + 
plt.plot( X, Y_pred, color = 'orange' ) + + plt.title( 'X vs Y' ) + + plt.xlabel( 'X' ) + + plt.ylabel( 'Y' ) + + plt.show() + + +if __name__ == "__main__" : + + main() From 6580d7df98cdb45e5cdb9ac7d5b98540f1acce56 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 19:29:27 +0530 Subject: [PATCH 59/89] added Regression folder and added Bayesian Regression --- Regression-Techniques/Bayesian-Regression.py | 106 +++++++++++++++++++ 1 file changed, 106 insertions(+) create mode 100644 Regression-Techniques/Bayesian-Regression.py diff --git a/Regression-Techniques/Bayesian-Regression.py b/Regression-Techniques/Bayesian-Regression.py new file mode 100644 index 0000000..09c97a2 --- /dev/null +++ b/Regression-Techniques/Bayesian-Regression.py @@ -0,0 +1,106 @@ +#Import the necessary libraries +import torch +import pyro +import pyro.distributions as dist +from pyro.infer import SVI, Trace_ELBO, Predictive +from pyro.optim import Adam +import matplotlib.pyplot as plt +import seaborn as sns + + +# Generate some sample data +torch.manual_seed(0) +X = torch.linspace(0, 10, 100) +true_slope = 2 +true_intercept = 1 +Y = true_intercept + true_slope * X + torch.randn(100) + +# Define the Bayesian regression model +def model(X, Y): + # Priors for the parameters + slope = pyro.sample("slope", dist.Normal(0, 10)) + intercept = pyro.sample("intercept", dist.Normal(0, 10)) + sigma = pyro.sample("sigma", dist.HalfNormal(1)) + + # Expected value of the outcome + mu = intercept + slope * X + + # Likelihood (sampling distribution) of the observations + with pyro.plate("data", len(X)): + pyro.sample("obs", dist.Normal(mu, sigma), obs=Y) + +# Run Bayesian inference using SVI (Stochastic Variational Inference) +def guide(X, Y): + # Approximate posterior distributions for the parameters + slope_loc = pyro.param("slope_loc", torch.tensor(0.0)) + slope_scale = pyro.param("slope_scale", torch.tensor(1.0), + constraint=dist.constraints.positive) + intercept_loc = pyro.param("intercept_loc", torch.tensor(0.0)) + intercept_scale = pyro.param("intercept_scale", torch.tensor(1.0), + constraint=dist.constraints.positive) + sigma_loc = pyro.param("sigma_loc", torch.tensor(1.0), + constraint=dist.constraints.positive) + + # Sample from the approximate posterior distributions + slope = pyro.sample("slope", dist.Normal(slope_loc, slope_scale)) + intercept = pyro.sample("intercept", dist.Normal(intercept_loc, + intercept_scale)) + sigma = pyro.sample("sigma", dist.HalfNormal(sigma_loc)) + +# Initialize the SVI and optimizer +optim = Adam({"lr": 0.01}) +svi = SVI(model, guide, optim, loss=Trace_ELBO()) + +# Run the inference loop +num_iterations = 1000 +for i in range(num_iterations): + loss = svi.step(X, Y) + if (i + 1) % 100 == 0: + print(f"Iteration {i + 1}/{num_iterations} - Loss: {loss}") + +# Obtain posterior samples using Predictive +predictive = Predictive(model, guide=guide, num_samples=1000) +posterior = predictive(X, Y) + +# Extract the parameter samples +slope_samples = posterior["slope"] +intercept_samples = posterior["intercept"] +sigma_samples = posterior["sigma"] + +# Compute the posterior means +slope_mean = slope_samples.mean() +intercept_mean = intercept_samples.mean() +sigma_mean = sigma_samples.mean() + +# Print the estimated parameters +print("Estimated Slope:", slope_mean.item()) +print("Estimated Intercept:", intercept_mean.item()) +print("Estimated Sigma:", sigma_mean.item()) + + +# Create subplots +fig, axs = plt.subplots(1, 3, figsize=(15, 5)) + +# Plot the posterior distribution of the slope 
+sns.kdeplot(slope_samples, shade=True, ax=axs[0])
+axs[0].set_title("Posterior Distribution of Slope")
+axs[0].set_xlabel("Slope")
+axs[0].set_ylabel("Density")
+
+# Plot the posterior distribution of the intercept
+sns.kdeplot(intercept_samples, shade=True, ax=axs[1])
+axs[1].set_title("Posterior Distribution of Intercept")
+axs[1].set_xlabel("Intercept")
+axs[1].set_ylabel("Density")
+
+# Plot the posterior distribution of sigma
+sns.kdeplot(sigma_samples, shade=True, ax=axs[2])
+axs[2].set_title("Posterior Distribution of Sigma")
+axs[2].set_xlabel("Sigma")
+axs[2].set_ylabel("Density")
+
+# Adjust the layout
+plt.tight_layout()
+
+# Show the plot
+plt.show()

From 705c6a283cf632ceaa25d0586d7affa76926e2ca Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 19:35:10 +0530
Subject: [PATCH 60/89] added Quantile Regression

---
 Regression-Techniques/Quantile-Regression.py | 47 ++++++++++++++++++++
 1 file changed, 47 insertions(+)
 create mode 100644 Regression-Techniques/Quantile-Regression.py

diff --git a/Regression-Techniques/Quantile-Regression.py b/Regression-Techniques/Quantile-Regression.py
new file mode 100644
index 0000000..7429810
--- /dev/null
+++ b/Regression-Techniques/Quantile-Regression.py
@@ -0,0 +1,47 @@
+# Python program to visualize quantile regression
+
+# Importing libraries
+import numpy as np
+import pandas as pd
+import statsmodels.api as sm
+import statsmodels.formula.api as smf
+import matplotlib.pyplot as plt
+
+np.random.seed(0)
+
+# Number of rows
+rows = 20
+
+# Constructing Distance column
+Distance = np.random.uniform(1, 10, rows)
+
+# Constructing Emission column
+Emission = 40 + Distance + np.random.normal(loc=0,
+                                             scale=.25*Distance,
+                                             size=20)
+
+# Creating a dataset
+df = pd.DataFrame({'Distance': Distance,
+                   'Emission': Emission})
+
+# fit the model
+model = smf.quantreg('Emission ~ Distance',
+                     df).fit(q=0.7)
+
+# define figure and axis
+fig, ax = plt.subplots(figsize=(10, 8))
+
+# get y values from the fitted intercept and slope
+y_line = lambda a, b: a + b * Distance
+y = y_line(model.params['Intercept'],
+           model.params['Distance'])
+
+# Plotting data points with the help
+# of the quantile regression equation
+ax.plot(Distance, y, color='black')
+ax.scatter(Distance, Emission, alpha=.3)
+ax.set_xlabel('Distance Traveled', fontsize=20)
+ax.set_ylabel('Emission Generated', fontsize=20)
+
+# Save the plot
+fig.savefig('quantile_regression.png')

From c5905e6826a238cd2754d1e88d37ef5296635c67 Mon Sep 17 00:00:00 2001
From: Aditya D
Date: Tue, 17 Oct 2023 19:40:11 +0530
Subject: [PATCH 61/89] added Isotonic Regression

---
 Regression-Techniques/Isotonic-Regression.py | 23 ++++++++++++++
 1 file changed, 23 insertions(+)
 create mode 100644 Regression-Techniques/Isotonic-Regression.py

diff --git a/Regression-Techniques/Isotonic-Regression.py b/Regression-Techniques/Isotonic-Regression.py
new file mode 100644
index 0000000..b2fc215
--- /dev/null
+++ b/Regression-Techniques/Isotonic-Regression.py
@@ -0,0 +1,23 @@
+from sklearn.isotonic import IsotonicRegression
+import matplotlib.pyplot as plt
+from matplotlib.collections import LineCollection
+import numpy as np
+
+# Illustrative input data (an assumed example; the original snippet expects x, y and n
+# to be defined elsewhere): n noisy, roughly increasing observations.
+n = 20
+rng = np.random.RandomState(0)
+x = np.arange(n)
+y = rng.randint(-10, 10, size=(n,)) + 10 * np.log1p(np.arange(n))
+
+ir = IsotonicRegression()  # create an instance of the IsotonicRegression class
+
+# Fit isotonic regression model
+y_ir = ir.fit_transform(x, y)  # fit the model and transform the data
+print('Isotonic Regression Predictions :\n', y_ir)
+
+# Create LineCollection for the isotonic regression line
+lines = [[[i, y_ir[i]] for i in range(n)]]
+
+# Line to measure the difference between actual and target value
+lc = LineCollection(lines)
+
+plt.plot(x,
y_ir, '-', markersize=10, label='isotonic regression') + +plt.gca().add_collection(lc) +plt.legend() # add a legend + +plt.title("Isotonic Regression") +plt.show() From bddbc37c893a06a9308d540a47cb7a52ca1b0637 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 19:45:30 +0530 Subject: [PATCH 62/89] added Stepwise Regression --- Regression-Techniques/Stepwise-Regression.py | 48 ++++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 Regression-Techniques/Stepwise-Regression.py diff --git a/Regression-Techniques/Stepwise-Regression.py b/Regression-Techniques/Stepwise-Regression.py new file mode 100644 index 0000000..3c91866 --- /dev/null +++ b/Regression-Techniques/Stepwise-Regression.py @@ -0,0 +1,48 @@ +import pandas as pd +import numpy as np +from sklearn import linear_model +from sklearn.model_selection import train_test_split +from sklearn.metrics import accuracy_score +from mlxtend.feature_selection import SequentialFeatureSelector + +# Define the array of data +data = np.array([[1, 2, 3, 4], + [5, 6, 7, 8], + [9, 10, 11, 12]]) + +# Convert the array into a dataframe +df = pd.DataFrame(data) + +# Select the features and target +X = df.iloc[:, :-1] +y = df.iloc[:, -1] + +# Perform stepwise regression +sfs = SequentialFeatureSelector(linear_model.LogisticRegression(), + k_features=3, + forward=True, + scoring='accuracy', + cv=None) +selected_features = sfs.fit(X, y) + +# Create a dataframe with only the selected features +selected_columns = [0, 1, 2, 3] +df_selected = df[selected_columns] + +# Split the data into train and test sets +X_train, X_test,\ + y_train, y_test = train_test_split( + df_selected, y, + test_size=0.3, + random_state=42) + +# Fit a logistic regression model using the selected features +logreg = linear_model.LogisticRegression() +logreg.fit(X_train, y_train) + +# Make predictions using the test set +y_pred = logreg.predict(X_test) + +# Evaluate the model performance +print(y_pred) + From ede7bd7f4416ff4d48db9d65f5f420ba230df9e6 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 19:51:46 +0530 Subject: [PATCH 63/89] added Least Angle Regression --- .../Least-Angle-Regression.py | 25 +++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 Regression-Techniques/Least-Angle-Regression.py diff --git a/Regression-Techniques/Least-Angle-Regression.py b/Regression-Techniques/Least-Angle-Regression.py new file mode 100644 index 0000000..f5d3593 --- /dev/null +++ b/Regression-Techniques/Least-Angle-Regression.py @@ -0,0 +1,25 @@ +# Importing modules that are required + +from sklearn.datasets import load_boston +from sklearn.linear_model import LassoLars +from sklearn.metrics import r2_score +from sklearn.model_selection import train_test_split + +# Loading dataset +dataset = load_boston() +X = dataset.data +y = dataset.target + +# Splitting training and testing data +X_train, X_test, y_train, y_test = train_test_split(X, y, + test_size = 0.15, random_state = 42) + +# Creating and fitting the regressor +regressor = LassoLars(alpha = 0.1) +regressor.fit(X_train, y_train) + + +# Evaluating model +prediction = regressor.predict(X_test) + +print(f"r2 Score of test set : {r2_score(y_test, prediction)}") From de7c9843fd7236795779b6a1f19debc86c0ae866 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 19:54:42 +0530 Subject: [PATCH 64/89] added Logistic Regression --- Regression-Techniques/Logistic-Regression.py | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 
Regression-Techniques/Logistic-Regression.py diff --git a/Regression-Techniques/Logistic-Regression.py b/Regression-Techniques/Logistic-Regression.py new file mode 100644 index 0000000..807d40a --- /dev/null +++ b/Regression-Techniques/Logistic-Regression.py @@ -0,0 +1,20 @@ +# import the necessary libraries +from sklearn.datasets import load_breast_cancer +from sklearn.linear_model import LogisticRegression +from sklearn.model_selection import train_test_split +from sklearn.metrics import accuracy_score +# load the breast cancer dataset +X, y = load_breast_cancer(return_X_y=True) +# split the train and test dataset +X_train, X_test,\ + y_train, y_test = train_test_split(X, y, + test_size=0.20, + random_state=23) +# LogisticRegression +clf = LogisticRegression(random_state=0) +clf.fit(X_train, y_train) +# Prediction +y_pred = clf.predict(X_test) + +acc = accuracy_score(y_test, y_pred) +print("Logistic Regression model accuracy (in %):", acc*100) From 37be03e24671f35f5021fd2aa7dcd743e21f22e2 Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 19:57:40 +0530 Subject: [PATCH 65/89] added Lasso Regression --- Regression-Techniques/Lasso-Regression.py | 140 ++++++++++++++++++++++ 1 file changed, 140 insertions(+) create mode 100644 Regression-Techniques/Lasso-Regression.py diff --git a/Regression-Techniques/Lasso-Regression.py b/Regression-Techniques/Lasso-Regression.py new file mode 100644 index 0000000..7b9626b --- /dev/null +++ b/Regression-Techniques/Lasso-Regression.py @@ -0,0 +1,140 @@ +# Importing libraries + +import numpy as np + +import pandas as pd + +from sklearn.model_selection import train_test_split + +import matplotlib.pyplot as plt + +# Lasso Regression + +class LassoRegression() : + + def __init__( self, learning_rate, iterations, l1_penality ) : + + self.learning_rate = learning_rate + + self.iterations = iterations + + self.l1_penality = l1_penality + + # Function for model training + + def fit( self, X, Y ) : + + # no_of_training_examples, no_of_features + + self.m, self.n = X.shape + + # weight initialization + + self.W = np.zeros( self.n ) + + self.b = 0 + + self.X = X + + self.Y = Y + + # gradient descent learning + + for i in range( self.iterations ) : + + self.update_weights() + + return self + + # Helper function to update weights in gradient descent + + def update_weights( self ) : + + Y_pred = self.predict( self.X ) + + # calculate gradients + + dW = np.zeros( self.n ) + + for j in range( self.n ) : + + if self.W[j] > 0 : + + dW[j] = ( - ( 2 * ( self.X[:, j] ).dot( self.Y - Y_pred ) ) + + + self.l1_penality ) / self.m + + else : + + dW[j] = ( - ( 2 * ( self.X[:, j] ).dot( self.Y - Y_pred ) ) + + - self.l1_penality ) / self.m + + + db = - 2 * np.sum( self.Y - Y_pred ) / self.m + + # update weights + + self.W = self.W - self.learning_rate * dW + + self.b = self.b - self.learning_rate * db + + return self + + # Hypothetical function h( x ) + + def predict( self, X ) : + + return X.dot( self.W ) + self.b + + +def main() : + + # Importing dataset + + df = pd.read_csv( "salary_data.csv" ) + + X = df.iloc[:, :-1].values + + Y = df.iloc[:, 1].values + + # Splitting dataset into train and test set + + X_train, X_test, Y_train, Y_test = train_test_split( X, Y, test_size = 1 / 3, random_state = 0 ) + + # Model training + + model = LassoRegression( iterations = 1000, learning_rate = 0.01, l1_penality = 500 ) + + model.fit( X_train, Y_train ) + + # Prediction on test set + + Y_pred = model.predict( X_test ) + + print( "Predicted values ", np.round( Y_pred[:3], 2 ) ) + 
+ print( "Real values ", Y_test[:3] ) + + print( "Trained W ", round( model.W[0], 2 ) ) + + print( "Trained b ", round( model.b, 2 ) ) + + # Visualization on test set + + plt.scatter( X_test, Y_test, color = 'blue' ) + + plt.plot( X_test, Y_pred, color = 'orange' ) + + plt.title( 'Salary vs Experience' ) + + plt.xlabel( 'Years of Experience' ) + + plt.ylabel( 'Salary' ) + + plt.show() + + +if __name__ == "__main__" : + + main() From f0237235996276aa9d95db8f4b6226218bcfd42f Mon Sep 17 00:00:00 2001 From: Aditya D Date: Tue, 17 Oct 2023 20:00:14 +0530 Subject: [PATCH 66/89] added Ridge Regression --- Regression-Techniques/Ridge-Regression.py | 91 +++++++++++++++++++++++ 1 file changed, 91 insertions(+) create mode 100644 Regression-Techniques/Ridge-Regression.py diff --git a/Regression-Techniques/Ridge-Regression.py b/Regression-Techniques/Ridge-Regression.py new file mode 100644 index 0000000..4f3df50 --- /dev/null +++ b/Regression-Techniques/Ridge-Regression.py @@ -0,0 +1,91 @@ +# Importing libraries + +import numpy as np +import pandas as pd +from sklearn.model_selection import train_test_split +import matplotlib.pyplot as plt + +# Ridge Regression + +class RidgeRegression() : + + def __init__( self, learning_rate, iterations, l2_penality ) : + + self.learning_rate = learning_rate + self.iterations = iterations + self.l2_penality = l2_penality + + # Function for model training + def fit( self, X, Y ) : + + # no_of_training_examples, no_of_features + self.m, self.n = X.shape + + # weight initialization + self.W = np.zeros( self.n ) + + self.b = 0 + self.X = X + self.Y = Y + + # gradient descent learning + + for i in range( self.iterations ) : + self.update_weights() + return self + + # Helper function to update weights in gradient descent + + def update_weights( self ) : + Y_pred = self.predict( self.X ) + + # calculate gradients + dW = ( - ( 2 * ( self.X.T ).dot( self.Y - Y_pred ) ) + + ( 2 * self.l2_penality * self.W ) ) / self.m + db = - 2 * np.sum( self.Y - Y_pred ) / self.m + + # update weights + self.W = self.W - self.learning_rate * dW + self.b = self.b - self.learning_rate * db + return self + + # Hypothetical function h( x ) + def predict( self, X ) : + return X.dot( self.W ) + self.b + +# Driver code + +def main() : + + # Importing dataset + df = pd.read_csv( "salary_data.csv" ) + X = df.iloc[:, :-1].values + Y = df.iloc[:, 1].values + + # Splitting dataset into train and test set + X_train, X_test, Y_train, Y_test = train_test_split( X, Y, + + test_size = 1 / 3, random_state = 0 ) + + # Model training + model = RidgeRegression( iterations = 1000, + learning_rate = 0.01, l2_penality = 1 ) + model.fit( X_train, Y_train ) + + # Prediction on test set + Y_pred = model.predict( X_test ) + print( "Predicted values ", np.round( Y_pred[:3], 2 ) ) + print( "Real values ", Y_test[:3] ) + print( "Trained W ", round( model.W[0], 2 ) ) + print( "Trained b ", round( model.b, 2 ) ) + + # Visualization on test set + plt.scatter( X_test, Y_test, color = 'blue' ) + plt.plot( X_test, Y_pred, color = 'orange' ) + plt.title( 'Salary vs Experience' ) + plt.xlabel( 'Years of Experience' ) + plt.ylabel( 'Salary' ) + plt.show() + +if __name__ == "__main__" : + main() From 0496d0e15b9d6a3d9da01e96d81339e6606a65fa Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Tue, 17 Oct 2023 20:19:41 +0530 Subject: [PATCH 67/89] Update README.md --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7ca53f4..369bbdf 100644 --- a/README.md +++ b/README.md @@ 
-56,8 +56,8 @@ This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/) ## Contributors ✨ - - + +
From 0760095d3a4ce8722cabd6fab2707cafb987b36b Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Tue, 17 Oct 2023 20:21:38 +0530 Subject: [PATCH 68/89] [UPDATED] Directory Structure --- ...-ALgorithm.py => Affinity_Propagation_Clustering_Algorithm.py} | 0 ...stering-Algorithm.py => Agglomerative_Clustering_Algorithm.py} | 0 Clustering Techniques/{BIRCH-Algorithm.py => Birch_Algorithm.py} | 0 Clustering Techniques/{DBSCAN-Model.py => DBScan_Model.py} | 0 .../{Gaussain-Mixture-Model.py => Gaussain_Mixture_Model.py} | 0 ...Clustering-algorithm.py => Mean_Shift_Clustering_Algorithm.py} | 0 .../{OPTICS-algorithm.py => Optics_Algorithm.py} | 0 ..._salesman_problem_solver.py => Travelling_Salesman_Problem.py} | 0 .../Bayesian_Regression.py | 0 .../Isotonic_Regression.py | 0 .../Lasso_Regression.py | 0 .../Least_Angle_Regression.py | 0 .../Linear_Regression.py | 0 .../Logistic_Regression.py | 0 .../Polynomial_Regression.py | 0 .../Quantile_Regression.py | 0 .../Ridge_Regression.py | 0 .../Simple_Linear_Regression.py | 0 .../Stepwise_Regression.py | 0 {Trie => Tree}/Menu_Driven_Code_for_Tries.py | 0 20 files changed, 0 insertions(+), 0 deletions(-) rename Clustering Techniques/{Affinity-Propagation-Clustering-ALgorithm.py => Affinity_Propagation_Clustering_Algorithm.py} (100%) rename Clustering Techniques/{Agglomerative-Clustering-Algorithm.py => Agglomerative_Clustering_Algorithm.py} (100%) rename Clustering Techniques/{BIRCH-Algorithm.py => Birch_Algorithm.py} (100%) rename Clustering Techniques/{DBSCAN-Model.py => DBScan_Model.py} (100%) rename Clustering Techniques/{Gaussain-Mixture-Model.py => Gaussain_Mixture_Model.py} (100%) rename Clustering Techniques/{Mean-Shift-Clustering-algorithm.py => Mean_Shift_Clustering_Algorithm.py} (100%) rename Clustering Techniques/{OPTICS-algorithm.py => Optics_Algorithm.py} (100%) rename Graphs/{travelling_salesman_problem_solver.py => Travelling_Salesman_Problem.py} (100%) rename Regression-Techniques/Bayesian-Regression.py => Regression Techniques/Bayesian_Regression.py (100%) rename Regression-Techniques/Isotonic-Regression.py => Regression Techniques/Isotonic_Regression.py (100%) rename Regression-Techniques/Lasso-Regression.py => Regression Techniques/Lasso_Regression.py (100%) rename Regression-Techniques/Least-Angle-Regression.py => Regression Techniques/Least_Angle_Regression.py (100%) rename Regression-Techniques/Multiple-Linear-Regression.py => Regression Techniques/Linear_Regression.py (100%) rename Regression-Techniques/Logistic-Regression.py => Regression Techniques/Logistic_Regression.py (100%) rename Regression-Techniques/Polynomial-Regression.py => Regression Techniques/Polynomial_Regression.py (100%) rename Regression-Techniques/Quantile-Regression.py => Regression Techniques/Quantile_Regression.py (100%) rename Regression-Techniques/Ridge-Regression.py => Regression Techniques/Ridge_Regression.py (100%) rename Regression-Techniques/simple-linear-regression.py => Regression Techniques/Simple_Linear_Regression.py (100%) rename Regression-Techniques/Stepwise-Regression.py => Regression Techniques/Stepwise_Regression.py (100%) rename {Trie => Tree}/Menu_Driven_Code_for_Tries.py (100%) diff --git a/Clustering Techniques/Affinity-Propagation-Clustering-ALgorithm.py b/Clustering Techniques/Affinity_Propagation_Clustering_Algorithm.py similarity index 100% rename from Clustering Techniques/Affinity-Propagation-Clustering-ALgorithm.py rename to Clustering Techniques/Affinity_Propagation_Clustering_Algorithm.py diff --git a/Clustering 
Techniques/Agglomerative-Clustering-Algorithm.py b/Clustering Techniques/Agglomerative_Clustering_Algorithm.py similarity index 100% rename from Clustering Techniques/Agglomerative-Clustering-Algorithm.py rename to Clustering Techniques/Agglomerative_Clustering_Algorithm.py diff --git a/Clustering Techniques/BIRCH-Algorithm.py b/Clustering Techniques/Birch_Algorithm.py similarity index 100% rename from Clustering Techniques/BIRCH-Algorithm.py rename to Clustering Techniques/Birch_Algorithm.py diff --git a/Clustering Techniques/DBSCAN-Model.py b/Clustering Techniques/DBScan_Model.py similarity index 100% rename from Clustering Techniques/DBSCAN-Model.py rename to Clustering Techniques/DBScan_Model.py diff --git a/Clustering Techniques/Gaussain-Mixture-Model.py b/Clustering Techniques/Gaussain_Mixture_Model.py similarity index 100% rename from Clustering Techniques/Gaussain-Mixture-Model.py rename to Clustering Techniques/Gaussain_Mixture_Model.py diff --git a/Clustering Techniques/Mean-Shift-Clustering-algorithm.py b/Clustering Techniques/Mean_Shift_Clustering_Algorithm.py similarity index 100% rename from Clustering Techniques/Mean-Shift-Clustering-algorithm.py rename to Clustering Techniques/Mean_Shift_Clustering_Algorithm.py diff --git a/Clustering Techniques/OPTICS-algorithm.py b/Clustering Techniques/Optics_Algorithm.py similarity index 100% rename from Clustering Techniques/OPTICS-algorithm.py rename to Clustering Techniques/Optics_Algorithm.py diff --git a/Graphs/travelling_salesman_problem_solver.py b/Graphs/Travelling_Salesman_Problem.py similarity index 100% rename from Graphs/travelling_salesman_problem_solver.py rename to Graphs/Travelling_Salesman_Problem.py diff --git a/Regression-Techniques/Bayesian-Regression.py b/Regression Techniques/Bayesian_Regression.py similarity index 100% rename from Regression-Techniques/Bayesian-Regression.py rename to Regression Techniques/Bayesian_Regression.py diff --git a/Regression-Techniques/Isotonic-Regression.py b/Regression Techniques/Isotonic_Regression.py similarity index 100% rename from Regression-Techniques/Isotonic-Regression.py rename to Regression Techniques/Isotonic_Regression.py diff --git a/Regression-Techniques/Lasso-Regression.py b/Regression Techniques/Lasso_Regression.py similarity index 100% rename from Regression-Techniques/Lasso-Regression.py rename to Regression Techniques/Lasso_Regression.py diff --git a/Regression-Techniques/Least-Angle-Regression.py b/Regression Techniques/Least_Angle_Regression.py similarity index 100% rename from Regression-Techniques/Least-Angle-Regression.py rename to Regression Techniques/Least_Angle_Regression.py diff --git a/Regression-Techniques/Multiple-Linear-Regression.py b/Regression Techniques/Linear_Regression.py similarity index 100% rename from Regression-Techniques/Multiple-Linear-Regression.py rename to Regression Techniques/Linear_Regression.py diff --git a/Regression-Techniques/Logistic-Regression.py b/Regression Techniques/Logistic_Regression.py similarity index 100% rename from Regression-Techniques/Logistic-Regression.py rename to Regression Techniques/Logistic_Regression.py diff --git a/Regression-Techniques/Polynomial-Regression.py b/Regression Techniques/Polynomial_Regression.py similarity index 100% rename from Regression-Techniques/Polynomial-Regression.py rename to Regression Techniques/Polynomial_Regression.py diff --git a/Regression-Techniques/Quantile-Regression.py b/Regression Techniques/Quantile_Regression.py similarity index 100% rename from 
Regression-Techniques/Quantile-Regression.py rename to Regression Techniques/Quantile_Regression.py diff --git a/Regression-Techniques/Ridge-Regression.py b/Regression Techniques/Ridge_Regression.py similarity index 100% rename from Regression-Techniques/Ridge-Regression.py rename to Regression Techniques/Ridge_Regression.py diff --git a/Regression-Techniques/simple-linear-regression.py b/Regression Techniques/Simple_Linear_Regression.py similarity index 100% rename from Regression-Techniques/simple-linear-regression.py rename to Regression Techniques/Simple_Linear_Regression.py diff --git a/Regression-Techniques/Stepwise-Regression.py b/Regression Techniques/Stepwise_Regression.py similarity index 100% rename from Regression-Techniques/Stepwise-Regression.py rename to Regression Techniques/Stepwise_Regression.py diff --git a/Trie/Menu_Driven_Code_for_Tries.py b/Tree/Menu_Driven_Code_for_Tries.py similarity index 100% rename from Trie/Menu_Driven_Code_for_Tries.py rename to Tree/Menu_Driven_Code_for_Tries.py From 4b4e0b3ae40f15d88dc6ff4c74059baa6a001dd1 Mon Sep 17 00:00:00 2001 From: ConradKash Date: Tue, 17 Oct 2023 21:26:28 +0300 Subject: [PATCH 69/89] naive_search_pattern algorithm --- .../Naive_Pattern_Searching.py | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/Pattern Searching Algorithm/Naive_Pattern_Searching.py b/Pattern Searching Algorithm/Naive_Pattern_Searching.py index 16e3dea..cc955c3 100644 --- a/Pattern Searching Algorithm/Naive_Pattern_Searching.py +++ b/Pattern Searching Algorithm/Naive_Pattern_Searching.py @@ -16,7 +16,7 @@ def search(pat, txt): if (txt[i + j] != pat[j]): break j += 1 - + if (j == M): print("Pattern found at index ", i) @@ -27,10 +27,13 @@ def search(pat, txt): pat = "AABA" # Function call + print('Below iis an` example of Naive Pattern Searching Algorithm\n') + print('It is being implemented for the following text and pattern: \n') + print(' Text = "AABAACAADAABAAABAA" pattern = "AABA"') + search(pat, txt) #try it yourself + print('\nNow try it yourself\n') txt = input("Enter the text: ") pat = input("Enter the pattern: ") - - - search(pat, txt) + search(pat, txt) \ No newline at end of file From b93f55d81a2cc0da11cc4b8ee93f99f3b521c733 Mon Sep 17 00:00:00 2001 From: pankaj kumar Date: Wed, 18 Oct 2023 00:09:35 +0530 Subject: [PATCH 70/89] I have created for dynamic inputs.. Please review it.. 
--- Graphs/Tarjan's_Algorithm.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/Graphs/Tarjan's_Algorithm.py b/Graphs/Tarjan's_Algorithm.py index cf9a356..cc88b01 100644 --- a/Graphs/Tarjan's_Algorithm.py +++ b/Graphs/Tarjan's_Algorithm.py @@ -2,7 +2,7 @@ is a dictionary-like container from the collections module that provides a default value for keys that do not exist.""" -from collections import defaultdict +from collections import defaultdict # Function to run Tarjan's algorithm def tarjan(graph): @@ -28,16 +28,16 @@ def strongconnect(node): try: successors = graph[node] except: - + successors = [] for successor in successors: if successor not in indexes: # Successor has not yet been visited; recurse on it strongconnect(successor) - lowlinks[node] = min(lowlinks[node],lowlinks[successor]) + lowlinks[node] = min(lowlinks[node], lowlinks[successor]) elif successor in stack: - # Successor is in stack, hence in current SCC - lowlinks[node] = min(lowlinks[node],indexes[successor]) + # Successor is in the stack, hence in the current SCC + lowlinks[node] = min(lowlinks[node], indexes[successor]) # If `node` is a root node, pop the stack and generate an SCC if lowlinks[node] == indexes[node]: @@ -46,7 +46,8 @@ def strongconnect(node): while True: successor = stack.pop() connected_component.append(successor) - if successor == node: break + if successor == node: + break components.append(connected_component) for node in graph: @@ -55,20 +56,19 @@ def strongconnect(node): return components -# Sample graph -graph = { - 0: [1], - 1: [2], - 2: [0, 3], - 3: [4], - 4: [5], - 5: [3, 6], - 6: [] -} +# Accept dynamic input for the graph +graph = defaultdict(list) +num_nodes = int(input("Enter the number of nodes: ")) +for i in range(num_nodes): + node = int(input(f"Enter the successors of node {i}: ")) + successors = list(map(int, input().split())) + graph[node] = successors +print("Strongly Connected Components:") print(tarjan(graph)) + """ Explanation:-> 1) Tarjan's algorithm performs a DFS on the graph to find strongly connected components. From 5e86ab62f886cd09db0c283d609f4161dccca5dd Mon Sep 17 00:00:00 2001 From: ConradKash Date: Wed, 18 Oct 2023 00:10:19 +0300 Subject: [PATCH 71/89] naive_search_pattern algorithm --- .../Naive_Pattern_Searching.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {Pattern Searching Algorithm => Searching Techniques}/Naive_Pattern_Searching.py (100%) diff --git a/Pattern Searching Algorithm/Naive_Pattern_Searching.py b/Searching Techniques/Naive_Pattern_Searching.py similarity index 100% rename from Pattern Searching Algorithm/Naive_Pattern_Searching.py rename to Searching Techniques/Naive_Pattern_Searching.py From 0bbeebb82eaff897e29932c471fa6c9c7f52154d Mon Sep 17 00:00:00 2001 From: Avdhesh-Varshney <114330097+Avdhesh-Varshney@users.noreply.github.com> Date: Wed, 18 Oct 2023 10:53:17 +0530 Subject: [PATCH 72/89] Morris Traversal --- Tree/Morris_Traversal.py | 70 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 Tree/Morris_Traversal.py diff --git a/Tree/Morris_Traversal.py b/Tree/Morris_Traversal.py new file mode 100644 index 0000000..46d3c91 --- /dev/null +++ b/Tree/Morris_Traversal.py @@ -0,0 +1,70 @@ +# Morris Traversal +# Time Complexity = O(n) +# Space Complexity = O(1) (Main advantage of using this traversal. 
Uses only constant space) +# 1 +# / \ +# / \ +# 2 3 +# / \ +# / \ +# 4 5 +# \ +# \ +# 6 +# +# Output --> 4 2 5 6 1 3 + +class TreeNode: + def __init__(self, val=0, left=None, right=None): + self.val = val + self.left = left + self.right = right + +# Morris-inorder traversal +def Morris_Traversal(root): + morris = [] + cur = root + + while cur: + if cur.left is None: + morris.append(cur.val) + cur = cur.right + else: + temp = cur.left + while temp.right and temp.right != cur: + temp = temp.right + + if temp.right is None: + temp.right = cur + cur = cur.left + else: + temp.right = None + morris.append(cur.val) + cur = cur.right + + return morris + +if __name__ == '__main__': + print("\033c", end='', flush=True) + # Input tree elements + root_val = int(input("Enter the value for the root: ")) + root = TreeNode(root_val) + + print('\n') + queue = [root] + while queue: + current = queue.pop(0) + left_val = int(input(f"Enter the value for the left child of {current.val} (Enter -1 for no child): ")) + if left_val != -1: + current.left = TreeNode(left_val) + queue.append(current.left) + right_val = int(input(f"Enter the value for the right child of {current.val} (Enter -1 for no child): ")) + if right_val != -1: + current.right = TreeNode(right_val) + queue.append(current.right) + print('\n') + + # Morris Traversal starts + morris = Morris_Traversal(root) + print(' '.join([str(i) for i in morris])) + From 5c48d9f6b87504c7750549701861f72b90952bd3 Mon Sep 17 00:00:00 2001 From: Tanushree <60938591+aggarwal-tanushree@users.noreply.github.com> Date: Fri, 20 Oct 2023 17:37:16 +0200 Subject: [PATCH 73/89] algo: Added TimSort Algorithm --- Sorting Techniques/Tim_Sort.py | 110 +++++++++++++++++++++++++++++++++ 1 file changed, 110 insertions(+) create mode 100644 Sorting Techniques/Tim_Sort.py diff --git a/Sorting Techniques/Tim_Sort.py b/Sorting Techniques/Tim_Sort.py new file mode 100644 index 0000000..f66b307 --- /dev/null +++ b/Sorting Techniques/Tim_Sort.py @@ -0,0 +1,110 @@ +# Python : Timsort algorithm +#################################################################################################################### +# TimSort is a hybrid sorting algorithm that combines the strengths of merge sort and insertion sort. +# It is designed to efficiently sort a wide range of real-world data types. It maintains the relative order of equal elements in the sorted output. +# Divide into Runs: TimSort starts by dividing the input array into small, already sorted subsequences called "runs." +# Merge Runs: It then merges these runs together using a combination of merge sort and insertion sort. This merging process optimizes performance, especially for data with pre-existing order. +#################################################################################################################### + + +MIN_MERGE = 32 + + +def calcMinRun(n): + """Returns the minimum length of a run from 23 - 64 so that + the len(array)/minrun is less than or equal to a power of 2. + + e.g. 1=>1, ..., 63=>63, 64=>32, 65=>33, + ..., 127=>64, 128=>32, ... 
+ """ + r = 0 + while n >= MIN_MERGE: + r |= n & 1 + n >>= 1 + return n + r + + +# This function sorts array from left index to +# to right index which is of size atmost RUN +def insertionSort(arr, left, right): + for i in range(left + 1, right + 1): + j = i + while j > left and arr[j] < arr[j - 1]: + arr[j], arr[j - 1] = arr[j - 1], arr[j] + j -= 1 + + +# Merge function merges the sorted runs +def merge(arr, l, m, r): + + # original array is broken in two parts + # left and right array + len1, len2 = m - l + 1, r - m + left, right = [], [] + for i in range(0, len1): + left.append(arr[l + i]) + for i in range(0, len2): + right.append(arr[m + 1 + i]) + + i, j, k = 0, 0, l + + # after comparing, we merge those two array + # in larger sub array + while i < len1 and j < len2: + if left[i] <= right[j]: + arr[k] = left[i] + i += 1 + + else: + arr[k] = right[j] + j += 1 + + k += 1 + + # Copy remaining elements of left, if any + while i < len1: + arr[k] = left[i] + k += 1 + i += 1 + + # Copy remaining element of right, if any + while j < len2: + arr[k] = right[j] + k += 1 + j += 1 + + +# Iterative Timsort function to sort the +# array[0...n-1] (similar to merge sort) +def timSort(arr): + n = len(arr) + minRun = calcMinRun(n) + + # Sort individual subarrays of size RUN + for start in range(0, n, minRun): + end = min(start + minRun - 1, n - 1) + insertionSort(arr, start, end) + + # Start merging from size RUN (or 32). It will merge + # to form size 64, then 128, 256 and so on .... + size = minRun + while size < n: + + # Pick starting point of left sub array. We + # are going to merge arr[left..left+size-1] + # and arr[left+size, left+2*size-1] + # After every merge, we increase left by 2*size + for left in range(0, n, 2 * size): + + # Find ending point of left sub array + # mid+1 is starting point of right sub array + mid = min(n - 1, left + size - 1) + right = min((left + 2 * size - 1), (n - 1)) + + # Merge sub array arr[left.....mid] & + # arr[mid+1....right] + if mid < right: + merge(arr, left, mid, right) + + size = 2 * size + From e992049f758031c3fb53d6685c8882f3704be17c Mon Sep 17 00:00:00 2001 From: pankaj kumar Date: Sat, 21 Oct 2023 22:14:29 +0530 Subject: [PATCH 74/89] I have implemented the Boyer moore algo inside searching techniques with dynamic inputs and proper explanation . please review it.. 
--- Searching Techniques/Boyer_Moore_Algorithm.py | 74 +++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 Searching Techniques/Boyer_Moore_Algorithm.py diff --git a/Searching Techniques/Boyer_Moore_Algorithm.py b/Searching Techniques/Boyer_Moore_Algorithm.py new file mode 100644 index 0000000..1b2df13 --- /dev/null +++ b/Searching Techniques/Boyer_Moore_Algorithm.py @@ -0,0 +1,74 @@ +"""Program for Bad Character Heuristic +of Boyer Moore String Matching Algorithm""" + + +NO_OF_CHARS = 256 + +def badCharHeuristic(string, size): + ''' + The preprocessing function for + Boyer Moore's bad character heuristic + ''' + # Initialize all occurrences as -1 + badChar = [-1] * NO_OF_CHARS + + # Fill the actual value of the last occurrence + for i in range(size): + badChar[ord(string[i])] = i + + # Return the initialized list + return badChar + +def search(txt, pat): + ''' + A pattern searching function that uses the Bad Character + Heuristic of the Boyer Moore Algorithm + ''' + m = len(pat) + n = len(txt) + + # Create the bad character list by calling + # the preprocessing function badCharHeuristic() + # for the given pattern + badChar = badCharHeuristic(pat, m) + + # s is the shift of the pattern with respect to the text + s = 0 + while s <= n - m: + j = m - 1 + + # Keep reducing index j of the pattern while + # characters of the pattern and text are matching + # at this shift s + while j >= 0 and pat[j] == txt[s + j]: + j -= 1 + + # If the pattern is present at the current shift, + # then index j will become -1 after the above loop + if j < 0: + print("Pattern occurs at shift =", s) + + ''' + Shift the pattern so that the next character in the text + aligns with the last occurrence of it in the pattern. + The condition s+m < n is necessary for the case when + the pattern occurs at the end of the text + ''' + s += (m - badChar[ord(txt[s + m])] if s + m < n else 1) + else: + ''' + Shift the pattern so that the bad character in the text + aligns with the last occurrence of it in the pattern. The + max function is used to make sure that we get a positive + shift. We may get a negative shift if the last occurrence + of the bad character in the pattern is on the right side of the + current character. 
+ ''' + s += max(1, j - badChar[ord(txt[s + j])]) + +while True: + txt = input('Enter the text (or press Enter to exit): ') + if not txt: + break + pat = input('Enter the pattern to search for: ') + search(txt, pat) From 289decf9613f065597cf6e37e4e6492fd7a5d76f Mon Sep 17 00:00:00 2001 From: Varun Singh Date: Sat, 21 Oct 2023 23:54:20 +0530 Subject: [PATCH 75/89] add: sieve_of_eratosthenes.py --- Math/sieve_of_eratosthenes.py | 30 ++++++++++++++++++++++++++++++ 1 file changed, 30 insertions(+) create mode 100644 Math/sieve_of_eratosthenes.py diff --git a/Math/sieve_of_eratosthenes.py b/Math/sieve_of_eratosthenes.py new file mode 100644 index 0000000..88cf490 --- /dev/null +++ b/Math/sieve_of_eratosthenes.py @@ -0,0 +1,30 @@ +"""Sieve Of Eratosthenes: +The sieve of eratosthenes is one of the most efficient way to find all +the prime numbers upto the number `n` + +for more reference(https://www.geeksforgeeks.org/sieve-of-eratosthenes/) +""" + +#importing `math` module which will be used later +import math + +#specify upto where you have to find the prime numbers +n = int(input("Enter the range : ")) + +#`arr` is a boolean list that contains `n+1` `False` entries +arr = [False]*(n+1) + +#loop upto the square root of the range `n` +for i in range(2,int(math.sqrt(n))+1): + if arr[i] == False: + for j in range(i*i, n+1, i): + #making the entry `True` for all entries whose index is the multiple + arr[j] = True + +#after the loop exits, all the entry that are prime numbers +#are marked as `False` + +#printing all the prime numbers +for i in range(2,n): + if arr[i+1] == False: + print(i+1) From 6b0c4ab49c4865e5f904d717f022cb5c0f67cde9 Mon Sep 17 00:00:00 2001 From: Prateek Date: Mon, 23 Oct 2023 13:05:42 +0530 Subject: [PATCH 76/89] algo: Added Dijkstra's Algorithm --- Graphs/Dijkstra's_Algorithm.py | 60 ++++++++++++++++++++++++++++++++++ 1 file changed, 60 insertions(+) create mode 100644 Graphs/Dijkstra's_Algorithm.py diff --git a/Graphs/Dijkstra's_Algorithm.py b/Graphs/Dijkstra's_Algorithm.py new file mode 100644 index 0000000..ffbd4fc --- /dev/null +++ b/Graphs/Dijkstra's_Algorithm.py @@ -0,0 +1,60 @@ +# Dijkstra's Algorithm is a widely used graph algorithm designed to find the +# shortest path from a source node to all other nodes in a weighted graph. It +# was developed by Dutch computer scientist Edsger W. Dijkstra in 1956. The +# algorithm is particularly effective when all edge weights are non-negative. 
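+#
+# A minimal usage sketch (illustrative only, not part of the original script; it
+# assumes the adjacency-matrix convention used below, where 0 means "no edge"):
+#
+#   graph = [[0, 4, 0, 8],
+#            [4, 0, 2, 0],
+#            [0, 2, 0, 1],
+#            [8, 0, 1, 0]]
+#   Dijkstra(graph, 0, 3)   # prints the cheapest route 0-> 1-> 2-> 3 (total weight 7)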
+ +def Dijkstra(Graph, _s, _d): + row = len(Graph) + col = len(Graph[0]) + dist = [float("Inf")] * row + Blackened = [0] * row + pathlength = [0] * row + parent = [-1] * row + dist[_s] = 0 + for count in range(row-1): + u = MinDistance(dist, Blackened) + + # if MinDistance() returns INFINITY, then the graph is not + # connected and we have traversed all of the vertices in the + # connected component of the source vertex, so it can reduce + # the time complexity sometimes + # In a directed graph, it means that the source vertex + # is not a root + if u == float("Inf"): + break + else: + + # Mark the vertex as Blackened + Blackened[u] = 1 + for v in range(row): + if Blackened[v] == 0 and Graph[u][v] and dist[u]+Graph[u][v] < dist[v]: + parent[v] = u + pathlength[v] = pathlength[parent[v]]+1 + dist[v] = dist[u]+Graph[u][v] + elif Blackened[v] == 0 and Graph[u][v] and dist[u]+Graph[u][v] == dist[v] and pathlength[u]+1 < pathlength[v]: + parent[v] = u + pathlength[v] = pathlength[u] + 1 + if dist[_d] != float("Inf"): + + # Printing the path + PrintPath(parent, _d) + else: + print("There is no path between vertex ", _s, "to vertex ", _d) + +# Function to print the path + +def PrintPath(parent, _d): + if parent[_d] == -1: + print(_d, end='') + return + PrintPath(parent, parent[_d]) + print("->", _d, end='') + + +def MinDistance(dist, Blackened): + min = float("Inf") + for v in range(len(dist)): + if not Blackened[v] and dist[v] < min: + min = dist[v] + Min_index = v + return float("Inf") if min == float("Inf") else Min_index From 0f038d412f7c4d45819bcd8506a568328acfaf5b Mon Sep 17 00:00:00 2001 From: Prateek Date: Mon, 23 Oct 2023 13:35:22 +0530 Subject: [PATCH 77/89] algo: Added Floyd Warshall Algorithm --- Graphs/Floyd_Warshall_Algorithm.py | 77 ++++++++++++++++++++++++++++++ 1 file changed, 77 insertions(+) create mode 100644 Graphs/Floyd_Warshall_Algorithm.py diff --git a/Graphs/Floyd_Warshall_Algorithm.py b/Graphs/Floyd_Warshall_Algorithm.py new file mode 100644 index 0000000..239b07e --- /dev/null +++ b/Graphs/Floyd_Warshall_Algorithm.py @@ -0,0 +1,77 @@ +# The Floyd-Warshall Algorithm is a dynamic programming algorithm used to find the shortest paths between all pairs of nodes in a weighted graph. It works for +# directed or undirected graphs with positive or negative edge weights and is particularly valuable when you need to compute and store all shortest paths in +# a graph. The algorithm has a time and space complexity of O(n ^ 3), making it +# suitable for small to moderately sized graphs. + + +# Number of vertices in the graph +V = 4 + +# Define infinity as the large +# enough value. This value will be +# used for vertices not connected to each other +INF = 99999 + +# Solves all pair shortest path +# via Floyd Warshall Algorithm + + +def floydWarshall(graph): + """ dist[][] will be the output + matrix that will finally + have the shortest distances + between every pair of vertices """ + """ initializing the solution matrix + same as input graph matrix + OR we can say that the initial + values of shortest distances + are based on shortest paths considering no + intermediate vertices """ + + dist = list(map(lambda i: list(map(lambda j: j, i)), graph)) + + """ Add all vertices one by one + to the set of intermediate + vertices. + ---> Before start of an iteration, + we have shortest distances + between all pairs of vertices + such that the shortest + distances consider only the + vertices in the set + {0, 1, 2, .. k-1} as intermediate vertices. 
+ ----> After the end of a + iteration, vertex no. k is + added to the set of intermediate + vertices and the + set becomes {0, 1, 2, .. k} + """ + for k in range(V): + + # pick all vertices as source one by one + for i in range(V): + + # Pick all vertices as destination for the + # above picked source + for j in range(V): + + # If vertex k is on the shortest path from + # i to j, then update the value of dist[i][j] + dist[i][j] = min(dist[i][j], + dist[i][k] + dist[k][j] + ) + printSolution(dist) + + +# A utility function to print the solution +def printSolution(dist): + print("Following matrix shows the shortest distances\ +between every pair of vertices") + for i in range(V): + for j in range(V): + if (dist[i][j] == INF): + print("%7s" % ("INF"), end=" ") + else: + print("%7d\t" % (dist[i][j]), end=' ') + if j == V-1: + print() From c22bd8e2f8c926940789cd6e4618f2067a1b7883 Mon Sep 17 00:00:00 2001 From: Prateek Date: Mon, 23 Oct 2023 13:57:15 +0530 Subject: [PATCH 78/89] algo: Added Floyd Warshall Algorithm --- Graphs/Floyd_Warshall_Algorithm.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/Graphs/Floyd_Warshall_Algorithm.py b/Graphs/Floyd_Warshall_Algorithm.py index 239b07e..1a65876 100644 --- a/Graphs/Floyd_Warshall_Algorithm.py +++ b/Graphs/Floyd_Warshall_Algorithm.py @@ -57,13 +57,14 @@ def floydWarshall(graph): # If vertex k is on the shortest path from # i to j, then update the value of dist[i][j] - dist[i][j] = min(dist[i][j], - dist[i][k] + dist[k][j] - ) + dist[i][j] = min(dist[i][j], + dist[i][k] + dist[k][j] + ) printSolution(dist) - # A utility function to print the solution + + def printSolution(dist): print("Following matrix shows the shortest distances\ between every pair of vertices") From 13f71ef946df7d43d05d9c5c928b527cc74555dd Mon Sep 17 00:00:00 2001 From: Prateek Date: Mon, 23 Oct 2023 14:13:57 +0530 Subject: [PATCH 79/89] algo: Added Floyd Warshall Algorithm --- Graphs/Dijkstra's_Algorithm.py | 60 ---------------------------------- 1 file changed, 60 deletions(-) delete mode 100644 Graphs/Dijkstra's_Algorithm.py diff --git a/Graphs/Dijkstra's_Algorithm.py b/Graphs/Dijkstra's_Algorithm.py deleted file mode 100644 index ffbd4fc..0000000 --- a/Graphs/Dijkstra's_Algorithm.py +++ /dev/null @@ -1,60 +0,0 @@ -# Dijkstra's Algorithm is a widely used graph algorithm designed to find the -# shortest path from a source node to all other nodes in a weighted graph. It -# was developed by Dutch computer scientist Edsger W. Dijkstra in 1956. The -# algorithm is particularly effective when all edge weights are non-negative. 
- -def Dijkstra(Graph, _s, _d): - row = len(Graph) - col = len(Graph[0]) - dist = [float("Inf")] * row - Blackened = [0] * row - pathlength = [0] * row - parent = [-1] * row - dist[_s] = 0 - for count in range(row-1): - u = MinDistance(dist, Blackened) - - # if MinDistance() returns INFINITY, then the graph is not - # connected and we have traversed all of the vertices in the - # connected component of the source vertex, so it can reduce - # the time complexity sometimes - # In a directed graph, it means that the source vertex - # is not a root - if u == float("Inf"): - break - else: - - # Mark the vertex as Blackened - Blackened[u] = 1 - for v in range(row): - if Blackened[v] == 0 and Graph[u][v] and dist[u]+Graph[u][v] < dist[v]: - parent[v] = u - pathlength[v] = pathlength[parent[v]]+1 - dist[v] = dist[u]+Graph[u][v] - elif Blackened[v] == 0 and Graph[u][v] and dist[u]+Graph[u][v] == dist[v] and pathlength[u]+1 < pathlength[v]: - parent[v] = u - pathlength[v] = pathlength[u] + 1 - if dist[_d] != float("Inf"): - - # Printing the path - PrintPath(parent, _d) - else: - print("There is no path between vertex ", _s, "to vertex ", _d) - -# Function to print the path - -def PrintPath(parent, _d): - if parent[_d] == -1: - print(_d, end='') - return - PrintPath(parent, parent[_d]) - print("->", _d, end='') - - -def MinDistance(dist, Blackened): - min = float("Inf") - for v in range(len(dist)): - if not Blackened[v] and dist[v] < min: - min = dist[v] - Min_index = v - return float("Inf") if min == float("Inf") else Min_index From bcfd1487604e528fa1bcd2d730ca00c1ca0e9769 Mon Sep 17 00:00:00 2001 From: Prateek Date: Tue, 24 Oct 2023 14:46:25 +0530 Subject: [PATCH 80/89] algo: Added Bellman Ford Algorithm --- Graphs/BellMan_Ford_Algorithm.py | 53 ++++++++++++++++++++++++++++++++ 1 file changed, 53 insertions(+) create mode 100644 Graphs/BellMan_Ford_Algorithm.py diff --git a/Graphs/BellMan_Ford_Algorithm.py b/Graphs/BellMan_Ford_Algorithm.py new file mode 100644 index 0000000..0d305e3 --- /dev/null +++ b/Graphs/BellMan_Ford_Algorithm.py @@ -0,0 +1,53 @@ +# The Bellman-Ford Algorithm is a single-source, shortest-path algorithm used to find the shortest paths from a source node to all other nodes in a weighted graph, even when the graph contains negative edge weights. It works by iteratively relaxing the edges in the graph, ensuring that it can detect and handle negative weight cycles. + +# Class to represent a graph + +class Graph: + + def __init__(self, vertices): + self.V = vertices # No. of vertices + self.graph = [] + + # function to add an edge to graph + def addEdge(self, u, v, w): + self.graph.append([u, v, w]) + + # utility function used to print the solution + def printArr(self, dist): + print("Vertex Distance from Source") + for i in range(self.V): + print("{0}\t\t{1}".format(i, dist[i])) + + # The main function that finds shortest distances from src to + # all other vertices using Bellman-Ford algorithm. The function + # also detects negative weight cycle + def BellmanFord(self, src): + + # Step 1: Initialize distances from src to all other vertices + # as INFINITE + dist = [float("Inf")] * self.V + dist[src] = 0 + + # Step 2: Relax all edges |V| - 1 times. A simple shortest + # path from src to any other vertex can have at-most |V| - 1 + # edges + for _ in range(self.V - 1): + # Update dist value and parent index of the adjacent vertices of + # the picked vertex. 
Consider only those vertices which are still in
+            # queue
+            for u, v, w in self.graph:
+                if dist[u] != float("Inf") and dist[u] + w < dist[v]:
+                    dist[v] = dist[u] + w
+
+        # Step 3: check for negative-weight cycles. The above step
+        # guarantees shortest distances if graph doesn't contain
+        # negative weight cycle. If we get a shorter path, then there
+        # is a cycle.
+
+        for u, v, w in self.graph:
+            if dist[u] != float("Inf") and dist[u] + w < dist[v]:
+                print("Graph contains negative weight cycle")
+                return
+
+        # print all distances
+        self.printArr(dist)
\ No newline at end of file

From 6103b1a9e0879f6238e00f2e790c33497f48d20a Mon Sep 17 00:00:00 2001
From: Radhey644
Date: Wed, 25 Oct 2023 22:47:43 +0530
Subject: [PATCH 81/89] Prefix Sum code added

---
 Array/Prefix_Sum.cpp | 46 ++++++++++++++++++++++++++++++++++++++
 1 file changed, 46 insertions(+)
 create mode 100644 Array/Prefix_Sum.cpp

diff --git a/Array/Prefix_Sum.cpp b/Array/Prefix_Sum.cpp
new file mode 100644
index 0000000..8d34455
--- /dev/null
+++ b/Array/Prefix_Sum.cpp
@@ -0,0 +1,46 @@
+#include <iostream>
+#include <vector>
+
+// Function to calculate the prefix sum of an array
+std::vector<int> calculatePrefixSum(const std::vector<int>& arr) {
+    int n = arr.size();
+    std::vector<int> prefixSum(n, 0);
+
+    prefixSum[0] = arr[0];
+    for (int i = 1; i < n; i++) {
+        prefixSum[i] = prefixSum[i - 1] + arr[i];
+    }
+
+    return prefixSum;
+}
+
+int main() {
+    // Input the array size
+    int n;
+    std::cout << "Enter the size of the array: ";
+    std::cin >> n;
+
+    if (n <= 0) {
+        std::cout << "Array size must be a positive integer." << std::endl;
+        return 1; // Exit with an error code
+    }
+
+    // Input the elements of the array
+    std::vector<int> arr(n);
+    std::cout << "Enter " << n << " elements of the array: ";
+    for (int i = 0; i < n; i++) {
+        std::cin >> arr[i];
+    }
+
+    // Calculate the prefix sum
+    std::vector<int> prefixSum = calculatePrefixSum(arr);
+
+    // Display the prefix sum
+    std::cout << "Prefix Sum: ";
+    for (int i = 0; i < n; i++) {
+        std::cout << prefixSum[i] << " ";
+    }
+    std::cout << std::endl;
+
+    return 0;
+}

From 6291240693cf7c381081067f3e9db18a483d5b32 Mon Sep 17 00:00:00 2001
From: ydvmudit07
Date: Wed, 25 Oct 2023 23:07:40 +0530
Subject: [PATCH 82/89] kadane's algoritm code

---
 Algoritm/kadane's_algorithm.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)
 create mode 100644 Algoritm/kadane's_algorithm.py

diff --git a/Algoritm/kadane's_algorithm.py b/Algoritm/kadane's_algorithm.py
new file mode 100644
index 0000000..826f6d9
--- /dev/null
+++ b/Algoritm/kadane's_algorithm.py
@@ -0,0 +1,16 @@
+def max_subarray_sum(nums):
+    # Initialize variables to keep track of the maximum subarray sum
+
+    max_ending_here = nums[0] # Maximum sum ending at the current position
+    max_so_far = nums[0] # Maximum sum seen so far
+
+    # Iterate through the array, starting from the second element
+    for i in range(1, len(nums)):
+        # Calculate the maximum sum ending at the current position by considering whether it's better to start a new subarray or extend the previous one.
+        max_ending_here = max(nums[i], max_ending_here + nums[i])
+
+        # Update the maximum sum seen so far by comparing it with the maximum sum ending at the current position.
+        max_so_far = max(max_so_far, max_ending_here)
+
+    # The 'max_so_far' variable now contains the maximum subarray sum.
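+    # For example (an illustrative input): with nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
+    # the best subarray is [4, -1, 2, 1], so max_subarray_sum would return 6.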
+ return max_so_far From 0198111707bce5c19afd7b27ab6ba3af16c576e6 Mon Sep 17 00:00:00 2001 From: swayam patil Date: Thu, 26 Oct 2023 09:04:48 +0530 Subject: [PATCH 83/89] Create AVL_tree.py --- Tree/AVL_tree.py | 146 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 Tree/AVL_tree.py diff --git a/Tree/AVL_tree.py b/Tree/AVL_tree.py new file mode 100644 index 0000000..b4a8ed8 --- /dev/null +++ b/Tree/AVL_tree.py @@ -0,0 +1,146 @@ +class TreeNode: + def __init__(self, key): + self.key = key + self.left = None + self.right = None + self.height = 1 + + +class AVLTree: + def insert(self, root, key): + if not root: + return TreeNode(key) + if key < root.key: + root.left = self.insert(root.left, key) + else: + root.right = self.insert(root.right, key) + root.height = 1 + max(self.get_height(root.left), self.get_height(root.right)) + return self.balance(root) + + def delete(self, root, key): + if not root: + return root + if key < root.key: + root.left = self.delete(root.left, key) + elif key > root.key: + root.right = self.delete(root.right, key) + else: + if not root.left: + temp = root.right + root = None + return temp + elif not root.right: + temp = root.left + root = None + return temp + temp = self.get_min_value_node(root.right) + root.key = temp.key + root.right = self.delete(root.right, temp.key) + root.height = 1 + max(self.get_height(root.left), self.get_height(root.right)) + return self.balance(root) + + def get_height(self, node): + if not node: + return 0 + return node.height + + def get_balance(self, node): + if not node: + return 0 + return self.get_height(node.left) - self.get_height(node.right) + + def balance(self, node): + if not node: + return node + balance = self.get_balance(node) + if balance > 1: + if self.get_balance(node.left) < 0: + node.left = self.left_rotate(node.left) + return self.right_rotate(node) + if balance < -1: + if self.get_balance(node.right) > 0: + node.right = self.right_rotate(node.right) + return self.left_rotate(node) + return node + + def left_rotate(self, z): + y = z.right + T2 = y.left + y.left = z + z.right = T2 + z.height = 1 + max(self.get_height(z.left), self.get_height(z.right)) + y.height = 1 + max(self.get_height(y.left), self.get_height(y.right)) + return y + + def right_rotate(self, y): + x = y.left + T2 = x.right + x.right = y + y.left = T2 + y.height = 1 + max(self.get_height(y.left), self.get_height(y.right)) + x.height = 1 + max(self.get_height(x.left), self.get_height(x.right)) + return x + + def get_min_value_node(self, node): + if node is None or node.left is None: + return node + return self.get_min_value_node(node.left) + + def inorder_traversal(self, root): + if root: + self.inorder_traversal(root.left) + print(root.key, end=" ") + self.inorder_traversal(root.right) + +# Example usage: +if __name__ == "__main__": + avl_tree = AVLTree() + root = None + keys = [9, 5, 10, 0, 6, 11, -1, 1, 2] + for key in keys: + root = avl_tree.insert(root, key) + + print("Inorder Traversal of AVL tree:") + avl_tree.inorder_traversal(root) + + key_to_delete = 10 + root = avl_tree.delete(root, key_to_delete) + print("\nAfter deleting", key_to_delete) + avl_tree.inorder_traversal(root) + +class AVLTreeMenu: + def __init__(self): + self.avl_tree = AVLTree() + self.root = None + + def display_menu(self): + print("AVL Tree Menu:") + print("1. Insert a key") + print("2. Delete a key") + print("3. Display the AVL tree") + print("4. 
Exit") + + def run(self): + while True: + self.display_menu() + choice = input("Enter your choice: ") + if choice == "1": + key = int(input("Enter the key to insert: ")) + self.root = self.avl_tree.insert(self.root, key) + print(f"Key {key} inserted.") + elif choice == "2": + key = int(input("Enter the key to delete: ")) + self.root = self.avl_tree.delete(self.root, key) + print(f"Key {key} deleted.") + elif choice == "3": + print("Inorder Traversal of AVL tree:") + self.avl_tree.inorder_traversal(self.root) + elif choice == "4": + print("Exiting the AVL Tree Menu.") + break + else: + print("Invalid choice. Please enter a valid option.") + +if __name__ == "__main__": + avl_tree_menu = AVLTreeMenu() + avl_tree_menu.run() From 5a772a900f8241b8ae5838d76da01c02a0c8e1a9 Mon Sep 17 00:00:00 2001 From: Radhey644 Date: Thu, 26 Oct 2023 10:51:12 +0530 Subject: [PATCH 84/89] Code converted in the python format --- Array/Prefix_Sum.py | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) create mode 100644 Array/Prefix_Sum.py diff --git a/Array/Prefix_Sum.py b/Array/Prefix_Sum.py new file mode 100644 index 0000000..4bfa726 --- /dev/null +++ b/Array/Prefix_Sum.py @@ -0,0 +1,25 @@ +# Function to calculate the prefix sum of a list using list comprehension +def calculatePrefixSum(arr): + prefixSum = [sum(arr[:i + 1]) for i in range(len(arr))] + return prefixSum + +def main(): + # Input the list size + n = int(input("Enter the size of the list: ")) + + if n <= 0: + print("List size must be a positive integer.") + return 1 # Exit with an error code + + # Input the elements of the list + print(f"Enter {n} elements of the list:") + arr = [int(input()) for _ in range(n)] + + # Calculate the prefix sum + prefixSum = calculatePrefixSum(arr) + + # Display the prefix sum + print("Prefix Sum:", prefixSum) + +if __name__ == "__main__": + main() From 00e403dc799c07afcd70ebbdd996f524f34e708b Mon Sep 17 00:00:00 2001 From: Radhey644 Date: Thu, 26 Oct 2023 10:51:42 +0530 Subject: [PATCH 85/89] Code converted in the python format --- Array/Prefix_Sum.cpp | 46 -------------------------------------------- 1 file changed, 46 deletions(-) delete mode 100644 Array/Prefix_Sum.cpp diff --git a/Array/Prefix_Sum.cpp b/Array/Prefix_Sum.cpp deleted file mode 100644 index 8d34455..0000000 --- a/Array/Prefix_Sum.cpp +++ /dev/null @@ -1,46 +0,0 @@ -#include -#include - -// Function to calculate the prefix sum of an array -std::vector calculatePrefixSum(const std::vector& arr) { - int n = arr.size(); - std::vector prefixSum(n, 0); - - prefixSum[0] = arr[0]; - for (int i = 1; i < n; i++) { - prefixSum[i] = prefixSum[i - 1] + arr[i]; - } - - return prefixSum; -} - -int main() { - // Input the array size - int n; - std::cout << "Enter the size of the array: "; - std::cin >> n; - - if (n <= 0) { - std::cout << "Array size must be a positive integer." 
<< std::endl; - return 1; // Exit with an error code - } - - // Input the elements of the array - std::vector arr(n); - std::cout << "Enter " << n << " elements of the array: "; - for (int i = 0; i < n; i++) { - std::cin >> arr[i]; - } - - // Calculate the prefix sum - std::vector prefixSum = calculatePrefixSum(arr); - - // Display the prefix sum - std::cout << "Prefix Sum: "; - for (int i = 0; i < n; i++) { - std::cout << prefixSum[i] << " "; - } - std::cout << std::endl; - - return 0; -} From df4fa822fd1c881d1df11b103bc39392b0a08273 Mon Sep 17 00:00:00 2001 From: ydvmudit07 Date: Thu, 26 Oct 2023 14:37:18 +0530 Subject: [PATCH 86/89] file directory changed --- {Algoritm => Math}/kadane's_algorithm.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) rename {Algoritm => Math}/kadane's_algorithm.py (78%) diff --git a/Algoritm/kadane's_algorithm.py b/Math/kadane's_algorithm.py similarity index 78% rename from Algoritm/kadane's_algorithm.py rename to Math/kadane's_algorithm.py index 826f6d9..dcc8858 100644 --- a/Algoritm/kadane's_algorithm.py +++ b/Math/kadane's_algorithm.py @@ -6,11 +6,12 @@ def max_subarray_sum(nums): # Iterate through the array, starting from the second element for i in range(1, len(nums)): - # Calculate the maximum sum ending at the current position by considering whether it's better to start a new subarray or extend the previous one. + # Calculate the maximum sum ending at the current position by considering whether it's better to + # start a new subarray or extend the previous one. max_ending_here = max(nums[i], max_ending_here + nums[i]) - # Update the maximum sum seen so far by comparing it with the maximum sum ending at the current position. + # Update the maximum sum seen so far by comparing it with the maximum sum ending at the current position. max_so_far = max(max_so_far, max_ending_here) # The 'max_so_far' variable now contains the maximum subarray sum. - return max_so_far + return max_so_far \ No newline at end of file From 88650ef88bc33aa5efc11a15441ede02dc61dab6 Mon Sep 17 00:00:00 2001 From: Radhey644 Date: Thu, 26 Oct 2023 20:18:58 +0530 Subject: [PATCH 87/89] File location changed --- {Array => Math}/Prefix_Sum.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename {Array => Math}/Prefix_Sum.py (100%) diff --git a/Array/Prefix_Sum.py b/Math/Prefix_Sum.py similarity index 100% rename from Array/Prefix_Sum.py rename to Math/Prefix_Sum.py From 3483b051c7a8300abbab45c83f06fd932f5d6da9 Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Mon, 8 Jan 2024 14:06:09 +0530 Subject: [PATCH 88/89] Update README.md --- README.md | 17 +++++++++++------ 1 file changed, 11 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index 369bbdf..1d49648 100644 --- a/README.md +++ b/README.md @@ -60,14 +60,19 @@ This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/) +## 🪪 License +This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/). + +
Connect with me
-  Github
-  LinkedIn
-  Instagram
-  Facebook
-  Gmail
(Back to top)
+  Github
+  LinkedIn
+  Twitter
+  Instagram
+  Gmail
(Back to top)
From 721d4ba6439b55f352b1c587f8ae873314a7ad7c Mon Sep 17 00:00:00 2001 From: Himanshu Agarwal Date: Mon, 8 Jan 2024 14:06:34 +0530 Subject: [PATCH 89/89] Update README.md --- README.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/README.md b/README.md index 1d49648..e24dec8 100644 --- a/README.md +++ b/README.md @@ -60,9 +60,6 @@ This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/) -## 🪪 License -This project follows the [MIT LICENSE](https://choosealicense.com/licenses/mit/). -