
Lab Number 3

Abdul Rafey Syed (22f-3386)

Task 1:

from collections import defaultdict, deque

# Class representing a directed graph using an adjacency list
class Graph:
    def __init__(self):
        self.graph = defaultdict(list)  # Dictionary to store the adjacency list
        self.num_edges = 0              # Track the number of edges

    # Add a directed edge to the graph
    def addEdge(self, u, v):
        self.graph[u].append(v)
        self.num_edges += 1  # Increment edge count

    # Perform BFS traversal and report complexity metrics
    def BFS(self, start_node, goal_node):
        visited = set()              # Set to track visited nodes
        queue = deque([start_node])  # Initialize queue with the start node
        parent = {}                  # Parent map used to reconstruct the path
        nodes_visited = 0            # Counter for time complexity
        max_nodes_in_memory = 0      # Maximum number of nodes in memory at any point before the goal

        while queue:
            current_level_size = len(queue)  # Track current level size

            # Condition: only update max_nodes_in_memory if the goal is NOT in the current level
            if goal_node not in queue:
                max_nodes_in_memory = max(max_nodes_in_memory, current_level_size)

            for _ in range(current_level_size):
                node = queue.popleft()  # Dequeue a vertex
                nodes_visited += 1      # Increment visited-node count

                # If we reached the goal, reconstruct the path
                if node == goal_node:
                    path = []
                    while node:
                        path.append(node)
                        node = parent.get(node)
                    return path[::-1], nodes_visited, max_nodes_in_memory

                if node not in visited:
                    visited.add(node)  # Mark as visited
                    for neighbor in self.graph[node]:
                        if neighbor not in visited:
                            queue.append(neighbor)
                            parent[neighbor] = node  # Store parent for path reconstruction

        return None, nodes_visited, max_nodes_in_memory  # Goal not found

# Driver code
g = Graph()

# Creating the web-crawling graph from Task 1
g.addEdge("Home", "Products")
g.addEdge("Home", "Blogs")
g.addEdge("Home", "About Us")
g.addEdge("Home", "Contact Us")
g.addEdge("Products", "Product 1")
g.addEdge("Products", "Product 2")
g.addEdge("Products", "Product 3")
g.addEdge("Blogs", "Blog Post 1")
g.addEdge("Blogs", "Blog Post 2")

# Set start and goal nodes
start_node = "Home"
goal_node = "Blog Post 2"

# Perform BFS search
bfs_path, nodes_visited, max_nodes_in_memory = g.BFS(start_node, goal_node)

# Display results
if bfs_path:
    print("Path from", start_node, "to", goal_node, ":", " -> ".join(bfs_path))
    print("Total Nodes Visited (Time Complexity):", nodes_visited)
    print("Max Nodes in Memory Before Goal (Space Complexity):", max_nodes_in_memory)
else:
    print("Goal node", goal_node, "not found.")
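As a sanity check, tracing the driver code by hand: BFS finds the goal on the third level after dequeuing all ten pages, and the largest level held in memory before the goal appears is the four children of Home. The run should therefore print:

Path from Home to Blog Post 2 : Home -> Blogs -> Blog Post 2
Total Nodes Visited (Time Complexity): 10
Max Nodes in Memory Before Goal (Space Complexity): 4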

Task 2:
from collections import defaultdict

# Class representing a directed graph using an adjacency list
class WebGraph:
    def __init__(self):
        self.site_map = defaultdict(list)  # Dictionary to store page connections
        self.link_count = 0                # Track the number of links

    # Create a link between two pages
    def add_link(self, source, destination):
        self.site_map[source].append(destination)
        self.link_count += 1  # Increase link count

    # Perform Depth-First Search (DFS)
    def execute_DFS(self, entry_point, target_page):
        visited_pages = set()                               # Set to track visited pages
        exploration_stack = [(entry_point, [entry_point])]  # Stack holding (page, path)
        pages_explored = 0                                  # Counter for time complexity
        max_memory_usage = 0                                # Maximum pages in memory

        while exploration_stack:
            # Track the peak memory usage
            max_memory_usage = max(max_memory_usage, len(exploration_stack))

            current_page, navigation_path = exploration_stack.pop()  # Retrieve last added element (LIFO)
            pages_explored += 1  # Increment visited count

            # If the destination is found, return the path
            if current_page == target_page:
                return navigation_path, pages_explored, max_memory_usage

            if current_page not in visited_pages:
                visited_pages.add(current_page)  # Mark page as visited
                # Push connected pages in reverse order to maintain consistent traversal
                for linked_page in reversed(self.site_map[current_page]):
                    if linked_page not in visited_pages:
                        exploration_stack.append((linked_page, navigation_path + [linked_page]))

        return None, pages_explored, max_memory_usage  # Target not found

# Driver code
web = WebGraph()

# Constructing the site navigation graph
web.add_link("Home", "Products")
web.add_link("Home", "Blogs")
web.add_link("Home", "About Us")
web.add_link("Home", "Contact Us")
web.add_link("Products", "Product 1")
web.add_link("Products", "Product 2")
web.add_link("Products", "Product 3")
web.add_link("Blogs", "Blog Post 1")
web.add_link("Blogs", "Blog Post 2")

# Define start and goal pages
start_location = "Home"
target_location = "Product 3"

# Execute DFS
path_traversed, total_pages_checked, peak_memory_used = web.execute_DFS(start_location, target_location)

# Display results
if path_traversed:
    print("Traversal path from", start_location, "to", target_location, ":", " → ".join(path_traversed))
    print("Total Pages Explored (Time Complexity):", total_pages_checked)
    print("Maximum Pages Held in Memory Before Completion (Space Complexity):", peak_memory_used)
else:
    print("The target page", target_location, "was not located.")
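Because neighbors are pushed in reverse, the stack pops Home's children in their original order, so DFS dives into Products first and reaches the goal on the fifth pop. Hand-tracing the run gives:

Traversal path from Home to Product 3 : Home → Products → Product 3
Total Pages Explored (Time Complexity): 5
Maximum Pages Held in Memory Before Completion (Space Complexity): 6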

Task 3:
# Synonyms dictionary as a graph representation
synonyms = {
    'happy': ['joyful', 'cheerful', 'glad', 'content', 'pleased'],
    'joyful': ['happy', 'cheerful', 'glad', 'content', 'pleased'],
    'cheerful': ['happy', 'joyful', 'glad', 'content', 'pleased'],
    'glad': ['happy', 'joyful', 'cheerful', 'content', 'pleased'],
    'content': ['happy', 'joyful', 'cheerful', 'glad', 'pleased'],
    'pleased': ['happy', 'joyful', 'cheerful', 'glad', 'content'],
    'sad': ['unhappy', 'sorrowful', 'depressed', 'melancholy', 'gloomy'],
    'unhappy': ['sad', 'sorrowful', 'depressed', 'melancholy', 'gloomy'],
    'sorrowful': ['sad', 'unhappy', 'depressed', 'melancholy', 'gloomy'],
    'depressed': ['sad', 'unhappy', 'sorrowful', 'melancholy', 'gloomy'],
    'melancholy': ['sad', 'unhappy', 'sorrowful', 'depressed', 'gloomy'],
    'gloomy': ['sad', 'unhappy', 'sorrowful', 'depressed', 'melancholy'],
    'big': ['large', 'huge', 'enormous', 'immense', 'gigantic'],
    'large': ['big', 'huge', 'enormous', 'immense', 'gigantic'],
    'huge': ['big', 'large', 'enormous', 'immense', 'gigantic'],
    'enormous': ['big', 'large', 'huge', 'immense', 'gigantic'],
    'immense': ['big', 'large', 'huge', 'enormous', 'gigantic'],
    'gigantic': ['big', 'large', 'huge', 'enormous', 'immense'],
    'small': ['little', 'tiny', 'miniature', 'petite', 'diminutive'],
    'little': ['small', 'tiny', 'miniature', 'petite', 'diminutive'],
    'tiny': ['small', 'little', 'miniature', 'petite', 'diminutive'],
    'miniature': ['small', 'little', 'tiny', 'petite', 'diminutive'],
    'petite': ['small', 'little', 'tiny', 'miniature', 'diminutive'],
    'diminutive': ['small', 'little', 'tiny', 'miniature', 'petite'],
    'hot': ['warm', 'heated', 'boiling', 'sizzling', 'scorching'],
    'warm': ['hot', 'heated', 'boiling', 'sizzling', 'scorching'],
    'heated': ['hot', 'warm', 'boiling', 'sizzling', 'scorching'],
    'boiling': ['hot', 'warm', 'heated', 'sizzling', 'scorching'],
    'sizzling': ['hot', 'warm', 'heated', 'boiling', 'scorching'],
    'scorching': ['hot', 'warm', 'heated', 'boiling', 'sizzling'],
    'cold': ['chilly', 'cool', 'freezing', 'icy', 'frigid'],
    'chilly': ['cold', 'cool', 'freezing', 'icy', 'frigid'],
    'cool': ['cold', 'chilly', 'freezing', 'icy', 'frigid'],
    'freezing': ['cold', 'chilly', 'cool', 'icy', 'frigid'],
    'icy': ['cold', 'chilly', 'cool', 'freezing', 'frigid'],
    'frigid': ['cold', 'chilly', 'cool', 'freezing', 'icy'],
    'fast': ['quick', 'rapid', 'speedy', 'swift', 'brisk'],
    'quick': ['fast', 'rapid', 'speedy', 'swift', 'brisk'],
    'rapid': ['fast', 'quick', 'speedy', 'swift', 'brisk'],
    'speedy': ['fast', 'quick', 'rapid', 'swift', 'brisk'],
    'swift': ['fast', 'quick', 'rapid', 'speedy', 'brisk'],
    'brisk': ['fast', 'quick', 'rapid', 'speedy', 'swift'],
    'slow': ['leisurely', 'sluggish', 'tardy', 'plodding', 'gradual'],
    'leisurely': ['slow', 'sluggish', 'tardy', 'plodding', 'gradual'],
    'sluggish': ['slow', 'leisurely', 'tardy', 'plodding', 'gradual'],
    'tardy': ['slow', 'leisurely', 'sluggish', 'plodding', 'gradual'],
    'plodding': ['slow', 'leisurely', 'sluggish', 'tardy', 'gradual'],
    'gradual': ['slow', 'leisurely', 'sluggish', 'tardy', 'plodding']
}

# Depth-Limited Search (DLS) function
def depth_limited_search(start_word, goal_word, depth_limit):
    stack = [(start_word, [start_word], 0)]  # (current word, path, depth)
    nodes_visited = 0        # Time complexity counter
    max_nodes_in_memory = 0  # Space complexity (peak stack size)

    while stack:
        max_nodes_in_memory = max(max_nodes_in_memory, len(stack))  # Track max stack size

        word, path, depth = stack.pop()  # LIFO: last in, first out
        nodes_visited += 1               # Increment visited count

        # If the goal word is found, return the path
        if word == goal_word:
            return path, nodes_visited, max_nodes_in_memory

        # If within the depth limit, continue exploring
        if depth < depth_limit:
            for synonym in synonyms.get(word, []):
                if synonym not in path:  # Avoid cycles
                    stack.append((synonym, path + [synonym], depth + 1))

    return None, nodes_visited, max_nodes_in_memory  # Goal not found

# Main execution
if __name__ == "__main__":
    start_word = input("Enter the starting word: ").strip()
    goal_word = input("Enter the word to search (goal): ").strip()
    depth_limit = int(input("Enter the depth limit: "))

    # Perform Depth-Limited Search
    path, nodes_visited, max_nodes_in_memory = depth_limited_search(start_word, goal_word, depth_limit)

    # Display results
    if path:
        print(f"Path from {start_word} to {goal_word}: {' -> '.join(path)}")
        print(f"Total Nodes Visited (Time Complexity): {nodes_visited}")
        print(f"Max Nodes in Memory Before Goal (Space Complexity): {max_nodes_in_memory}")
    else:
        print(f"Goal word '{goal_word}' not found within depth limit.")

Task 4:
import json
import time

# Load the dictionary graph from the file
def load_dictionary(filename):
    with open(filename, 'r') as file:
        synonyms = json.load(file)
    return synonyms

# Depth-Limited Search (DLS) function
def depth_limited_search(graph, current_word, goal_word, limit, path, visited, nodes_explored):
    nodes_explored[0] += 1  # Increment the node count

    if current_word == goal_word:
        return path  # Goal found, return the path

    if limit <= 0:
        return None  # Depth limit reached

    visited.add(current_word)
    for neighbor in graph.get(current_word, []):
        if neighbor not in visited:
            result = depth_limited_search(graph, neighbor, goal_word, limit - 1,
                                          path + [neighbor], visited, nodes_explored)
            if result:
                return result  # Return the path if found

    return None  # Goal not found within the depth limit

# Iterative Deepening DFS (IDDFS)
def iterative_deepening_search(graph, start_word, goal_word, max_depth):
    nodes_explored = [0]  # Track the number of nodes explored (list to allow mutation)
    for depth in range(max_depth + 1):
        visited = set()
        result = depth_limited_search(graph, start_word, goal_word, depth,
                                      [start_word], visited, nodes_explored)
        if result:
            return result, depth, nodes_explored[0]  # Return path, depth, and explored nodes
    return None, max_depth, nodes_explored[0]  # Return None if not found within the depth limit

# Main function to run the IDDFS search
def main():
    dictionary_file = "EnglishDictionary.txt"

    # Load the dictionary graph
    graph = load_dictionary(dictionary_file)

    # User input for search parameters
    start_word = input("Enter the starting word: ").strip().lower()
    goal_word = input("Enter the word to search: ").strip().lower()
    max_depth = int(input("Enter the maximum depth for search: "))

    start_time = time.time()  # Start time for the timing measurement

    # Perform IDDFS
    path, depth_reached, nodes_explored = iterative_deepening_search(graph, start_word,
                                                                     goal_word, max_depth)

    end_time = time.time()  # End time for the timing measurement

    if path:
        print(" -> ".join(path))

        # Complexity calculation
        branching_factor = max(len(neighbors) for neighbors in graph.values())  # Estimate max branching factor
        print(f"Time Complexity: O({branching_factor}^{depth_reached}) ≈ {branching_factor ** depth_reached}")
        print(f"Space Complexity: O({depth_reached}) ≈ {depth_reached}")
    else:
        print(f"Goal word '{goal_word}' not found within depth {max_depth}.")

    print(f"Time taken (seconds): {end_time - start_time}")

# Run the program
if __name__ == "__main__":
    main()
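Note that load_dictionary parses the file with json.load, so despite the .txt extension EnglishDictionary.txt must contain a JSON object mapping each word to its synonym list. A minimal, hypothetical example of the expected file contents:

{
    "happy": ["joyful", "cheerful", "glad"],
    "joyful": ["happy", "cheerful"],
    "cheerful": ["happy", "joyful"],
    "glad": ["happy"]
}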

Task 5:
import heapq

# Graph representation: city -> list of (neighbor, edge cost)
graph = {
    "Faisalabad": [("Lahore", 2), ("Chiniot", 1), ("Islamabad", 4), ("Sargodha", 2)],
    "Lahore": [("Islamabad", 5), ("Faisalabad", 2)],
    "Chiniot": [("Islamabad", 6), ("Lahore", 3)],
    "Rawalpindi": [("Islamabad", 1), ("Murree", 1)]
}

# Uniform Cost Search (UCS) function
def uniform_cost_search(graph, start, goal):
    priority_queue = [(0, start, [start])]  # (cost, current_node, path)
    visited = set()
    nodes_explored = 0       # Time complexity counter
    max_nodes_in_memory = 0  # Space complexity (peak queue size)

    while priority_queue:
        max_nodes_in_memory = max(max_nodes_in_memory, len(priority_queue))  # Track max queue size

        cost, node, path = heapq.heappop(priority_queue)
        nodes_explored += 1  # Increment visited count

        if node == goal:
            return path, cost, nodes_explored, max_nodes_in_memory

        if node not in visited:
            visited.add(node)
            for neighbor, edge_cost in graph.get(node, []):
                if neighbor not in visited:
                    heapq.heappush(priority_queue, (cost + edge_cost, neighbor, path + [neighbor]))

    return None, float("inf"), nodes_explored, max_nodes_in_memory  # Goal not found

# Main execution
if __name__ == "__main__":
    start = input("Enter the start city: ").strip()
    goal = input("Enter the goal city: ").strip()

    path, total_cost, nodes_explored, max_nodes_in_memory = uniform_cost_search(graph, start, goal)

    if path:
        print(f"Path found: {' -> '.join(path)}")
        print(f"Total Cost: {total_cost}")
    else:
        print("No path found.")

    print("\n### Complexity Analysis ###")
    print(f"Total Nodes Explored (Time Complexity): {nodes_explored}")
    print(f"Max Nodes in Memory (Space Complexity): {max_nodes_in_memory}")
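Cities such as Islamabad, Sargodha, and Murree have no outgoing edges; graph.get(node, []) keeps the search from raising a KeyError on them. Hand-tracing a run from Faisalabad to Islamabad: the direct edge costs 4, which is cheaper than going via Chiniot (1 + 6 = 7) or Lahore (2 + 5 = 7), so the expected report is:

Path found: Faisalabad -> Islamabad
Total Cost: 4

### Complexity Analysis ###
Total Nodes Explored (Time Complexity): 5
Max Nodes in Memory (Space Complexity): 5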

Task 6:
from collections import deque
import time

def bidirectional_search(graph, start, goal):
    if start not in graph or goal not in graph:
        return None  # If start or goal is not in the graph, there is no path

    # The graph is directed, so the backward search must follow edges in reverse;
    # without this reversed adjacency map, the backward frontier dies at leaf
    # pages (e.g. "Blog 1" has no outgoing links) and no path is ever found
    reversed_graph = {node: [] for node in graph}
    for node, neighbors in graph.items():
        for neighbor in neighbors:
            reversed_graph.setdefault(neighbor, []).append(node)

    # Initialize forward and backward search queues
    forward_queue = deque([start])
    backward_queue = deque([goal])

    # Visited sets for tracking explored nodes
    forward_visited = {start}
    backward_visited = {goal}

    # Parent maps for reconstructing the path
    forward_parent = {start: None}
    backward_parent = {goal: None}

    while forward_queue and backward_queue:
        # Forward step
        current_forward = forward_queue.popleft()
        for neighbor in graph.get(current_forward, []):
            if neighbor not in forward_visited:
                forward_visited.add(neighbor)
                forward_parent[neighbor] = current_forward
                forward_queue.append(neighbor)
                if neighbor in backward_visited:  # Meeting point found
                    return construct_path(forward_parent, backward_parent, neighbor)

        # Backward step (follows reversed edges)
        current_backward = backward_queue.popleft()
        for neighbor in reversed_graph.get(current_backward, []):
            if neighbor not in backward_visited:
                backward_visited.add(neighbor)
                backward_parent[neighbor] = current_backward
                backward_queue.append(neighbor)
                if neighbor in forward_visited:  # Meeting point found
                    return construct_path(forward_parent, backward_parent, neighbor)

    return None  # If no path is found

# Path reconstruction function
def construct_path(forward_parent, backward_parent, intersection):
    path = [intersection]

    # Walk back to the start along the forward parents
    current = intersection
    while current in forward_parent and forward_parent[current] is not None:
        current = forward_parent[current]
        path.insert(0, current)

    # Walk forward to the goal along the backward parents
    current = intersection
    while current in backward_parent and backward_parent[current] is not None:
        current = backward_parent[current]
        path.append(current)

    return path

# Define the web-crawling graph
graph = {
    "Home": ["Products", "Blogs", "About Us", "Contact Us"],
    "Products": ["Product 1", "Product 2"],
    "Blogs": ["Blog 1", "Blog 2"],
    "About Us": [],
    "Contact Us": [],
    "Product 1": [],
    "Product 2": [],
    "Blog 1": [],
    "Blog 2": []
}

# Test the search
start_node = "Home"
goal_node = "Blog 1"

start_time = time.time()
path = bidirectional_search(graph, start_node, goal_node)
end_time = time.time()

# Output results
if path:
    print("Path found:", " -> ".join(path))
else:
    print("No path found.")

# Calculate complexities: each frontier only expands to about half the solution
# depth, so bidirectional BFS is O(b^(d/2)) in both time and space
branching_factor = max(len(neighbors) for neighbors in graph.values())
depth = len(path) - 1 if path else 0
print(f"Time Complexity: O({branching_factor}^({depth}/2))")
print(f"Space Complexity: O({branching_factor}^({depth}/2))")
print(f"Time taken (seconds): {end_time - start_time}")
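With the reversed-edge map in place, the forward frontier (from Home) and the backward frontier (from Blog 1) meet at Blogs on the first iteration, so aside from the machine-dependent timing line the run should print:

Path found: Home -> Blogs -> Blog 1
Time Complexity: O(4^(2/2))
Space Complexity: O(4^(2/2))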
