Siddharth Shah | 1032221195 | Div C | 50 | DL Lab Assignment 5

Description

Uploaded by

1032221195
Copyright
© © All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
11 views10 pages

SiddharthShah 1032221195 DivC 50 DL LabAssignment5

Description

Uploaded by

1032221195
Copyright
© © All Rights Reserved
We take content rights seriously. If you suspect this is your content, claim it here.
Available Formats
Download as PDF, TXT or read online on Scribd
You are on page 1/ 10

import numpy as np

import matplotlib.pyplot as plt

# Activation Function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
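
Backpropagation below relies on the identity sigmoid'(x) = sigmoid(x) * (1 - sigmoid(x)); a quick numerical sanity check (illustrative, not part of the original assignment):

# Illustrative check: central difference vs. the analytic derivative
x0, h = 0.5, 1e-6
numeric = (sigmoid(x0 + h) - sigmoid(x0 - h)) / (2 * h)
analytic = sigmoid(x0) * (1 - sigmoid(x0))
print(abs(numeric - analytic) < 1e-8)  # Expect True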

# Initialize Parameters
def initialize_parameters(n_inputs):
    weights = np.random.rand(n_inputs)  # Shape (n_inputs,)
    bias = np.random.rand(1)            # Shape (1,)
    return weights, bias

# Forward Propagation
def forward_propagation(X, weights, bias):
    z = np.dot(X, weights) + bias  # z shape (m,) where m is the number of samples
    return sigmoid(z)

# Backward Propagation
def backward_propagation(X, Y, output, weights, bias, learning_rate):
    m = Y.shape[0]                # Number of samples
    error = output - Y.flatten()  # Flatten Y so shapes stay consistent

    # Compute gradients of the mean squared error via the chain rule
    d_weights = np.dot(X.T, error * output * (1 - output)) / m  # Shape (n_inputs,)
    d_bias = np.sum(error * output * (1 - output)) / m          # Scalar

    # Gradient-descent update
    weights -= learning_rate * d_weights
    bias -= learning_rate * d_bias
    return weights, bias
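
For reference, the gradients above follow from the chain rule: with mean squared error L = (1/m) Σᵢ (ŷᵢ − yᵢ)² and ŷ = σ(Xw + b), one gets ∂L/∂w = (2/m) Xᵀ[(ŷ − y) ⊙ ŷ ⊙ (1 − ŷ)] and ∂L/∂b = (2/m) Σᵢ (ŷᵢ − yᵢ) ŷᵢ (1 − ŷᵢ). The code drops the constant factor 2, which only rescales the effective learning rate.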

# Train the Model
def train(X, Y, learning_rate, num_iterations):
    weights, bias = initialize_parameters(X.shape[1])
    loss_history = []

    for i in range(num_iterations):
        output = forward_propagation(X, weights, bias)
        loss = np.mean((output - Y.flatten()) ** 2)  # Mean Squared Error
        loss_history.append(loss)

        weights, bias = backward_propagation(X, Y, output, weights, bias, learning_rate)

    return weights, bias, loss_history

# Testing the Model Performance
def test(X, weights, bias):
    output = forward_propagation(X, weights, bias)
    predictions = np.round(output)  # Threshold at 0.5
    return predictions
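
A small helper (hypothetical, not in the original assignment) to quantify how well the learned neuron matches the targets:

# Illustrative helper: fraction of correct predictions
def accuracy(X, Y, weights, bias):
    return np.mean(test(X, weights, bias) == Y.flatten())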

# Parameters
learning_rate = 0.1
num_iterations = 10000

# AND Gate Data
X_and = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])  # Shape (4, 2)
Y_and = np.array([[0], [0], [0], [1]])              # Shape (4, 1)

weights_and, bias_and, loss_history_and = train(X_and, Y_and, learning_rate, num_iterations)

plt.plot(loss_history_and)
plt.title('Loss vs Epoch for AND Gate')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
print("AND Gate Predictions:")
print(test(X_and, weights_and, bias_and))
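
AND is linearly separable, so with these hyperparameters the rounded outputs should reproduce the truth table; a quick check (illustrative):

# Expect True once training has converged; if not, more iterations may help
print(np.array_equal(test(X_and, weights_and, bias_and), Y_and.flatten()))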

# XOR Gate Data
X_xor = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])  # Shape (4, 2)
Y_xor = np.array([[0], [1], [1], [0]])              # Shape (4, 1)

weights_xor, bias_xor, loss_history_xor = train(X_xor, Y_xor, learning_rate, num_iterations)

plt.plot(loss_history_xor)
plt.title('Loss vs Epoch for XOR Gate')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
print("XOR Gate Predictions:")
print(test(X_xor, weights_xor, bias_xor))
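
XOR is not linearly separable, so this single neuron cannot fit it: the loss plateaus and the raw outputs hover near 0.5, making the rounded predictions unreliable. A minimal sketch of a one-hidden-layer network that can learn XOR (illustrative, reusing the sigmoid above; hidden size, learning rate, iteration count, and seed are arbitrary choices, and a poor seed can still land in a local minimum):

# Illustrative 2-2-1 network trained with the same sigmoid/MSE setup
def train_xor_mlp(X, Y, hidden=2, lr=0.5, iters=20000, seed=0):
    rng = np.random.default_rng(seed)
    W1 = rng.normal(size=(X.shape[1], hidden))  # input -> hidden weights
    b1 = np.zeros(hidden)
    W2 = rng.normal(size=hidden)                # hidden -> output weights
    b2 = 0.0
    y, m = Y.flatten(), Y.shape[0]
    for _ in range(iters):
        h = sigmoid(X @ W1 + b1)                 # hidden activations, shape (m, hidden)
        out = sigmoid(h @ W2 + b2)               # output, shape (m,)
        d_out = (out - y) * out * (1 - out) / m  # output-layer delta
        d_h = np.outer(d_out, W2) * h * (1 - h)  # hidden-layer delta, shape (m, hidden)
        W2 -= lr * h.T @ d_out
        b2 -= lr * d_out.sum()
        W1 -= lr * X.T @ d_h
        b1 -= lr * d_h.sum(axis=0)
    return W1, b1, W2, b2

W1, b1, W2, b2 = train_xor_mlp(X_xor, Y_xor)
print("MLP XOR Predictions:")
print(np.round(sigmoid(sigmoid(X_xor @ W1 + b1) @ W2 + b2)))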

# NOR Gate Data
X_nor = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])  # Shape (4, 2)
Y_nor = np.array([[1], [0], [0], [0]])              # Shape (4, 1)

weights_nor, bias_nor, loss_history_nor = train(X_nor, Y_nor, learning_rate, num_iterations)

plt.plot(loss_history_nor)
plt.title('Loss vs Epoch for NOR Gate')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
print("NOR Gate Predictions:")
print(test(X_nor, weights_nor, bias_nor))
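
Since AND and NOR are linearly separable while XOR is not, the single neuron should recover the first two truth tables but not XOR's. A quick comparison using the illustrative accuracy helper defined after test above:

print("AND accuracy:", accuracy(X_and, Y_and, weights_and, bias_and))
print("XOR accuracy:", accuracy(X_xor, Y_xor, weights_xor, bias_xor))
print("NOR accuracy:", accuracy(X_nor, Y_nor, weights_nor, bias_nor))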
