SiddharthShah 1032221195 DivC 50 DL LabAssignment5
import numpy as np
import matplotlib.pyplot as plt

# Activation Function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
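# Quick sanity check (the sample array below is illustrative, not assignment data):
# sigmoid maps any real input into (0, 1) and equals 0.5 at zero.
z_demo = np.array([-5.0, 0.0, 5.0])
print(sigmoid(z_demo))  # approximately [0.0067, 0.5, 0.9933]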
# Initialize Parameters
def initialize_parameters(n_inputs):
    weights = np.random.rand(n_inputs)  # Shape (n_inputs,)
    bias = np.random.rand(1)  # Shape (1,)
    return weights, bias
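# Minimal usage sketch (a 2-input gate is assumed here): the returned weights and
# bias start as random values in [0, 1) with the shapes noted above.
w_demo, b_demo = initialize_parameters(2)
print(w_demo.shape, b_demo.shape)  # (2,) (1,)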
# Forward Propagation
def forward_propagation(X, weights, bias):
    z = np.dot(X, weights) + bias  # z shape (m,) where m is the number of samples
    return sigmoid(z)
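# A small forward-pass sketch on the four 2-bit input combinations (the input matrix
# below is illustrative): with random, untrained parameters the result is simply one
# value in (0, 1) per row of X_fp.
X_fp = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
w_fp, b_fp = initialize_parameters(X_fp.shape[1])
print(forward_propagation(X_fp, w_fp, b_fp))  # shape (4,)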
# Backward Propagation
def backward_propagation(X, Y, output, weights, bias, learning_rate):
    m = Y.shape[0]  # Number of samples
    error = output - Y.flatten()  # Ensure Y is flattened for shape consistency
    # Compute gradients
    d_weights = np.dot(X.T, error * output * (1 - output)) / m  # Shape (n_inputs,)
    d_bias = np.sum(error * output * (1 - output)) / m  # Scalar
    # Update parameters
    weights -= learning_rate * d_weights  # Update weights
    bias -= learning_rate * d_bias  # Update bias
    return weights, bias
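# The gradient above follows from the chain rule for MSE with a sigmoid output:
# dL/dw = (2/m) * X^T [(output - Y) * output * (1 - output)]. The code keeps only
# half of this (the constant 2 is dropped), which is harmless since the factor is
# absorbed by the learning rate. A minimal finite-difference check sketch follows;
# the tiny AND-style dataset and epsilon below are illustrative assumptions.
X_chk = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
Y_chk = np.array([0.0, 0.0, 0.0, 1.0])
w_chk, b_chk = initialize_parameters(X_chk.shape[1])

def mse(w, b):
    out = forward_propagation(X_chk, w, b)
    return np.mean((out - Y_chk) ** 2)

out_chk = forward_propagation(X_chk, w_chk, b_chk)
err_chk = out_chk - Y_chk
d_w = np.dot(X_chk.T, err_chk * out_chk * (1 - out_chk)) / Y_chk.shape[0]  # as in backward_propagation

eps = 1e-6
w_plus, w_minus = w_chk.copy(), w_chk.copy()
w_plus[0] += eps
w_minus[0] -= eps
numeric = (mse(w_plus, b_chk) - mse(w_minus, b_chk)) / (2 * eps)
print(np.isclose(2 * d_w[0], numeric))  # True: the numeric gradient is twice the code's d_weights[0]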
# Parameters
learning_rate = 0.1
num_iterations = 10000
# Training Loop (run once per gate with that gate's inputs X and targets Y)
def train(X, Y, learning_rate, num_iterations):
    weights, bias = initialize_parameters(X.shape[1])
    loss_history = []
    for i in range(num_iterations):
        output = forward_propagation(X, weights, bias)
        loss = np.mean((output - Y.flatten()) ** 2)  # Mean Squared Error
        loss_history.append(loss)
        weights, bias = backward_propagation(X, Y, output, weights, bias, learning_rate)
    return weights, bias, loss_history
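# The gate datasets and the test helper used below are not shown in this excerpt;
# the following sketch is one plausible version. The truth tables are standard, but
# the exact variable definitions and the 0.5 threshold in test are assumptions.
X_and = np.array([[0, 0], [0, 1], [1, 0], [1, 1]], dtype=float)
Y_and = np.array([[0], [0], [0], [1]], dtype=float)
X_xor = X_and.copy()
Y_xor = np.array([[0], [1], [1], [0]], dtype=float)
X_nor = X_and.copy()
Y_nor = np.array([[1], [0], [0], [0]], dtype=float)

def test(X, weights, bias):
    # Threshold the sigmoid outputs at 0.5 to get binary predictions
    return (forward_propagation(X, weights, bias) >= 0.5).astype(int)

weights_and, bias_and, loss_history_and = train(X_and, Y_and, learning_rate, num_iterations)
weights_xor, bias_xor, loss_history_xor = train(X_xor, Y_xor, learning_rate, num_iterations)
weights_nor, bias_nor, loss_history_nor = train(X_nor, Y_nor, learning_rate, num_iterations)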
plt.plot(loss_history_and)
plt.title('Loss vs Epoch for AND Gate')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
print("AND Gate Predictions:")
print(test(X_and, weights_and, bias_and))
plt.plot(loss_history_xor)
plt.title('Loss vs Epoch for XOR Gate')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
print("XOR Gate Predictions:")
print(test(X_xor, weights_xor, bias_xor))
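# Note: a single sigmoid neuron draws one linear decision boundary, so it can fit
# AND and NOR but not XOR, which is not linearly separable. Expect the XOR loss
# curve to plateau around 0.25 with raw outputs near 0.5, so at least one of the
# four XOR inputs stays misclassified; a hidden layer would be needed to learn XOR.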
plt.plot(loss_history_nor)
plt.title('Loss vs Epoch for NOR Gate')
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.show()
print("NOR Gate Predictions:")
print(test(X_nor, weights_nor, bias_nor))