Practical No.: 9
Name: Hiren Daxeshbhai Patel Roll No.: 07
Title: Build an Artificial Neural Network by implementing the Backpropagation algorithm and
test the same using an appropriate dataset.
Software Requirement:
• Python
• NumPy
• Pandas
• scikit-learn
• Jupyter Notebook
Source Code:
import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import OneHotEncoder
from sklearn.metrics import accuracy_score

# Load the Iris dataset
iris = load_iris()
X = iris.data
y = iris.target.reshape(-1, 1)

# One-hot encode the target labels
encoder = OneHotEncoder(sparse_output=False)  # use sparse=False on scikit-learn < 1.2
y_encoded = encoder.fit_transform(y)

# Split the dataset into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y_encoded, test_size=0.3, random_state=42)
# Define the ANN class
class ANN:
    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.01):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.learning_rate = learning_rate
        # Initialize weights randomly and biases to zero
        self.W1 = np.random.rand(self.input_size, self.hidden_size)
        self.W2 = np.random.rand(self.hidden_size, self.output_size)
        self.b1 = np.zeros((1, self.hidden_size))
        self.b2 = np.zeros((1, self.output_size))

    def sigmoid(self, z):
        return 1 / (1 + np.exp(-z))

    def sigmoid_derivative(self, a):
        # Expects the sigmoid output a = sigmoid(z), since sigmoid'(z) = a * (1 - a)
        return a * (1 - a)
    def forward(self, X):
        # Forward pass: input -> hidden -> output, with sigmoid activations
        self.z1 = np.dot(X, self.W1) + self.b1
        self.a1 = self.sigmoid(self.z1)
        self.z2 = np.dot(self.a1, self.W2) + self.b2
        self.a2 = self.sigmoid(self.z2)
        return self.a2
    def backward(self, X, y, output):
        # Backward pass: propagate the output error back through the network
        output_error = y - output
        output_delta = output_error * self.sigmoid_derivative(output)
        hidden_error = output_delta.dot(self.W2.T)
        hidden_delta = hidden_error * self.sigmoid_derivative(self.a1)
        # Update weights and biases along the error gradient
        self.W2 += self.a1.T.dot(output_delta) * self.learning_rate
        self.b2 += np.sum(output_delta, axis=0, keepdims=True) * self.learning_rate
        self.W1 += X.T.dot(hidden_delta) * self.learning_rate
        self.b1 += np.sum(hidden_delta, axis=0, keepdims=True) * self.learning_rate
    def train(self, X, y, epochs=1000):
        for _ in range(epochs):
            output = self.forward(X)
            self.backward(X, y, output)

    def predict(self, X):
        # Predicted class = index of the largest output activation
        output = self.forward(X)
        return np.argmax(output, axis=1)
# Create and train the ANN
input_size = X_train.shape[1]
hidden_size = 5
output_size = y_encoded.shape[1]
ann = ANN(input_size, hidden_size, output_size)
ann.train(X_train, y_train, epochs=10000)

# Predict and evaluate the model
y_pred = ann.predict(X_test)
y_test_labels = np.argmax(y_test, axis=1)

# Calculate accuracy
accuracy = accuracy_score(y_test_labels, y_pred)
print(f"Accuracy of the ANN: {accuracy * 100:.2f}%")
Output:
Conclusions:
ANNs trained with backpropagation are powerful tools for learning complex patterns in data.
Training alternates between forward propagation, which generates predictions, and
backpropagation, which computes the error gradients and updates the weights and biases; the
model is then evaluated on held-out data to measure its effectiveness. Proper tuning of
hyperparameters (hidden-layer size, learning rate, number of epochs) and careful evaluation
are essential for achieving high performance in classification tasks, as sketched below.
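For reference, the updates implemented in backward() can be written compactly. With network output a_2, hidden activation a_1, target y, and learning rate eta, the code performs (in LaTeX notation):

\delta_2 = (y - a_2) \odot a_2(1 - a_2), \qquad W_2 \leftarrow W_2 + \eta\, a_1^{\top}\delta_2
\delta_1 = (\delta_2 W_2^{\top}) \odot a_1(1 - a_1), \qquad W_1 \leftarrow W_1 + \eta\, X^{\top}\delta_1

with each bias updated by eta times the column sum of its layer's delta.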
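As a minimal sketch of such tuning, the loop below reuses the ANN class from the source code above and sweeps two hyperparameters; the candidate values for hidden_size and the learning rate are illustrative assumptions, not values prescribed by this practical.

# Minimal hyperparameter sweep; assumes the source code above has already run.
# The candidate grids below are illustrative assumptions.
best_acc, best_params = 0.0, None
for hidden_size in (3, 5, 8):          # hidden-layer widths to try
    for lr in (0.001, 0.01, 0.1):      # learning rates to try
        np.random.seed(0)              # fixed seed so runs are comparable
        model = ANN(X_train.shape[1], hidden_size, y_encoded.shape[1], learning_rate=lr)
        model.train(X_train, y_train, epochs=2000)
        acc = accuracy_score(np.argmax(y_test, axis=1), model.predict(X_test))
        if acc > best_acc:
            best_acc, best_params = acc, (hidden_size, lr)
print(f"Best accuracy: {best_acc * 100:.2f}% with (hidden_size, lr) = {best_params}")

In practice the sweep would score a separate validation split rather than the test set; the test set is reused here only to keep the sketch short.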