ML All
Namani.Vamshi Krishna
SAK Informatics
Address: Plot No. 544, #102, Shiva Sai Murali Ramaneeyam, near
Peacock Circle, Pragathi Nagar, Hyderabad, Telangana 500090
Phone: 090001 88676
[2]: import pandas as pd
iris = pd.read_csv(r"Iris.csv")
[3]: iris
iris.head()
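A minimal sketch of the surrounding setup, assuming the standard Iris.csv layout (an Id column, four measurement columns, and a label column; the column positions are assumptions, not taken from the output below):

import pandas as pd

# Load the CSV and take a quick look at the first rows
iris = pd.read_csv(r"Iris.csv")
print(iris.head())

# Split into the 150 x 4 feature matrix X and the label vector y
X = iris.iloc[:, 1:5].values   # the four measurement columns (assumed positions)
y = iris.iloc[:, -1].values    # the last column holds the class label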
[5.1, 3.8, 1.5, 0.3],
[5.4, 3.4, 1.7, 0.2],
[5.1, 3.7, 1.5, 0.4],
...,
[6.5, 3. , 5.2, 2. ],
[6.2, 3.4, 5.4, 2.3],
[5.9, 3. , 5.1, 1.8]])
'Normal', 'Normal', 'Normal', ..., 'left eye disorder',
'left eye disorder', 'left eye disorder', ...,
'right eye disorder', 'right eye disorder',
'right eye disorder'], dtype=object)
# Count plot of the class labels (assumes plot = sns.countplot(x=y) from seaborn)
# Customize the plot
plot.set_xlabel("Categories")
plot.set_ylabel("Count")
plot.set_title("Count Plot of Virginica")
[9]: from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
y = le.fit_transform(y)
y
[9]: array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2])
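The integer codes can be mapped back to the original strings through the fitted encoder; a small sketch (the correspondence, e.g. 0 = 'Normal', is what the arrays above suggest):

# The order of le.classes_ defines the integer code of each label
print(le.classes_)
# inverse_transform recovers the original strings from the codes
print(le.inverse_transform([0, 1, 2]))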
X shape is (150, 4)
Y shape is (150,)
X_test shape is (30, 4)
X_train shape is (120, 4)
y_train shape is (120,)
y_test shape is (30,)
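These shapes are consistent with an 80/20 split of the 150 samples; a sketch of how the split could be produced (test_size and random_state are assumptions):

from sklearn.model_selection import train_test_split

# 150 samples -> 120 training and 30 test samples
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
print("X_train shape is", X_train.shape)
print("X_test shape is", X_test.shape)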
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score
# Accuracy
accuracy = accuracy_score(y_test, y_pred)
# Precision (weighted across the three classes)
precision = precision_score(y_test, y_pred, average='weighted')
# Recall
recall = recall_score(y_test, y_pred, average='weighted')
# F1 score
f1 = f1_score(y_test, y_pred, average='weighted')
Accuracy: 0.97
Precision: 0.97
Recall: 0.97
F1 Score: 0.97
# Assuming y_test and y_pred are your true labels and predicted labels, respectively
# (a seaborn heatmap of the confusion matrix is assumed for this plot)
from sklearn.metrics import confusion_matrix
import seaborn as sns
import matplotlib.pyplot as plt
cm = confusion_matrix(y_test, y_pred)
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues')
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()
[15]: from sklearn.metrics import classification_report
print("Classification Report:\n", classification_report(y_test, y_pred))
Classification Report:
precision recall f1-score support
accuracy 0.97 30
macro avg 0.95 0.97 0.96 30
weighted avg 0.97 0.97 0.97 30
[16]: from sklearn.tree import DecisionTreeClassifier
dt = DecisionTreeClassifier()
dt.fit(X_train, y_train)
# Note: y_pred below still comes from the logistic regression model (lr); the
# decision-tree predictions are evaluated in the DTC section further down.
y_pred = lr.predict(X_test)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix of LRC')
plt.show()
Classification Report of LRC:
precision recall f1-score support
accuracy 0.97 30
macro avg 0.95 0.97 0.96 30
weighted avg 0.97 0.97 0.97 30
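The LRC results above rely on a logistic regression model lr whose training cell is not reproduced in this extract; a minimal sketch of how it could have been fitted:

from sklearn.linear_model import LogisticRegression

# Fit logistic regression on the training split and predict the held-out samples
lr = LogisticRegression(max_iter=200)   # max_iter value is an assumption
lr.fit(X_train, y_train)
y_pred = lr.predict(X_test)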
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix of DTC')
plt.show()
Classification Report of DTC:
precision recall f1-score support
accuracy 1.00 30
macro avg 1.00 1.00 1.00 30
weighted avg 1.00 1.00 1.00 30
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix of RFC')
plt.show()
Classification Report of RFC:
precision recall f1-score support
accuracy 1.00 30
macro avg 1.00 1.00 1.00 30
weighted avg 1.00 1.00 1.00 30
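The RFC results likewise come from a random forest whose training cell is not shown here; a sketch under that assumption (the variable name is illustrative):

from sklearn.ensemble import RandomForestClassifier

# Fit a random forest (100 trees by default) and predict the test split
rfc = RandomForestClassifier()
rfc.fit(X_train, y_train)
y_pred = rfc.predict(X_test)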
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix of SVM')
plt.show()
Classification Report of SVM:
precision recall f1-score support
accuracy 1.00 30
macro avg 1.00 1.00 1.00 30
weighted avg 1.00 1.00 1.00 30
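The SVM block's training cell is also missing from this extract; a sketch, assuming scikit-learn's SVC with default settings:

from sklearn.svm import SVC

# Fit a support vector classifier and predict the test split
svm = SVC()
svm.fit(X_train, y_train)
y_pred = svm.predict(X_test)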
knn = KNeighborsClassifier()
knn.fit(X_train, y_train)
y_pred = knn.predict(X_test)
# The rest of your code remains the same
print("Original y_test values are ",y_test)
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()
Classification Report:
precision recall f1-score support
accuracy 0.97 30
macro avg 0.95 0.97 0.96 30
weighted avg 0.97 0.97 0.97 30
mlp = MLPClassifier()
mlp.fit(X_train, y_train)
y_pred = mlp.predict(X_test)
# The rest of your code remains the same
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()
c:\users\asus\appdata\local\programs\python\python37\lib\site-packages\sklearn\neural_network\_multilayer_perceptron.py:585:
ConvergenceWarning: Stochastic Optimizer: Maximum iterations (200) reached and the optimization hasn't converged yet.
  % self.max_iter, ConvergenceWarning)
Original y_test values are [2 1 0 2 0 2 0 1 1 1 2 1 1 1 1 0 1 1 0 0 2 1 0 0 2 0
0 1 1 0]
predicted y_test values are [2 1 0 2 0 2 0 1 1 1 2 1 1 1 1 0 1 1 0 0 2 1 0 0 2 0
0 1 1 0]
Accuracy: 1.00
Precision: 1.00
Recall: 1.00
F1 Score: 1.00
Classification Report:
precision recall f1-score support
accuracy 1.00 30
macro avg 1.00 1.00 1.00 30
weighted avg 1.00 1.00 1.00 30
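The ConvergenceWarning above means the MLP hit the default 200 iterations before its loss converged, even though the test scores are perfect. A common remedy, sketched here with assumed parameter values, is to scale the inputs and raise max_iter:

from sklearn.neural_network import MLPClassifier
from sklearn.preprocessing import StandardScaler

# Standardized features usually let the stochastic optimizer converge much faster
scaler = StandardScaler().fit(X_train)
mlp = MLPClassifier(max_iter=1000, random_state=42)
mlp.fit(scaler.transform(X_train), y_train)
y_pred = mlp.predict(scaler.transform(X_test))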
lin_reg = LinearRegression()
lin_reg.fit(X_train, y_train)
y_pred = lin_reg.predict(X_test)
y_pred=np.round(y_pred)
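Rounding works here because the encoded labels are 0, 1 and 2, but a rounded regression output can in principle fall outside that range; a cautious variant (the clip bounds simply reflect the three classes):

import numpy as np

# Round continuous predictions to the nearest class code and clamp to the valid range
y_pred = np.clip(np.round(lin_reg.predict(X_test)), 0, 2).astype(int)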
print("Original y_test values are ",y_test)
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()
Classification Report:
precision recall f1-score support
accuracy 1.00 30
macro avg 1.00 1.00 1.00 30
weighted avg 1.00 1.00 1.00 30
ada_boost = AdaBoostClassifier()
ada_boost.fit(X_train, y_train)
y_pred = ada_boost.predict(X_test)
print("Original y_test values are ",y_test)
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()
Classification Report:
precision recall f1-score support
accuracy 0.97 30
macro avg 0.98 0.94 0.96 30
weighted avg 0.97 0.97 0.97 30
extra_tree = ExtraTreesClassifier()
extra_tree.fit(X_train, y_train)
y_pred = extra_tree.predict(X_test)
print("Original y_test values are ",y_test)
print("predicted y_test values are",y_pred)
plt.xlabel('Predicted')
plt.ylabel('True')
plt.title('Confusion Matrix')
plt.show()
Classification Report:
precision recall f1-score support
accuracy 1.00 30
macro avg 1.00 1.00 1.00 30
weighted avg 1.00 1.00 1.00 30
# Decision Trees
from sklearn.tree import DecisionTreeClassifier
# Ensemble Methods
from sklearn.ensemble import RandomForestClassifier, BaggingClassifier, GradientBoostingClassifier, AdaBoostClassifier, ExtraTreesClassifier
# Naive Bayes
from sklearn.naive_bayes import GaussianNB, MultinomialNB, ComplementNB, BernoulliNB
# Nearest Neighbors
from sklearn.neighbors import KNeighborsClassifier
# Linear Models
from sklearn.linear_model import LinearRegression, Ridge, Lasso, ElasticNet
# Decision Trees
from sklearn.tree import DecisionTreeRegressor
# Ensemble Methods
from sklearn.ensemble import RandomForestRegressor, BaggingRegressor, GradientBoostingRegressor, AdaBoostRegressor, ExtraTreesRegressor
# Nearest Neighbors
from sklearn.neighbors import KNeighborsRegressor
# Clustering
from sklearn.cluster import KMeans, DBSCAN, AgglomerativeClustering
# Dimensionality Reduction
from sklearn.decomposition import TruncatedSVD
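With these estimators imported, one compact way to compare several classifiers on the same split is a simple loop; a sketch (the model selection and accuracy-only scoring are illustrative choices, not the author's):

from sklearn.metrics import accuracy_score

# Candidate classifiers, all imported above
models = {
    "DecisionTree": DecisionTreeClassifier(),
    "RandomForest": RandomForestClassifier(),
    "ExtraTrees": ExtraTreesClassifier(),
    "AdaBoost": AdaBoostClassifier(),
    "GaussianNB": GaussianNB(),
    "KNN": KNeighborsClassifier(),
}

for name, model in models.items():
    model.fit(X_train, y_train)
    acc = accuracy_score(y_test, model.predict(X_test))
    print(f"{name}: accuracy = {acc:.2f}")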