louiecerv committed
Commit 30e13c9 · 1 Parent(s): c3db2ff

update the repository

Files changed (2)
  1. app.py +136 -0
  2. requirements.txt +5 -0
app.py ADDED
@@ -0,0 +1,136 @@
+ import streamlit as st
+ import numpy as np
+ import matplotlib.pyplot as plt
+ from sklearn import datasets
+ from sklearn.model_selection import train_test_split
+ from sklearn.naive_bayes import GaussianNB
+ from sklearn.svm import SVC
+ from sklearn.neighbors import KNeighborsClassifier
+ from sklearn.metrics import accuracy_score, classification_report, confusion_matrix
+ import seaborn as sns
+
+ # Load the digits dataset (scikit-learn's 8x8 MNIST-style handwritten digits)
+ digits = datasets.load_digits()
+ X, y = digits.data, digits.target
+
+ # Split into train and test sets
+ X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
+
+ # Initialize classifiers
+ nb_classifier = GaussianNB()
+ svm_classifier = SVC()
+ knn_classifier = KNeighborsClassifier(n_neighbors=3)
+
+ # Train classifiers
+ nb_classifier.fit(X_train, y_train)
+ svm_classifier.fit(X_train, y_train)
+ knn_classifier.fit(X_train, y_train)
+
+ # Predict
+ nb_predictions = nb_classifier.predict(X_test)
+ svm_predictions = svm_classifier.predict(X_test)
+ knn_predictions = knn_classifier.predict(X_test)
+
+ # Compute accuracy
+ nb_accuracy = accuracy_score(y_test, nb_predictions)
+ svm_accuracy = accuracy_score(y_test, svm_predictions)
+ knn_accuracy = accuracy_score(y_test, knn_predictions)
+
+ # Compute classification reports and confusion matrices
+ nb_report = classification_report(y_test, nb_predictions)
+ svm_report = classification_report(y_test, svm_predictions)
+ knn_report = classification_report(y_test, knn_predictions)
+
+ nb_cm = confusion_matrix(y_test, nb_predictions)
+ svm_cm = confusion_matrix(y_test, svm_predictions)
+ knn_cm = confusion_matrix(y_test, knn_predictions)
+
+ def main():
+     # Streamlit App
+     st.title("MNIST Classifier Performance")
+     st.write("### Sample Images from MNIST Dataset")
+
+     about = """# 🖥️ MNIST Classifier Performance App 🚀
+ This Streamlit app demonstrates the performance of three different machine learning classifiers on the **MNIST handwritten digits dataset**. 📊 The classifiers compared are:
+
+ ✅ **Naïve Bayes**
+ ✅ **Support Vector Machine (SVM)**
+ ✅ **K-Nearest Neighbors (KNN)**
+
+ ## 🔍 Features:
+ - 📸 **Displays 5 sample images** from the MNIST dataset.
+ - 📊 **Trains and evaluates** Naïve Bayes, SVM, and KNN classifiers.
+ - 🏆 **Compares classifier accuracy** on the test dataset.
+ - 📄 **Shows classification reports** with precision, recall, and F1-score.
+ - 🔥 **Visualizes confusion matrices** using heatmaps for better understanding.
+
+ ## 📌 How to Use:
+ 1. Run the app using Streamlit.
+ 2. Navigate through the **three tabs** to check the performance of each classifier.
+ 3. Analyze the **classification report and confusion matrix** for deeper insights.
+ 4. Read the **comparison section** to understand the strengths and weaknesses of each model.
+
+ ## 🎯 Insights:
+ - **Naïve Bayes**: Fast but may struggle with complex patterns.
+ - **SVM**: Balanced performance with good accuracy.
+ - **KNN**: Effective but can be slow with large datasets.
+
+ 🚀 Explore and experiment with different models to enhance classification performance!
+
+ ### 📌 About the Creator
+ **Created by:** *Louie F. Cervantes, M.Eng. (Information Engineering)*
+ **(c) 2025 West Visayas State University**
+ """
+     with st.expander("About the App"):
+         st.markdown(about)
+
+     # Display 5 sample images
+     fig, axes = plt.subplots(1, 5, figsize=(10, 3))
+     for i, ax in enumerate(axes):
+         ax.imshow(digits.images[i], cmap='gray')
+         ax.set_title(f"Label: {digits.target[i]}")
+         ax.axis('off')
+     st.pyplot(fig)
+
+     # Create tabs
+     tab1, tab2, tab3 = st.tabs(["Naïve Bayes", "SVM", "KNN"])
+
+     with tab1:
+         st.subheader("Naïve Bayes Classifier")
+         st.write(f"Accuracy: {nb_accuracy:.4f}")
+         st.write("Classification Report:")
+         st.text(nb_report)  # st.text keeps the report's fixed-width alignment
+         st.write("Confusion Matrix:")
+         fig, ax = plt.subplots()
+         sns.heatmap(nb_cm, annot=True, fmt='d', cmap='Blues', ax=ax)
+         st.pyplot(fig)
+
+     with tab2:
+         st.subheader("Support Vector Machine (SVM)")
+         st.write(f"Accuracy: {svm_accuracy:.4f}")
+         st.write("Classification Report:")
+         st.text(svm_report)
+         st.write("Confusion Matrix:")
+         fig, ax = plt.subplots()
+         sns.heatmap(svm_cm, annot=True, fmt='d', cmap='Blues', ax=ax)
+         st.pyplot(fig)
+
+     with tab3:
+         st.subheader("K-Nearest Neighbors (KNN)")
+         st.write(f"Accuracy: {knn_accuracy:.4f}")
+         st.write("Classification Report:")
+         st.text(knn_report)
+         st.write("Confusion Matrix:")
+         fig, ax = plt.subplots()
+         sns.heatmap(knn_cm, annot=True, fmt='d', cmap='Blues', ax=ax)
+         st.pyplot(fig)
+
+     # Comparison
+     st.write("## Classifier Comparison")
+     st.write("### Observations:")
+     st.write("- **Naïve Bayes** is fast but may struggle with complex patterns.")
+     st.write("- **SVM** performs well with a balance of accuracy and speed.")
+     st.write("- **KNN** can be effective but may be slower with large datasets.")
+
+ if __name__ == "__main__":
+     main()
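
The three tabs in app.py repeat the same accuracy / report / heatmap block. Below is a minimal refactoring sketch of that pattern, assuming the predictions and `y_test` defined in app.py; the helper name `show_classifier_results` is illustrative and not part of this commit.

```python
import streamlit as st
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.metrics import accuracy_score, classification_report, confusion_matrix

def show_classifier_results(title, y_true, y_pred):
    """Render accuracy, classification report, and confusion-matrix heatmap for one classifier."""
    st.subheader(title)
    st.write(f"Accuracy: {accuracy_score(y_true, y_pred):.4f}")
    st.write("Classification Report:")
    st.text(classification_report(y_true, y_pred))  # fixed-width text keeps the report aligned
    st.write("Confusion Matrix:")
    fig, ax = plt.subplots()
    sns.heatmap(confusion_matrix(y_true, y_pred), annot=True, fmt='d', cmap='Blues', ax=ax)
    st.pyplot(fig)

# Hypothetical usage inside the tabs defined in app.py:
# with tab1:
#     show_classifier_results("Naïve Bayes Classifier", y_test, nb_predictions)
```

Passing the true labels and predictions directly keeps the helper independent of which classifier produced them.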
requirements.txt ADDED
@@ -0,0 +1,5 @@
+ streamlit
+ numpy
+ matplotlib
+ scikit-learn
+ seaborn
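
With both files in place, the app can be set up and launched in the usual Streamlit way: install the dependencies with `pip install -r requirements.txt`, then run `streamlit run app.py`.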