louiecerv committed on
Commit 3d81d0a · 1 Parent(s): 89fe298

sync with remote

Files changed (3)
  1. app.py +71 -0
  2. data/overlapped.csv +360 -0
  3. requirements.txt +6 -0
app.py ADDED
@@ -0,0 +1,71 @@
+ import streamlit as st
+ import pandas as pd
+ import numpy as np
+ import matplotlib.pyplot as plt
+ import seaborn as sns
+ from sklearn.model_selection import train_test_split
+ from sklearn.svm import SVC
+ from sklearn.metrics import confusion_matrix, classification_report
+
+ # Load the dataset
+ st.title("SVM Kernel Performance Comparison")
+
+ uploaded_file = 'data/overlapped.csv'
+ if uploaded_file:
+     df = pd.read_csv(uploaded_file, header=None)  # the CSV has no header row
+     st.write("### Data Preview")
+     st.dataframe(df)
+
+     # Assuming the last column is the target
+     X = df.iloc[:, :-1]
+     y = df.iloc[:, -1]
+
+     # Splitting dataset
+     X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
+
+     # Plot overlapped clusters
+     st.write("### Cluster Visualization")
+     fig, ax = plt.subplots()
+     scatter = sns.scatterplot(x=X.iloc[:, 0], y=X.iloc[:, 1], hue=y, palette='coolwarm', alpha=0.6, ax=ax)
+     plt.xlabel("Feature 1")
+     plt.ylabel("Feature 2")
+     plt.title("Overlapped Clusters")
+     st.pyplot(fig)
+
+     # Function to train SVM and get performance metrics
+     def evaluate_svm(kernel_type):
+         model = SVC(kernel=kernel_type)
+         model.fit(X_train, y_train)
+         y_pred = model.predict(X_test)
+         cm = confusion_matrix(y_test, y_pred)
+         cr = classification_report(y_test, y_pred, output_dict=True)
+         return cm, cr
+
+     # Streamlit tabs
+     tab1, tab2, tab3 = st.tabs(["Linear Kernel", "Polynomial Kernel", "RBF Kernel"])
+
+     for tab, kernel in zip([tab1, tab2, tab3], ["linear", "poly", "rbf"]):
+         with tab:
+             st.write(f"## SVM with {kernel.capitalize()} Kernel")
+             cm, cr = evaluate_svm(kernel)
+
+             # Confusion matrix
+             st.write("### Confusion Matrix")
+             fig, ax = plt.subplots()
+             sns.heatmap(cm, annot=True, fmt='d', cmap='Blues', ax=ax)
+             plt.xlabel("Predicted")
+             plt.ylabel("Actual")
+             plt.title("Confusion Matrix")
+             st.pyplot(fig)
+
+             # Classification report
+             st.write("### Classification Report")
+             st.dataframe(pd.DataFrame(cr).transpose())
+
+             # Explanation
+             explanation = {
+                 "linear": "The linear kernel performs well when the data is linearly separable.",
+                 "poly": "The polynomial kernel captures more complex relationships but may overfit with high-degree polynomials.",
+                 "rbf": "The RBF kernel is effective in capturing non-linear relationships in the data but requires careful tuning of parameters."
+             }
+             st.markdown(f"**Performance Analysis:** {explanation[kernel]}")
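Side note (not part of the committed file): the in-app explanation for the RBF kernel says it "requires careful tuning of parameters". A minimal sketch of what that tuning could look like with scikit-learn's GridSearchCV is below; it assumes the same X_train and y_train produced by the train_test_split in app.py, and the parameter grid values are illustrative choices, not values taken from this repository.

# Illustrative sketch: grid-search C and gamma for the RBF kernel.
# Assumes X_train and y_train already exist, as in app.py.
from sklearn.model_selection import GridSearchCV
from sklearn.svm import SVC

param_grid = {
    "C": [0.1, 1, 10, 100],           # regularization strength
    "gamma": [0.01, 0.1, 1, "scale"]  # RBF kernel width
}

grid = GridSearchCV(SVC(kernel="rbf"), param_grid, cv=5, scoring="accuracy")
grid.fit(X_train, y_train)

print("Best parameters:", grid.best_params_)
print("Cross-validated accuracy:", grid.best_score_)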
data/overlapped.csv ADDED
@@ -0,0 +1,360 @@
+ 4.86,4.87,0
+ 4.69,5.37,0
+ 3.82,5.71,0
+ 2.58,9.88,1
+ 8.3,5.36,1
+ 3.41,1.46,1
+ 4.4,5.77,0
+ 5.35,5.02,0
+ 4.77,5.03,0
+ 2.18,8.08,1
+ 7.41,5.87,1
+ 2.6,1.7,1
+ 6.39,6.26,0
+ 4.83,3.62,0
+ 5.82,4.95,0
+ 1.88,7.33,1
+ 7.93,6.52,1
+ 3.1,3.05,1
+ 4.44,5.09,0
+ 5.69,5.35,0
+ 5.11,5.76,0
+ 2.02,8.78,1
+ 8.9,4.67,1
+ 2.53,2.63,1
+ 5.56,4.9,0
+ 3.79,5.27,0
+ 5.45,4.65,0
+ 3.54,7.7,1
+ 6.44,4.94,1
+ 3.34,2.62,1
+ 5.24,6.22,0
+ 4.28,4.92,0
+ 4.75,5.88,0
+ 2.35,7.94,1
+ 7.86,4.65,1
+ 4.47,1.69,1
+ 4.19,5.53,0
+ 4.58,4.97,0
+ 4.81,4.95,0
+ 2.88,7.87,1
+ 7.21,5.37,1
+ 2.46,2.61,1
+ 5.77,5.61,0
+ 5.13,4.87,0
+ 5.9,2.26,0
+ 2.11,8.14,1
+ 8.68,5.61,1
+ 3.19,2.19,1
+ 4.73,6.74,0
+ 5.27,5.62,0
+ 4.62,5.77,0
+ 1.59,8.38,1
+ 6.98,4.72,1
+ 2.9,3.02,1
+ 4.81,5.5,0
+ 4.16,4.88,0
+ 4.34,3.7,0
+ 1.08,7.69,1
+ 8.13,5.58,1
+ 3.13,1.68,1
+ 5.42,4.57,0
+ 4.67,5.04,0
+ 4.49,5.13,0
+ 2.85,6.91,1
+ 8.68,5.95,1
+ 3.69,1.27,1
+ 5.07,5.19,0
+ 3.96,4.93,0
+ 4.27,4.45,0
+ 2.46,8.31,1
+ 7.81,4.54,1
+ 2.8,1.32,1
+ 6.03,5.69,0
+ 4.08,4.96,0
+ 5.67,5.57,0
+ 2.18,8.42,1
+ 8.22,5.77,1
+ 4.04,1.1,1
+ 5.48,4.92,0
+ 4.39,3.83,0
+ 4.75,4.6,0
+ 2.53,7.92,1
+ 7.31,5.45,1
+ 3.35,2.18,1
+ 4.55,4.87,0
+ 5.44,6.27,0
+ 5.4,3.88,0
+ 1.94,8.62,1
+ 7.15,6.57,1
+ 2.95,1.22,1
+ 4.55,5.29,0
+ 4.52,6.22,0
+ 5.03,4.13,0
+ 3.38,7.45,1
+ 8.86,4.62,1
+ 1.54,1.67,1
+ 5.25,5.05,0
+ 4.52,5.58,0
+ 4.59,4.85,0
+ 2.12,8.41,1
+ 7.14,4.97,1
+ 4.6,1.57,1
+ 4.95,4.45,0
+ 5.1,5.59,0
+ 5.21,6.03,0
+ 1.87,8.12,1
+ 8.17,4.38,1
+ 2.86,1.48,1
+ 4.57,4.99,0
+ 6.21,5.75,0
+ 4.47,4.39,0
+ 0.75,8.36,1
+ 8.45,5.37,1
+ 4.27,1.52,1
+ 5.21,6.52,0
+ 3.5,5.0,0
+ 4.17,5.17,0
+ 3.6,8.2,1
+ 8.3,5.82,1
+ 2.4,2.25,1
+ 4.38,5.05,0
+ 6.15,5.32,0
+ 4.53,5.54,0
+ 1.23,7.82,1
+ 7.08,5.55,1
+ 3.5,1.59,1
+ 6.21,4.02,0
+ 5.5,4.18,0
+ 6.29,4.73,0
+ 2.89,8.32,1
+ 8.81,6.08,1
+ 2.48,2.35,1
+ 5.51,3.91,0
+ 5.02,5.5,0
+ 4.02,4.76,0
+ 3.58,7.91,1
+ 9.32,4.43,1
+ 4.27,3.07,1
+ 3.64,5.24,0
+ 4.76,3.91,0
+ 4.69,5.08,0
+ 2.96,7.92,1
+ 8.17,5.8,1
+ 2.76,1.11,1
+ 5.34,5.59,0
+ 4.84,5.3,0
+ 5.63,5.81,0
+ 2.98,7.88,1
+ 8.27,4.42,1
+ 2.93,1.1,1
+ 4.62,4.89,0
+ 6.09,4.83,0
+ 6.26,4.67,0
+ 1.52,8.73,1
+ 7.78,5.37,1
+ 3.77,1.01,1
+ 5.21,6.67,0
+ 4.67,4.63,0
+ 5.81,4.44,0
+ 1.51,7.27,1
+ 8.27,4.91,1
+ 1.97,0.35,1
+ 4.4,5.18,0
+ 4.38,3.88,0
+ 3.96,5.07,0
+ 3.46,9.07,1
+ 10.18,5.05,1
+ 3.15,2.59,1
+ 6.66,4.82,0
+ 4.99,5.24,0
+ 4.34,4.83,0
+ 3.21,7.98,1
+ 7.64,4.45,1
+ 2.94,2.61,1
+ 5.25,5.1,0
+ 5.49,4.73,0
+ 4.93,5.29,0
+ 1.57,8.23,1
+ 8.27,5.66,1
+ 2.11,2.06,1
+ 4.47,4.81,0
+ 5.33,6.31,0
+ 5.69,4.62,0
+ 3.16,7.96,1
+ 9.3,6.11,1
+ 1.79,3.3,1
+ 6.04,5.69,0
+ 4.47,4.45,0
+ 4.06,5.61,0
+ 1.79,7.8,1
+ 7.58,5.98,1
+ 2.5,3.0,1
+ 5.42,5.03,0
+ 5.62,4.48,0
+ 4.99,5.25,0
+ 1.75,8.06,1
+ 8.1,5.6,1
+ 3.18,1.51,1
+ 4.64,5.15,0
+ 4.74,4.2,0
+ 3.53,6.52,0
+ 2.34,8.02,1
+ 8.5,4.71,1
+ 3.33,2.34,1
+ 5.64,5.4,0
+ 5.25,4.39,0
+ 4.54,4.31,0
+ 2.43,7.84,1
+ 8.06,5.49,1
+ 2.89,0.89,1
+ 5.81,5.18,0
+ 5.23,4.78,0
+ 5.49,5.27,0
+ 1.56,8.29,1
+ 6.87,5.5,1
+ 3.67,1.78,1
+ 5.7,4.76,0
+ 4.41,4.77,0
+ 4.63,6.21,0
+ 1.86,8.58,1
+ 8.29,6.11,1
+ 1.71,1.34,1
+ 5.24,5.23,0
+ 6.06,5.09,0
+ 5.42,5.45,0
+ 2.99,8.51,1
+ 8.89,4.74,1
+ 1.97,2.54,1
+ 5.32,5.03,0
+ 5.43,6.42,0
+ 6.34,5.9,0
+ 2.15,8.08,1
+ 9.23,5.55,1
+ 2.5,2.24,1
+ 5.6,5.12,0
+ 5.17,4.43,0
+ 5.17,6.01,0
+ 3.77,6.38,1
+ 7.69,4.22,1
+ 3.47,1.37,1
+ 5.3,4.83,0
+ 5.69,5.2,0
+ 5.09,5.34,0
+ 2.11,8.13,1
+ 7.92,6.32,1
+ 3.39,1.44,1
+ 5.52,4.62,0
+ 4.37,5.36,0
+ 4.49,4.4,0
+ 2.79,9.68,1
+ 8.96,5.35,1
+ 3.43,2.07,1
+ 5.01,5.71,0
+ 4.76,4.58,0
+ 5.94,4.77,0
+ 2.5,7.6,1
+ 6.49,4.83,1
+ 4.09,2.58,1
+ 5.83,6.57,0
+ 3.75,4.82,0
+ 4.78,5.36,0
+ 1.5,9.46,1
+ 8.33,5.42,1
+ 3.07,2.51,1
+ 4.23,5.78,0
+ 4.7,4.86,0
+ 6.1,5.83,0
+ 2.76,8.45,1
+ 8.38,6.21,1
+ 3.15,2.1,1
+ 4.7,5.61,0
+ 4.77,4.82,0
+ 5.39,6.2,0
+ 2.42,8.57,1
+ 7.59,5.78,1
+ 3.78,1.37,1
+ 5.25,5.49,0
+ 3.74,3.51,0
+ 3.9,5.09,0
+ 1.98,8.97,1
+ 7.07,6.61,1
+ 3.71,1.88,1
+ 4.9,3.93,0
+ 4.64,5.2,0
+ 4.54,4.63,0
+ 2.52,8.14,1
+ 8.54,5.15,1
+ 2.7,0.43,1
+ 4.68,5.76,0
+ 5.84,4.63,0
+ 6.2,4.75,0
+ 2.52,7.32,1
+ 7.9,5.05,1
+ 2.98,1.86,1
+ 5.11,6.02,0
+ 5.33,5.64,0
+ 5.37,5.79,0
+ 1.6,8.85,1
+ 9.08,5.46,1
+ 3.85,3.76,1
+ 6.15,5.31,0
+ 5.36,4.23,0
+ 5.81,5.36,0
+ 3.49,7.84,1
+ 8.95,5.95,1
+ 2.99,1.77,1
+ 5.31,4.85,0
+ 5.49,4.99,0
+ 4.31,5.01,0
+ 3.71,8.84,1
+ 8.25,4.83,1
+ 2.91,2.53,1
+ 5.83,7.03,0
+ 3.32,5.09,0
+ 4.66,5.09,0
+ 3.02,7.22,1
+ 7.24,4.67,1
+ 3.09,2.43,1
+ 4.75,5.27,0
+ 5.18,4.2,0
+ 5.01,4.47,0
+ 2.02,7.92,1
+ 8.05,6.65,1
+ 2.55,2.65,1
+ 5.51,5.19,0
+ 4.28,3.82,0
+ 5.71,5.9,0
+ 1.89,9.15,1
+ 7.14,5.64,1
+ 3.42,1.41,1
+ 5.14,6.11,0
+ 4.54,6.96,0
+ 4.29,5.76,0
+ 2.24,7.66,1
+ 7.78,5.83,1
+ 2.65,1.09,1
+ 5.29,5.68,0
+ 3.46,4.89,0
+ 4.4,5.91,0
+ 2.71,8.98,1
+ 7.02,6.16,1
+ 3.48,1.63,1
+ 6.11,5.97,0
+ 4.78,4.69,0
+ 5.35,5.65,0
+ 1.78,8.26,1
+ 8.04,5.83,1
+ 3.24,1.55,1
+ 6.99,4.63,0
+ 4.26,5.47,0
+ 4.75,5.97,0
+ 2.49,8.73,1
+ 7.55,5.34,1
+ 2.18,2.04,1
+ 4.89,5.2,0
+ 4.11,4.8,0
+ 5.13,3.6,0
+ 5.58,7.17,1
+ 8.62,6.02,1
+ 3.05,2.11,1
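The file holds 360 rows of two numeric features plus a binary label, with class 0 forming one central cluster and class 1 forming several surrounding clusters so that the classes overlap. Purely as an illustration of that structure, and an assumption rather than a record of how data/overlapped.csv was actually produced, a similar dataset could be generated like this:

# Illustrative only: one way to produce a similar overlapped-clusters CSV.
# This is NOT how the committed file was generated; cluster centers and
# the output filename (overlapped_demo.csv) are assumptions.
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
n = 180  # rows per class

class0 = rng.normal(loc=[5.0, 5.0], scale=0.7, size=(n, 2))        # central cluster
class1 = np.vstack([
    rng.normal(loc=[2.5, 8.0], scale=0.7, size=(60, 2)),            # upper-left blob
    rng.normal(loc=[8.0, 5.5], scale=0.7, size=(60, 2)),            # right blob
    rng.normal(loc=[3.0, 2.0], scale=0.8, size=(60, 2)),            # lower blob
])

features = np.vstack([class0, class1]).round(2)
labels = np.array([0] * n + [1] * n)

pd.DataFrame({"f1": features[:, 0], "f2": features[:, 1], "label": labels}) \
    .to_csv("overlapped_demo.csv", index=False, header=False)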
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ streamlit
+ pandas
+ numpy
+ matplotlib
+ seaborn
+ scikit-learn
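With these dependencies installed in a local checkout (for example via pip install -r requirements.txt, an assumption about the reader's setup rather than anything stated in the commit), the app can be started with streamlit run app.py; a Streamlit-based Hugging Face Space typically installs the same requirements and uses app.py as its entry point.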