Update README.md
Browse files
README.md
CHANGED
@@ -49,7 +49,7 @@ You can find more about the dataset [here](https://users.cs.northwestern.edu/~yg
 
 ### Training Parameters
 - **Batch Size**: 128
-- **Epochs**:
+- **Epochs**: 26 (early stopping)
 
 ### Loss Function
 - Binary Cross-Entropy Loss
@@ -63,35 +63,35 @@ You can find more about the dataset [here](https://users.cs.northwestern.edu/~yg
 
 Below is a summary of the model's performance over the training process:
 
-| FN | FP | TN | TP | accuracy | loss | val_FN | val_FP | val_TN | val_TP | val_accuracy | val_loss |
-
-| 187 | 140 | 499 | 559 | 0.763899 | 1.93435 | 19 | 2 | 169 | 156 | 0.939306 | 0.378412 |
-| 16 | 35 | 604 | 730 | 0.963177 | 0.242954 | 6 | 3 | 168 | 169 | 0.973988 | 0.186523 |
-| 29 | 19 | 620 | 717 | 0.965343 | 0.174301 | 3 | 12 | 159 | 172 | 0.956647 | 0.149353 |
-| 17 | 26 | 613 | 729 | 0.968953 | 0.116907 | 3 | 7 | 164 | 172 | 0.971098 | 0.0973678 |
-| 14 | 17 | 622 | 732 | 0.977617 | 0.0807728 | 2 | 12 | 159 | 173 | 0.959538 | 0.0991694 |
-| 13 | 18 | 621 | 733 | 0.977617 | 0.0713564 | 3 | 6 | 165 | 172 | 0.973988 | 0.0604228 |
-| 10 | 17 | 622 | 736 | 0.980505 | 0.0604733 | 3 | 5 | 166 | 172 | 0.976879 | 0.0584633 |
-| 9 | 13 | 626 | 737 | 0.984116 | 0.0478682 | 4 | 4 | 167 | 171 | 0.976879 | 0.0745383 |
-| 5 | 13 | 626 | 741 | 0.987004 | 0.0376507 | 3 | 7 | 164 | 172 | 0.971098 | 0.0773135 |
-| 7 | 6 | 633 | 739 | 0.990614 | 0.0248621 | 3 | 2 | 169 | 172 | 0.985549 | 0.0628484 |
-| 7 | 10 | 629 | 739 | 0.987726 | 0.0298408 | 2 | 2 | 169 | 173 | 0.988439 | 0.0531744 |
-| 4 | 10 | 629 | 742 | 0.989892 | 0.0242986 | 3 | 2 | 169 | 172 | 0.985549 | 0.0675473 |
-| 6 | 7 | 632 | 740 | 0.990614 | 0.0326418 | 2 | 3 | 168 | 173 | 0.985549 | 0.0671533 |
-| 6 | 9 | 630 | 740 | 0.98917 | 0.0279278 | 2 | 5 | 166 | 173 | 0.979769 | 0.06897 |
-| 7 | 7 | 632 | 739 | 0.989892 | 0.0244297 | 2 | 3 | 168 | 173 | 0.985549 | 0.0550619 |
-| 5 | 4 | 635 | 741 | 0.993502 | 0.0228877 | 3 | 2 | 169 | 172 | 0.985549 | 0.0558347 |
-| 4 | 10 | 629 | 742 | 0.989892 | 0.0277857 | 4 | 0 | 171 | 171 | 0.988439 | 0.0401906 |
-| 2 | 7 | 632 | 744 | 0.993502 | 0.0193213 | 9 | 1 | 170 | 166 | 0.971098 | 0.0959367 |
-| 7 | 3 | 636 | 739 | 0.99278 | 0.0151094 | 4 | 6 | 165 | 171 | 0.971098 | 0.0873069 |
-| 7 | 3 | 636 | 739 | 0.99278 | 0.017344 | 1 | 6 | 165 | 174 | 0.979769 | 0.0964724 |
-| 5 | 4 | 635 | 741 | 0.993502 | 0.024251 | 1 | 4 | 167 | 174 | 0.985549 | 0.0580115 |
-| 3 | 9 | 630 | 743 | 0.991336 | 0.0207464 | 1 | 3 | 168 | 174 | 0.988439 | 0.0510422 |
-| 2 | 6 | 633 | 744 | 0.994224 | 0.0122992 | 1 | 1 | 170 | 174 | 0.99422 | 0.0439666 |
-| 3 | 2 | 637 | 743 | 0.99639 | 0.0105987 | 4 | 2 | 169 | 171 | 0.982659 | 0.0502425 |
-| 3 | 6 | 633 | 743 | 0.993502 | 0.0164827 | 6 | 1 | 170 | 169 | 0.979769 | 0.0865423 |
-| 5 | 7 | 632 | 741 | 0.991336 | 0.0245302 | 6 | 3 | 168 | 169 | 0.973988 | 0.0918282 |
-| 9 | 8 | 631 | 737 | 0.987726 | 0.0296158 | 5 | 1 | 170 | 170 | 0.982659 | 0.0991676 |
+| | FN | FP | TN | TP | accuracy | loss | val_FN | val_FP | val_TN | val_TP | val_accuracy | val_loss |
+|---:|-----:|-----:|-----:|-----:|-----------:|----------:|---------:|---------:|---------:|---------:|---------------:|-----------:|
+| 0 | 187 | 140 | 499 | 559 | 0.763899 | 1.93435 | 19 | 2 | 169 | 156 | 0.939306 | 0.378412 |
+| 1 | 16 | 35 | 604 | 730 | 0.963177 | 0.242954 | 6 | 3 | 168 | 169 | 0.973988 | 0.186523 |
+| 2 | 29 | 19 | 620 | 717 | 0.965343 | 0.174301 | 3 | 12 | 159 | 172 | 0.956647 | 0.149353 |
+| 3 | 17 | 26 | 613 | 729 | 0.968953 | 0.116907 | 3 | 7 | 164 | 172 | 0.971098 | 0.0973678 |
+| 4 | 14 | 17 | 622 | 732 | 0.977617 | 0.0807728 | 2 | 12 | 159 | 173 | 0.959538 | 0.0991694 |
+| 5 | 13 | 18 | 621 | 733 | 0.977617 | 0.0713564 | 3 | 6 | 165 | 172 | 0.973988 | 0.0604228 |
+| 6 | 10 | 17 | 622 | 736 | 0.980505 | 0.0604733 | 3 | 5 | 166 | 172 | 0.976879 | 0.0584633 |
+| 7 | 9 | 13 | 626 | 737 | 0.984116 | 0.0478682 | 4 | 4 | 167 | 171 | 0.976879 | 0.0745383 |
+| 8 | 5 | 13 | 626 | 741 | 0.987004 | 0.0376507 | 3 | 7 | 164 | 172 | 0.971098 | 0.0773135 |
+| 9 | 7 | 6 | 633 | 739 | 0.990614 | 0.0248621 | 3 | 2 | 169 | 172 | 0.985549 | 0.0628484 |
+| 10 | 7 | 10 | 629 | 739 | 0.987726 | 0.0298408 | 2 | 2 | 169 | 173 | 0.988439 | 0.0531744 |
+| 11 | 4 | 10 | 629 | 742 | 0.989892 | 0.0242986 | 3 | 2 | 169 | 172 | 0.985549 | 0.0675473 |
+| 12 | 6 | 7 | 632 | 740 | 0.990614 | 0.0326418 | 2 | 3 | 168 | 173 | 0.985549 | 0.0671533 |
+| 13 | 6 | 9 | 630 | 740 | 0.98917 | 0.0279278 | 2 | 5 | 166 | 173 | 0.979769 | 0.06897 |
+| 14 | 7 | 7 | 632 | 739 | 0.989892 | 0.0244297 | 2 | 3 | 168 | 173 | 0.985549 | 0.0550619 |
+| 15 | 5 | 4 | 635 | 741 | 0.993502 | 0.0228877 | 3 | 2 | 169 | 172 | 0.985549 | 0.0558347 |
+| 16 | 4 | 10 | 629 | 742 | 0.989892 | 0.0277857 | 4 | 0 | 171 | 171 | 0.988439 | 0.0401906 |
+| 17 | 2 | 7 | 632 | 744 | 0.993502 | 0.0193213 | 9 | 1 | 170 | 166 | 0.971098 | 0.0959367 |
+| 18 | 7 | 3 | 636 | 739 | 0.99278 | 0.0151094 | 4 | 6 | 165 | 171 | 0.971098 | 0.0873069 |
+| 19 | 7 | 3 | 636 | 739 | 0.99278 | 0.017344 | 1 | 6 | 165 | 174 | 0.979769 | 0.0964724 |
+| 20 | 5 | 4 | 635 | 741 | 0.993502 | 0.024251 | 1 | 4 | 167 | 174 | 0.985549 | 0.0580115 |
+| 21 | 3 | 9 | 630 | 743 | 0.991336 | 0.0207464 | 1 | 3 | 168 | 174 | 0.988439 | 0.0510422 |
+| 22 | 2 | 6 | 633 | 744 | 0.994224 | 0.0122992 | 1 | 1 | 170 | 174 | 0.99422 | 0.0439666 |
+| 23 | 3 | 2 | 637 | 743 | 0.99639 | 0.0105987 | 4 | 2 | 169 | 171 | 0.982659 | 0.0502425 |
+| 24 | 3 | 6 | 633 | 743 | 0.993502 | 0.0164827 | 6 | 1 | 170 | 169 | 0.979769 | 0.0865423 |
+| 25 | 5 | 7 | 632 | 741 | 0.991336 | 0.0245302 | 6 | 3 | 168 | 169 | 0.973988 | 0.0918282 |
+| 26 | 9 | 8 | 631 | 737 | 0.987726 | 0.0296158 | 5 | 1 | 170 | 170 | 0.982659 | 0.0991676 |
 
 ---
 