itshiroto committed on
Commit
13ae495
·
verified ·
1 Parent(s): 85245da

umn-cyber/indobert-hoax-detection

Browse files
Files changed (5) hide show
  1. README.md +4 -4
  2. all_results.json +11 -11
  3. eval_results.json +8 -8
  4. test_results.json +7 -7
  5. train_results.json +4 -4
README.md CHANGED
@@ -21,10 +21,10 @@ should probably proofread and complete it, then remove this comment. -->
21
 
22
  This model is a fine-tuned version of [indobenchmark/indobert-base-p1](https://huggingface.co/indobenchmark/indobert-base-p1) on an unknown dataset.
23
  It achieves the following results on the evaluation set:
24
- - Loss: 0.0767
25
- - Accuracy: 0.9885
26
- - F1: 0.9879
27
- - Precision: 0.9886
28
  - Recall: 0.9872
29
 
30
  ## Model description
 
21
 
22
  This model is a fine-tuned version of [indobenchmark/indobert-base-p1](https://huggingface.co/indobenchmark/indobert-base-p1) on an unknown dataset.
23
  It achieves the following results on the evaluation set:
24
+ - Loss: 0.0457
25
+ - Accuracy: 0.9875
26
+ - F1: 0.9868
27
+ - Precision: 0.9865
28
  - Recall: 0.9872
29
 
30
  ## Model description
all_results.json CHANGED
@@ -1,16 +1,16 @@
1
  {
2
  "epoch": 5.0,
3
- "eval_accuracy": 0.9861299052774019,
4
- "eval_f1": 0.9854455094071708,
5
- "eval_loss": 0.045191410928964615,
6
- "eval_precision": 0.9836995038979447,
7
  "eval_recall": 0.9871977240398293,
8
- "eval_runtime": 82.3688,
9
- "eval_samples_per_second": 35.887,
10
- "eval_steps_per_second": 1.129,
11
  "total_flos": 3.11005644392448e+16,
12
- "train_loss": 0.03944617702703827,
13
- "train_runtime": 23402.751,
14
- "train_samples_per_second": 10.101,
15
- "train_steps_per_second": 0.316
16
  }
 
1
  {
2
  "epoch": 5.0,
3
+ "eval_accuracy": 0.9874830852503383,
4
+ "eval_f1": 0.9868467827941699,
5
+ "eval_loss": 0.04571225121617317,
6
+ "eval_precision": 0.9864960909737029,
7
  "eval_recall": 0.9871977240398293,
8
+ "eval_runtime": 81.5092,
9
+ "eval_samples_per_second": 36.266,
10
+ "eval_steps_per_second": 1.141,
11
  "total_flos": 3.11005644392448e+16,
12
+ "train_loss": 0.03391138697509353,
13
+ "train_runtime": 20063.9202,
14
+ "train_samples_per_second": 5.891,
15
+ "train_steps_per_second": 0.184
16
  }
eval_results.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
  "epoch": 5.0,
3
- "eval_accuracy": 0.9868020304568528,
4
- "eval_f1": 0.9861751152073732,
5
- "eval_loss": 0.04318568482995033,
6
- "eval_precision": 0.9830388692579505,
7
- "eval_recall": 0.9893314366998578,
8
- "eval_runtime": 82.3694,
9
- "eval_samples_per_second": 35.875,
10
- "eval_steps_per_second": 1.129
11
  }
 
1
  {
2
  "epoch": 5.0,
3
+ "eval_accuracy": 0.9895093062605753,
4
+ "eval_f1": 0.9889953851615193,
5
+ "eval_loss": 0.039101775735616684,
6
+ "eval_precision": 0.9872430900070872,
7
+ "eval_recall": 0.9907539118065434,
8
+ "eval_runtime": 81.401,
9
+ "eval_samples_per_second": 36.302,
10
+ "eval_steps_per_second": 1.142
11
  }
test_results.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
  "epoch": 5.0,
3
- "eval_accuracy": 0.9861299052774019,
4
- "eval_f1": 0.9854455094071708,
5
- "eval_loss": 0.045191410928964615,
6
- "eval_precision": 0.9836995038979447,
7
  "eval_recall": 0.9871977240398293,
8
- "eval_runtime": 82.3688,
9
- "eval_samples_per_second": 35.887,
10
- "eval_steps_per_second": 1.129
11
  }
 
1
  {
2
  "epoch": 5.0,
3
+ "eval_accuracy": 0.9874830852503383,
4
+ "eval_f1": 0.9868467827941699,
5
+ "eval_loss": 0.04571225121617317,
6
+ "eval_precision": 0.9864960909737029,
7
  "eval_recall": 0.9871977240398293,
8
+ "eval_runtime": 81.5092,
9
+ "eval_samples_per_second": 36.266,
10
+ "eval_steps_per_second": 1.141
11
  }
train_results.json CHANGED
@@ -1,8 +1,8 @@
1
  {
2
  "epoch": 5.0,
3
  "total_flos": 3.11005644392448e+16,
4
- "train_loss": 0.03944617702703827,
5
- "train_runtime": 23402.751,
6
- "train_samples_per_second": 10.101,
7
- "train_steps_per_second": 0.316
8
  }
 
1
  {
2
  "epoch": 5.0,
3
  "total_flos": 3.11005644392448e+16,
4
+ "train_loss": 0.03391138697509353,
5
+ "train_runtime": 20063.9202,
6
+ "train_samples_per_second": 5.891,
7
+ "train_steps_per_second": 0.184
8
  }