ethangclark committed
Commit b877d67 · verified · 1 Parent(s): e5647af

End of training

README.md CHANGED
@@ -15,14 +15,14 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [microsoft/layoutlm-base-uncased](https://huggingface.co/microsoft/layoutlm-base-uncased) on the None dataset.
 It achieves the following results on the evaluation set:
- - Loss: 0.5509
- - : {'precision': 0.3076923076923077, 'recall': 0.36363636363636365, 'f1': 0.33333333333333337, 'number': 22}
- - C: {'precision': 0.26666666666666666, 'recall': 0.34285714285714286, 'f1': 0.3, 'number': 35}
- - H: {'precision': 0.4074074074074074, 'recall': 0.4230769230769231, 'f1': 0.4150943396226415, 'number': 26}
- - Overall Precision: 0.3163
- - Overall Recall: 0.3735
- - Overall F1: 0.3425
- - Overall Accuracy: 0.8835
+ - Loss: 0.5985
+ - : {'precision': 0.17391304347826086, 'recall': 0.18181818181818182, 'f1': 0.17777777777777776, 'number': 22}
+ - C: {'precision': 0.20408163265306123, 'recall': 0.2857142857142857, 'f1': 0.23809523809523808, 'number': 35}
+ - H: {'precision': 0.41935483870967744, 'recall': 0.5, 'f1': 0.45614035087719296, 'number': 26}
+ - Overall Precision: 0.2621
+ - Overall Recall: 0.3253
+ - Overall F1: 0.2903
+ - Overall Accuracy: 0.8694
 
 ## Model description
 
@@ -51,23 +51,23 @@ The following hyperparameters were used during training:
 
 ### Training results
 
- | Training Loss | Epoch | Step | Validation Loss | | C | H | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
- |:-------------:|:-----:|:----:|:---------------:|:-------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------:|:----------------------------------------------------------------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
- | 1.2408 | 1.0 | 2 | 0.9939 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
- | 0.6685 | 2.0 | 4 | 0.8477 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
- | 0.5229 | 3.0 | 6 | 0.7519 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
- | 0.4147 | 4.0 | 8 | 0.6701 | {'precision': 0.045454545454545456, 'recall': 0.045454545454545456, 'f1': 0.045454545454545456, 'number': 22} | {'precision': 0.045454545454545456, 'recall': 0.02857142857142857, 'f1': 0.03508771929824561, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0455 | 0.0241 | 0.0315 | 0.8464 |
- | 0.2847 | 5.0 | 10 | 0.6154 | {'precision': 0.125, 'recall': 0.13636363636363635, 'f1': 0.13043478260869565, 'number': 22} | {'precision': 0.041666666666666664, 'recall': 0.02857142857142857, 'f1': 0.03389830508474576, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0833 | 0.0482 | 0.0611 | 0.8643 |
- | 0.2597 | 6.0 | 12 | 0.5752 | {'precision': 0.034482758620689655, 'recall': 0.045454545454545456, 'f1': 0.0392156862745098, 'number': 22} | {'precision': 0.06896551724137931, 'recall': 0.05714285714285714, 'f1': 0.0625, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0517 | 0.0361 | 0.0426 | 0.8668 |
- | 0.3162 | 7.0 | 14 | 0.6510 | {'precision': 0.15789473684210525, 'recall': 0.13636363636363635, 'f1': 0.14634146341463414, 'number': 22} | {'precision': 0.09523809523809523, 'recall': 0.05714285714285714, 'f1': 0.07142857142857142, 'number': 35} | {'precision': 0.5, 'recall': 0.038461538461538464, 'f1': 0.07142857142857144, 'number': 26} | 0.1429 | 0.0723 | 0.0960 | 0.8399 |
- | 0.3601 | 8.0 | 16 | 0.6555 | {'precision': 0.13636363636363635, 'recall': 0.13636363636363635, 'f1': 0.13636363636363635, 'number': 22} | {'precision': 0.07692307692307693, 'recall': 0.05714285714285714, 'f1': 0.06557377049180328, 'number': 35} | {'precision': 0.6, 'recall': 0.11538461538461539, 'f1': 0.1935483870967742, 'number': 26} | 0.1509 | 0.0964 | 0.1176 | 0.8438 |
- | 0.3773 | 9.0 | 18 | 0.5827 | {'precision': 0.125, 'recall': 0.13636363636363635, 'f1': 0.13043478260869565, 'number': 22} | {'precision': 0.125, 'recall': 0.11428571428571428, 'f1': 0.11940298507462688, 'number': 35} | {'precision': 0.6363636363636364, 'recall': 0.2692307692307692, 'f1': 0.37837837837837834, 'number': 26} | 0.2090 | 0.1687 | 0.1867 | 0.8681 |
- | 0.2094 | 10.0 | 20 | 0.5452 | {'precision': 0.25, 'recall': 0.2727272727272727, 'f1': 0.2608695652173913, 'number': 22} | {'precision': 0.2571428571428571, 'recall': 0.2571428571428571, 'f1': 0.2571428571428571, 'number': 35} | {'precision': 0.625, 'recall': 0.38461538461538464, 'f1': 0.4761904761904762, 'number': 26} | 0.3333 | 0.3012 | 0.3165 | 0.8899 |
- | 0.1932 | 11.0 | 22 | 0.5436 | {'precision': 0.23076923076923078, 'recall': 0.2727272727272727, 'f1': 0.24999999999999994, 'number': 22} | {'precision': 0.23076923076923078, 'recall': 0.2571428571428571, 'f1': 0.24324324324324323, 'number': 35} | {'precision': 0.47368421052631576, 'recall': 0.34615384615384615, 'f1': 0.39999999999999997, 'number': 26} | 0.2857 | 0.2892 | 0.2874 | 0.8848 |
- | 0.1774 | 12.0 | 24 | 0.5541 | {'precision': 0.2916666666666667, 'recall': 0.3181818181818182, 'f1': 0.30434782608695654, 'number': 22} | {'precision': 0.2682926829268293, 'recall': 0.3142857142857143, 'f1': 0.2894736842105263, 'number': 35} | {'precision': 0.4782608695652174, 'recall': 0.4230769230769231, 'f1': 0.44897959183673475, 'number': 26} | 0.3295 | 0.3494 | 0.3392 | 0.8835 |
- | 0.159 | 13.0 | 26 | 0.5567 | {'precision': 0.32, 'recall': 0.36363636363636365, 'f1': 0.3404255319148936, 'number': 22} | {'precision': 0.2857142857142857, 'recall': 0.34285714285714286, 'f1': 0.3116883116883117, 'number': 35} | {'precision': 0.5217391304347826, 'recall': 0.46153846153846156, 'f1': 0.4897959183673469, 'number': 26} | 0.3556 | 0.3855 | 0.3699 | 0.8835 |
- | 0.1623 | 14.0 | 28 | 0.5543 | {'precision': 0.32, 'recall': 0.36363636363636365, 'f1': 0.3404255319148936, 'number': 22} | {'precision': 0.3023255813953488, 'recall': 0.37142857142857144, 'f1': 0.3333333333333333, 'number': 35} | {'precision': 0.46153846153846156, 'recall': 0.46153846153846156, 'f1': 0.46153846153846156, 'number': 26} | 0.3511 | 0.3976 | 0.3729 | 0.8860 |
- | 0.2053 | 15.0 | 30 | 0.5509 | {'precision': 0.3076923076923077, 'recall': 0.36363636363636365, 'f1': 0.33333333333333337, 'number': 22} | {'precision': 0.26666666666666666, 'recall': 0.34285714285714286, 'f1': 0.3, 'number': 35} | {'precision': 0.4074074074074074, 'recall': 0.4230769230769231, 'f1': 0.4150943396226415, 'number': 26} | 0.3163 | 0.3735 | 0.3425 | 0.8835 |
+ | Training Loss | Epoch | Step | Validation Loss | | C | H | Overall Precision | Overall Recall | Overall F1 | Overall Accuracy |
+ |:-------------:|:-----:|:----:|:---------------:|:-------------------------------------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------------------:|:---------------------------------------------------------------------------------------------------------:|:-----------------:|:--------------:|:----------:|:----------------:|
+ | 1.3414 | 1.0 | 2 | 0.9941 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
+ | 0.6808 | 2.0 | 4 | 0.8831 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
+ | 0.5134 | 3.0 | 6 | 0.7517 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
+ | 0.4175 | 4.0 | 8 | 0.6992 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8182 |
+ | 0.3048 | 5.0 | 10 | 0.6476 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.13333333333333333, 'recall': 0.05714285714285714, 'f1': 0.08, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0667 | 0.0241 | 0.0354 | 0.8310 |
+ | 0.2767 | 6.0 | 12 | 0.6375 | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0 | 0.0 | 0.0 | 0.8399 |
+ | 0.3514 | 7.0 | 14 | 0.6033 | {'precision': 0.047619047619047616, 'recall': 0.045454545454545456, 'f1': 0.046511627906976744, 'number': 22} | {'precision': 0.047619047619047616, 'recall': 0.02857142857142857, 'f1': 0.03571428571428571, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0476 | 0.0241 | 0.032 | 0.8656 |
+ | 0.3766 | 8.0 | 16 | 0.6462 | {'precision': 0.13333333333333333, 'recall': 0.09090909090909091, 'f1': 0.10810810810810811, 'number': 22} | {'precision': 0.06666666666666667, 'recall': 0.02857142857142857, 'f1': 0.04, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.1 | 0.0361 | 0.0531 | 0.8271 |
+ | 0.4447 | 9.0 | 18 | 0.6570 | {'precision': 0.06666666666666667, 'recall': 0.045454545454545456, 'f1': 0.05405405405405406, 'number': 22} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 35} | {'precision': 0.0, 'recall': 0.0, 'f1': 0.0, 'number': 26} | 0.0333 | 0.0120 | 0.0177 | 0.8182 |
+ | 0.2359 | 10.0 | 20 | 0.6297 | {'precision': 0.15, 'recall': 0.13636363636363635, 'f1': 0.14285714285714282, 'number': 22} | {'precision': 0.08333333333333333, 'recall': 0.05714285714285714, 'f1': 0.06779661016949153, 'number': 35} | {'precision': 0.5, 'recall': 0.07692307692307693, 'f1': 0.13333333333333336, 'number': 26} | 0.1458 | 0.0843 | 0.1069 | 0.8438 |
+ | 0.2136 | 11.0 | 22 | 0.6072 | {'precision': 0.20833333333333334, 'recall': 0.22727272727272727, 'f1': 0.21739130434782608, 'number': 22} | {'precision': 0.16666666666666666, 'recall': 0.17142857142857143, 'f1': 0.16901408450704225, 'number': 35} | {'precision': 0.42857142857142855, 'recall': 0.23076923076923078, 'f1': 0.3, 'number': 26} | 0.2297 | 0.2048 | 0.2166 | 0.8617 |
+ | 0.2114 | 12.0 | 24 | 0.5978 | {'precision': 0.17391304347826086, 'recall': 0.18181818181818182, 'f1': 0.17777777777777776, 'number': 22} | {'precision': 0.1951219512195122, 'recall': 0.22857142857142856, 'f1': 0.21052631578947367, 'number': 35} | {'precision': 0.4090909090909091, 'recall': 0.34615384615384615, 'f1': 0.37500000000000006, 'number': 26} | 0.2442 | 0.2530 | 0.2485 | 0.8656 |
+ | 0.1826 | 13.0 | 26 | 0.5982 | {'precision': 0.17391304347826086, 'recall': 0.18181818181818182, 'f1': 0.17777777777777776, 'number': 22} | {'precision': 0.18181818181818182, 'recall': 0.22857142857142856, 'f1': 0.20253164556962025, 'number': 35} | {'precision': 0.4230769230769231, 'recall': 0.4230769230769231, 'f1': 0.4230769230769231, 'number': 26} | 0.2473 | 0.2771 | 0.2614 | 0.8668 |
+ | 0.1861 | 14.0 | 28 | 0.5983 | {'precision': 0.17391304347826086, 'recall': 0.18181818181818182, 'f1': 0.17777777777777776, 'number': 22} | {'precision': 0.21739130434782608, 'recall': 0.2857142857142857, 'f1': 0.24691358024691357, 'number': 35} | {'precision': 0.4642857142857143, 'recall': 0.5, 'f1': 0.4814814814814815, 'number': 26} | 0.2784 | 0.3253 | 0.3000 | 0.8707 |
+ | 0.2442 | 15.0 | 30 | 0.5985 | {'precision': 0.17391304347826086, 'recall': 0.18181818181818182, 'f1': 0.17777777777777776, 'number': 22} | {'precision': 0.20408163265306123, 'recall': 0.2857142857142857, 'f1': 0.23809523809523808, 'number': 35} | {'precision': 0.41935483870967744, 'recall': 0.5, 'f1': 0.45614035087719296, 'number': 26} | 0.2621 | 0.3253 | 0.2903 | 0.8694 |
 
 
  ### Framework versions
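The per-label dictionaries in the card (the unnamed label, `C`, and `H`, each reporting `precision`/`recall`/`f1`/`number`) and the overall precision/recall/F1/accuracy match the output format of the `seqeval` metric used in the standard Transformers token-classification recipe. The sketch below shows how metrics of that shape are typically computed; it is an illustrative assumption rather than this repository's training code, and `id2label` and the function name are hypothetical.

```python
# Hedged sketch: computing per-label and overall metrics in the card's format
# (assumed evaluation setup, not taken from this repository).
import numpy as np
import evaluate  # pip install evaluate seqeval

seqeval_metric = evaluate.load("seqeval")

def compute_metrics(logits, labels, id2label):
    """logits: (batch, seq_len, num_labels); labels: (batch, seq_len), -100 = ignored tokens."""
    predictions = np.argmax(logits, axis=-1)
    true_seqs = [[id2label[int(l)] for l in row if l != -100] for row in labels]
    pred_seqs = [
        [id2label[int(p)] for p, l in zip(p_row, l_row) if l != -100]
        for p_row, l_row in zip(predictions, labels)
    ]
    # Returns one dict per label ({'precision', 'recall', 'f1', 'number'}) plus
    # overall_precision, overall_recall, overall_f1 and overall_accuracy.
    return seqeval_metric.compute(predictions=pred_seqs, references=true_seqs)
```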
logs/events.out.tfevents.1711206742.ethanmbp.lan.34423.0 CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:40aa2d31a9f37b9506d52442893b90490c2f4f3fa6118da5aa8556dbd373a762
- size 5414
+ oid sha256:4dd1690302c8cf0e618e67ea0050ba7cdade6a13b84f09d323765666fa11464f
+ size 15590
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ff381db291e27c638b6e3dd7ab1bcc4eb205c5625cb54d72c19718d2c6bf9cd4
+ oid sha256:099bb93cfe5e201ed12dac7973bee497c7c6d011e6e4135fa217f500c0fcfcb9
  size 450552060
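The updated `model.safetensors` pointer is the fine-tuned weight file that `from_pretrained` reads when the checkpoint is loaded. A minimal loading sketch is below, assuming a local clone of this repository and a token-classification head (as the model card describes); the path is hypothetical.

```python
# Hedged sketch: loading the fine-tuned weights stored in model.safetensors.
from transformers import AutoModelForTokenClassification, AutoTokenizer

checkpoint = "./layoutlm-finetuned"  # hypothetical local clone of this repo
model = AutoModelForTokenClassification.from_pretrained(checkpoint)  # reads model.safetensors
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
print(model.config.id2label)  # the labels behind the per-label metrics in the card
```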
tokenizer.json CHANGED
@@ -1,7 +1,21 @@
 {
   "version": "1.0",
-  "truncation": null,
-  "padding": null,
+  "truncation": {
+    "direction": "Right",
+    "max_length": 512,
+    "strategy": "LongestFirst",
+    "stride": 0
+  },
+  "padding": {
+    "strategy": {
+      "Fixed": 512
+    },
+    "direction": "Right",
+    "pad_to_multiple_of": null,
+    "pad_id": 0,
+    "pad_type_id": 0,
+    "pad_token": "[PAD]"
+  },
   "added_tokens": [
     {
       "id": 0,
tokenizer_config.json CHANGED
@@ -54,11 +54,9 @@
   "do_basic_tokenize": true,
   "do_lower_case": true,
   "mask_token": "[MASK]",
-  "max_length": 512,
   "model_max_length": 512,
   "never_split": null,
   "only_label_first_subword": true,
-  "pad_to_multiple_of": null,
   "pad_token": "[PAD]",
   "pad_token_box": [
     0,
@@ -67,8 +65,6 @@
     0
   ],
   "pad_token_label": -100,
-  "pad_token_type_id": 0,
-  "padding_side": "right",
   "processor_class": "LayoutLMv2Processor",
   "sep_token": "[SEP]",
   "sep_token_box": [
@@ -77,11 +73,8 @@
     1000,
     1000
   ],
-  "stride": 0,
   "strip_accents": null,
   "tokenize_chinese_chars": true,
   "tokenizer_class": "LayoutLMv2Tokenizer",
-  "truncation_side": "right",
-  "truncation_strategy": "longest_first",
   "unk_token": "[UNK]"
  }
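The two tokenizer changes in this commit go together: `tokenizer.json` gains explicit `truncation` and `padding` state, while `tokenizer_config.json` drops per-call keys (`max_length`, `stride`, `pad_to_multiple_of`, `padding_side`, `pad_token_type_id`, `truncation_side`, `truncation_strategy`). A minimal sketch of how truncation/padding state of that shape gets written into `tokenizer.json` with the `tokenizers` library is below; the workflow is an assumption (the same state is also set implicitly when a fast tokenizer is called with `truncation=True, padding="max_length", max_length=512` and then saved), not necessarily how this commit was produced.

```python
# Hedged sketch: producing the "truncation"/"padding" blocks shown in the
# tokenizer.json diff above (assumed workflow, not this repo's actual script).
from tokenizers import Tokenizer

tok = Tokenizer.from_file("tokenizer.json")
# direction "Right", max_length 512, strategy "LongestFirst", stride 0
tok.enable_truncation(max_length=512, stride=0, strategy="longest_first")
# Fixed(512) padding on the right with [PAD] (pad_id 0, pad_type_id 0)
tok.enable_padding(direction="right", pad_id=0, pad_type_id=0,
                   pad_token="[PAD]", length=512)
tok.save("tokenizer.json")  # persists both blocks
```

The keys removed from `tokenizer_config.json` are per-call encoding arguments, so the same behaviour can still be requested when the tokenizer is invoked. A hypothetical call follows (made-up words and boxes, assuming the declared `LayoutLMv2Tokenizer` class, which expects word-level bounding boxes):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./layoutlm-finetuned")  # hypothetical local checkpoint
words = ["Invoice", "Total", "42.00"]
boxes = [[60, 50, 200, 80], [60, 100, 150, 130], [160, 100, 240, 130]]  # 0-1000 normalized
encoding = tokenizer(words, boxes=boxes,
                     padding="max_length",  # pad to a fixed length at call time
                     truncation=True,       # truncate overflowing tokens at call time
                     max_length=512,        # the value previously stored in the config
                     return_tensors="pt")
print(encoding["input_ids"].shape)  # torch.Size([1, 512])
```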
 