Commit fc65222
Parent(s): 6eade8d
Updated Parameters

README.md CHANGED
@@ -16,31 +16,34 @@ datasets:
 
 ## Hyperparameters
 ```
-batch_size
-n_epochs
+batch_size=8
+n_epochs=6
 base_LM_model = "deepset/roberta-base-squad2"
-max_seq_len
+max_seq_len=386
 doc_stride=128
-learning_rate
-
-
+learning_rate=1.5e-5
+adam_epsilon=1e-5
+adam_beta1=0.95
+adam_beta2=0.999
+warmup_steps=100
+weight_decay=0.01
 n_best_size=20
 max_answer_length=30
 min_null_score=7.0
-CLS_threshold=-3
 ```
+##### There is a special threshold value CLS_threshold=-3 used to more accurately identify no answers [Logic will be available in GitHub Repo - TBD]
 
 ## Performance
 ```
 "exact": 81.192622
-"f1":
+"f1": 83.95408
 "total": 11873
 "HasAns_exact": 74.190283
-"HasAns_f1":
+"HasAns_f1": 79.721119
 "HasAns_total": 5928
-"NoAns_exact":
-"NoAns_f1":
-"NoAns_total":
+"NoAns_exact": 88.174937
+"NoAns_f1": 88.174937
+"NoAns_total": 5945
 ```
 
 ## Usage
@@ -62,4 +65,7 @@ print(res)
 # b) Load model & tokenizer
 model = AutoModelForQuestionAnswering.from_pretrained(model_name)
 tokenizer = AutoTokenizer.from_pretrained(model_name)
-```
+```
+
+## Authors
+Premal Matalia
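The hyperparameter block added in this commit lists raw values without the accompanying training code, and the card does not say which training framework was used. As a rough illustration only, assuming the Hugging Face `Trainer` API, the values might map onto `TrainingArguments` as in the sketch below; `output_dir` is a placeholder, `batch_size` is read here as the per-device batch size, and `max_seq_len`/`doc_stride` belong to the tokenization step rather than to `TrainingArguments`.

```python
# Hypothetical mapping of the card's hyperparameters onto TrainingArguments;
# the actual training script is not part of this commit.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="./roberta-base-squad2-finetuned",  # placeholder path
    per_device_train_batch_size=8,  # batch_size=8 (assumed per-device)
    num_train_epochs=6,             # n_epochs=6
    learning_rate=1.5e-5,
    adam_epsilon=1e-5,
    adam_beta1=0.95,
    adam_beta2=0.999,
    warmup_steps=100,
    weight_decay=0.01,
)

# max_seq_len=386 and doc_stride=128 apply at preprocessing time instead, e.g.:
# tokenizer(questions, contexts, max_length=386, stride=128,
#           truncation="only_second", return_overflowing_tokens=True)
```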
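The added note about `CLS_threshold=-3` says the exact no-answer logic will only be published later (TBD), so the snippet below is a generic sketch of the standard SQuAD 2.0 null-score recipe, not the author's confirmed implementation: the logits at the CLS position give a "no answer" score, which is compared against the best answer span, and the question is marked unanswerable when the null score wins by more than a threshold. The checkpoint name is a placeholder (the fine-tuned model id is not shown in this diff), and how `min_null_score=7.0` interacts with `CLS_threshold` is left open because the card does not specify it.

```python
# Sketch only: the commit marks the CLS_threshold logic as TBD, so this follows
# the generic SQuAD 2.0 null-score recipe rather than the author's actual code.
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

model_name = "deepset/roberta-base-squad2"  # placeholder; fine-tuned checkpoint id not shown in this diff
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForQuestionAnswering.from_pretrained(model_name)

question = "Who discovered penicillin?"
context = "The city hosted the Olympic Games in 1964."  # context without the answer

# max_seq_len=386 from the card; doc_stride=128 would only matter with
# return_overflowing_tokens=True for contexts longer than the window.
inputs = tokenizer(question, context, truncation="only_second",
                   max_length=386, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

start_logits = outputs.start_logits[0]
end_logits = outputs.end_logits[0]

# Null ("no answer") score: logits at the CLS position (index 0).
null_score = (start_logits[0] + end_logits[0]).item()

# Best non-null span score (naive search, capped at max_answer_length=30;
# the card's n_best_size pruning is skipped for brevity).
best_span_score = max(
    (start_logits[i] + end_logits[j]).item()
    for i in range(1, len(start_logits))
    for j in range(i, min(i + 30, len(end_logits)))
)

# Assumed decision rule: predict "no answer" when the null score beats the best
# span by more than a threshold; CLS_threshold=-3 is plugged in purely for illustration.
CLS_threshold = -3
if null_score - best_span_score > CLS_threshold:
    print("no answer")
else:
    print("answer found")
```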