Modalities: Text
Formats: json
Size: < 1K
Libraries: Datasets, Dask
Commit 4814609 · committed by ayushi0430 · 1 parent: 374234a

add results
icd-ft-mistral-7b-instruct-hf/results.json ADDED
@@ -0,0 +1,70 @@
+ {
+     "config": {
+         "model": "LaminiModel",
+         "model_args": null,
+         "batch_size": 1,
+         "batch_sizes": [],
+         "device": null,
+         "use_cache": null,
+         "limit": 100,
+         "bootstrap_iters": 100000,
+         "gen_kwargs": null,
+         "model_dtype": "bfloat16",
+         "model_name": "884ac2f39ab1f44c8291bb40d59f7224b39256e4fbe2f608b2c56243fa72811f",
+         "model_sha": "main"
+     },
+     "results": {
+         "product_response_subjective_score": {
+             "product_response_subjective_score": 3.1875
+         },
+         "product_id_precision_score": {
+             "product_id_precision_score": 0.0
+         },
+         "earnings_response_subjective_score": {
+             "earnings_response_subjective_score": 2.2363636363636363
+         },
+         "earnings_precision_score": {
+             "earnings_precision_score": 0.01818181818181818
+         },
+         "icd11_response_subjective_score": {
+             "icd11_response_subjective_score": 2.2888888888888888
+         },
+         "icd11_precision_score": {
+             "icd11_precision_score": 0.2
+         },
+         "mmlu_flan_n_shot_generative_global_facts": {
+             "exact_match,strict-match": 0.34,
+             "exact_match_stderr,strict-match": 0.047609522856952344,
+             "exact_match,flexible-extract": 0.34,
+             "exact_match_stderr,flexible-extract": 0.047609522856952344,
+             "alias": "mmlu_flan_n_shot_generative_global_facts"
+         },
+         "truthfulqa_gen": {
+             "bleu_max,none": 17.795881878483,
+             "bleu_max_stderr,none": 1.813182129677248,
+             "bleu_acc,none": 0.53,
+             "bleu_acc_stderr,none": 0.050161355804659205,
+             "bleu_diff,none": -2.125661702465655,
+             "bleu_diff_stderr,none": 1.434744252223102,
+             "rouge1_max,none": 39.008845311741894,
+             "rouge1_max_stderr,none": 2.114340485165445,
+             "rouge1_acc,none": 0.52,
+             "rouge1_acc_stderr,none": 0.050211673156867795,
+             "rouge1_diff,none": -1.6407676542576224,
+             "rouge1_diff_stderr,none": 1.797673507015776,
+             "rouge2_max,none": 27.163127972432466,
+             "rouge2_max_stderr,none": 2.1194097651441424,
+             "rouge2_acc,none": 0.49,
+             "rouge2_acc_stderr,none": 0.05024183937956913,
+             "rouge2_diff,none": -2.254914453263179,
+             "rouge2_diff_stderr,none": 1.9148845239446861,
+             "rougeL_max,none": 36.01824298531865,
+             "rougeL_max_stderr,none": 2.128032657919697,
+             "rougeL_acc,none": 0.53,
+             "rougeL_acc_stderr,none": 0.05016135580465919,
+             "rougeL_diff,none": -1.8971433636477628,
+             "rougeL_diff_stderr,none": 1.8261427558039585,
+             "alias": "truthfulqa_gen"
+         }
+     }
+ }
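Each added results.json has the same shape: a "config" block describing the evaluation run and a "results" block keyed by task, where metric keys such as "exact_match,strict-match" pair a metric name with the filter it was computed under and each task block carries an "alias" label. A minimal sketch for reading one of these files with the Python standard library, assuming the file has been downloaded locally under the relative path shown above:

```python
import json

# Load the file added in this commit (local path assumed).
with open("icd-ft-mistral-7b-instruct-hf/results.json") as f:
    report = json.load(f)

config = report["config"]
results = report["results"]

# Run settings recorded alongside the scores.
print(config["model_name"], config["model_dtype"], f'limit={config["limit"]}')

# Individual metrics are nested one level below each task name.
print("ICD-11 precision:",
      results["icd11_precision_score"]["icd11_precision_score"])
print("MMLU global_facts exact match (strict):",
      results["mmlu_flan_n_shot_generative_global_facts"]["exact_match,strict-match"])
```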
products-ft-llama2-7b-chat-hf/results.json ADDED
@@ -0,0 +1,70 @@
+ {
+     "config": {
+         "model": "LaminiModel",
+         "model_args": null,
+         "batch_size": 1,
+         "batch_sizes": [],
+         "device": null,
+         "use_cache": null,
+         "limit": 100,
+         "bootstrap_iters": 100000,
+         "gen_kwargs": null,
+         "model_dtype": "bfloat16",
+         "model_name": "products-ft-llama2-7b-chat-hf",
+         "model_sha": "main"
+     },
+     "results": {
+         "product_response_subjective_score": {
+             "product_response_subjective_score": 2.823529411764706
+         },
+         "product_id_precision_score": {
+             "product_id_precision_score": 0.0
+         },
+         "earnings_response_subjective_score": {
+             "earnings_response_subjective_score": 1.3
+         },
+         "earnings_precision_score": {
+             "earnings_precision_score": 0.0
+         },
+         "icd11_response_subjective_score": {
+             "icd11_response_subjective_score": 1.0
+         },
+         "icd11_precision_score": {
+             "icd11_precision_score": 0.042105263157894736
+         },
+         "mmlu_flan_n_shot_generative_global_facts": {
+             "exact_match,strict-match": 0.24,
+             "exact_match_stderr,strict-match": 0.04292346959909281,
+             "exact_match,flexible-extract": 0.29,
+             "exact_match_stderr,flexible-extract": 0.045604802157206845,
+             "alias": "mmlu_flan_n_shot_generative_global_facts"
+         },
+         "truthfulqa_gen": {
+             "bleu_max,none": 6.531036466069969,
+             "bleu_max_stderr,none": 0.9043289680312812,
+             "bleu_acc,none": 0.46,
+             "bleu_acc_stderr,none": 0.05009082659620332,
+             "bleu_diff,none": -0.9323317213212583,
+             "bleu_diff_stderr,none": 0.8067385601751962,
+             "rouge1_max,none": 20.89456732926107,
+             "rouge1_max_stderr,none": 1.4865748512002068,
+             "rouge1_acc,none": 0.44,
+             "rouge1_acc_stderr,none": 0.04988876515698589,
+             "rouge1_diff,none": -0.9440419957596459,
+             "rouge1_diff_stderr,none": 1.0937789188254103,
+             "rouge2_max,none": 13.502001575762279,
+             "rouge2_max_stderr,none": 1.3536063014199604,
+             "rouge2_acc,none": 0.4,
+             "rouge2_acc_stderr,none": 0.049236596391733084,
+             "rouge2_diff,none": -1.2095201954204824,
+             "rouge2_diff_stderr,none": 1.1259112814340064,
+             "rougeL_max,none": 19.029126902563377,
+             "rougeL_max_stderr,none": 1.4521123787780108,
+             "rougeL_acc,none": 0.4,
+             "rougeL_acc_stderr,none": 0.049236596391733084,
+             "rougeL_diff,none": -0.9873337635962663,
+             "rougeL_diff_stderr,none": 0.9428926541420342,
+             "alias": "truthfulqa_gen"
+         }
+     }
+ }
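Because both files in this commit share the same schema, the two fine-tuned models can be compared metric by metric. A hedged sketch of such a comparison, assuming both files sit in the working directory under the paths shown above; the load_results helper is illustrative and not part of the dataset:

```python
import json

PATHS = {
    "icd-ft-mistral-7b-instruct-hf": "icd-ft-mistral-7b-instruct-hf/results.json",
    "products-ft-llama2-7b-chat-hf": "products-ft-llama2-7b-chat-hf/results.json",
}

def load_results(path):
    """Flatten one results.json into {"task/metric": value}, dropping aliases."""
    with open(path) as f:
        report = json.load(f)
    flat = {}
    for task, metrics in report["results"].items():
        for key, value in metrics.items():
            if key != "alias":
                flat[f"{task}/{key}"] = value
    return flat

runs = {name: load_results(path) for name, path in PATHS.items()}

# Print every metric reported by both runs, side by side, in a stable order.
shared = sorted(set.intersection(*(set(r) for r in runs.values())))
print("metric".ljust(55), *PATHS)
for metric in shared:
    row = "  ".join(f"{runs[name][metric]:.4f}" for name in PATHS)
    print(f"{metric:55s} {row}")
```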