alrope committed
Commit e073902 · verified · 1 Parent(s): 5f04839

Upload folder using huggingface_hub

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. temperature=0.0/Llama-2-13b-chat-hf.json +27 -0
  2. temperature=0.0/Llama-2-70b-chat-hf.json +27 -0
  3. temperature=0.0/Llama-2-7b-chat-hf.json +27 -0
  4. temperature=0.0/Llama-3.1-70B-Instruct.json +27 -0
  5. temperature=0.0/Llama-3.1-8B-Instruct.json +27 -0
  6. temperature=0.0/Mistral-7B-Instruct-v0.3.json +27 -0
  7. temperature=0.0/Mistral-Large-Instruct-2407.json +27 -0
  8. temperature=0.0/Mistral-Small-Instruct-2409.json +27 -0
  9. temperature=0.0/OLMo-7B-0724-Instruct-hf.json +27 -0
  10. temperature=0.0/OLMo-7B-SFT-hf.json +27 -0
  11. temperature=0.0/Phi-3-medium-4k-instruct.json +27 -0
  12. temperature=0.0/Qwen1.5-110B-Chat.json +27 -0
  13. temperature=0.0/Qwen2-72B-Instruct.json +27 -0
  14. temperature=0.0/Qwen2.5-72B-Instruct.json +27 -0
  15. temperature=0.0/WizardLM-13B-V1.2.json +27 -0
  16. temperature=0.0/Yi-1.5-34B-Chat.json +27 -0
  17. temperature=0.0/dolly-v2-12b.json +27 -0
  18. temperature=0.0/dolly-v2-7b.json +27 -0
  19. temperature=0.0/gpt4all-13b-snoozy.json +27 -0
  20. temperature=0.0/koala-13B-HF.json +27 -0
  21. temperature=0.0/koala-7B-HF.json +27 -0
  22. temperature=0.0/mpt-7b-chat.json +27 -0
  23. temperature=0.0/oasst-sft-1-pythia-12b.json +27 -0
  24. temperature=0.0/tulu-2-dpo-13b.json +27 -0
  25. temperature=0.0/tulu-2-dpo-70b.json +27 -0
  26. temperature=0.0/tulu-2-dpo-7b.json +27 -0
  27. temperature=0.0/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm.json +27 -0
  28. temperature=0.0/vicuna-13b-v1.5.json +27 -0
  29. temperature=0.0/vicuna-7b-v1.5.json +27 -0
  30. temperature=1.0/Llama-2-13b-chat-hf.json +27 -0
  31. temperature=1.0/Llama-2-70b-chat-hf.json +27 -0
  32. temperature=1.0/Llama-2-7b-chat-hf.json +27 -0
  33. temperature=1.0/Llama-3.1-70B-Instruct.json +27 -0
  34. temperature=1.0/Llama-3.1-8B-Instruct.json +27 -0
  35. temperature=1.0/Mistral-7B-Instruct-v0.3.json +27 -0
  36. temperature=1.0/Mistral-Large-Instruct-2407.json +27 -0
  37. temperature=1.0/Mistral-Small-Instruct-2409.json +27 -0
  38. temperature=1.0/OLMo-7B-0724-Instruct-hf.json +27 -0
  39. temperature=1.0/OLMo-7B-SFT-hf.json +27 -0
  40. temperature=1.0/Phi-3-medium-4k-instruct.json +27 -0
  41. temperature=1.0/Qwen1.5-110B-Chat.json +27 -0
  42. temperature=1.0/Qwen2-72B-Instruct.json +27 -0
  43. temperature=1.0/Qwen2.5-72B-Instruct.json +27 -0
  44. temperature=1.0/WizardLM-13B-V1.2.json +27 -0
  45. temperature=1.0/Yi-1.5-34B-Chat.json +27 -0
  46. temperature=1.0/dolly-v2-12b.json +27 -0
  47. temperature=1.0/dolly-v2-7b.json +27 -0
  48. temperature=1.0/gpt4all-13b-snoozy.json +27 -0
  49. temperature=1.0/koala-13B-HF.json +27 -0
  50. temperature=1.0/koala-7B-HF.json +27 -0
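The commit message above says the folder was uploaded with huggingface_hub. A minimal sketch of such an upload follows; the repo_id and local folder_path are illustrative assumptions, not values taken from this commit.

    # Sketch: upload a local results folder to a Hugging Face dataset repo.
    # repo_id and folder_path are hypothetical; authenticate first with
    # `huggingface-cli login` or pass token=... to HfApi.
    from huggingface_hub import HfApi

    api = HfApi()
    api.upload_folder(
        folder_path="results",                 # contains temperature=0.0/ and temperature=1.0/
        repo_id="your-username/your-dataset",  # hypothetical target repository
        repo_type="dataset",
        commit_message="Upload folder using huggingface_hub",
    )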
temperature=0.0/Llama-2-13b-chat-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-2-13b-chat-hf",
+ "brainstorm": 0.167779632721202,
+ "open_qa": 0.5245098039215687,
+ "closed_qa": 0.21782178217821785,
+ "extract": 0.19801980198019803,
+ "generation": 0.15916666666666668,
+ "rewrite": 0.14214285714285715,
+ "summarize": 0.19801980198019803,
+ "classify": 0.2736318407960199,
+ "reasoning_over_numerical_data": 0.11495327102803739,
+ "multi-document_synthesis": 0.09050772626931568,
+ "fact_checking_or_attributed_qa": 0.4199134199134199,
+ "average": 0.22786060041797282,
+ "brainstorm_rank": 15,
+ "open_qa_rank": 24,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 11,
+ "rewrite_rank": 13,
+ "summarize_rank": 6,
+ "classify_rank": 13,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 16,
+ "fact_checking_or_attributed_qa_rank": 9,
+ "average_rank": 13
+ }
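Every file in this commit follows the schema shown above: a model "path", per-category scores plus their "average", and the corresponding per-category ranks. A minimal sketch of loading the temperature=0.0 results from a local copy of this folder and ranking models by average score (the directory layout is assumed to mirror the repository):

    # Sketch: read each score file under temperature=0.0/ and rank models
    # by their "average" field. Assumes a local copy of this folder.
    import json
    from pathlib import Path

    results = []
    for path in sorted(Path("temperature=0.0").glob("*.json")):
        with path.open() as f:
            data = json.load(f)
        results.append((data["path"], data["average"]))

    # Print highest-average models first.
    for model, avg in sorted(results, key=lambda item: item[1], reverse=True):
        print(f"{model}\t{avg:.4f}")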
temperature=0.0/Llama-2-70b-chat-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-2-70b-chat-hf",
+ "brainstorm": 0.22287145242070117,
+ "open_qa": 0.6127450980392156,
+ "closed_qa": 0.2995049504950495,
+ "extract": 0.2599009900990099,
+ "generation": 0.19166666666666665,
+ "rewrite": 0.17071428571428574,
+ "summarize": 0.21782178217821782,
+ "classify": 0.34328358208955223,
+ "reasoning_over_numerical_data": 0.19065420560747662,
+ "multi-document_synthesis": 0.13245033112582782,
+ "fact_checking_or_attributed_qa": 0.4848484848484848,
+ "average": 0.28422380266222613,
+ "brainstorm_rank": 12,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 11,
+ "rewrite_rank": 11,
+ "summarize_rank": 6,
+ "classify_rank": 9,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 13,
+ "fact_checking_or_attributed_qa_rank": 5,
+ "average_rank": 10
+ }
temperature=0.0/Llama-2-7b-chat-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-2-7b-chat-hf",
+ "brainstorm": 0.1636060100166945,
+ "open_qa": 0.41666666666666663,
+ "closed_qa": 0.21287128712871287,
+ "extract": 0.12871287128712872,
+ "generation": 0.13333333333333333,
+ "rewrite": 0.12857142857142856,
+ "summarize": 0.09653465346534654,
+ "classify": 0.19154228855721395,
+ "reasoning_over_numerical_data": 0.07289719626168224,
+ "multi-document_synthesis": 0.07505518763796909,
+ "fact_checking_or_attributed_qa": 0.3354978354978355,
+ "average": 0.177753523493092,
+ "brainstorm_rank": 15,
+ "open_qa_rank": 27,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 16,
+ "rewrite_rank": 13,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 16,
+ "fact_checking_or_attributed_qa_rank": 21,
+ "average_rank": 17
+ }
temperature=0.0/Llama-3.1-70B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-3.1-70B-Instruct",
+ "brainstorm": 0.48580968280467446,
+ "open_qa": 0.8431372549019608,
+ "closed_qa": 0.551980198019802,
+ "extract": 0.45049504950495045,
+ "generation": 0.45499999999999996,
+ "rewrite": 0.4464285714285714,
+ "summarize": 0.43316831683168316,
+ "classify": 0.5447761194029851,
+ "reasoning_over_numerical_data": 0.502803738317757,
+ "multi-document_synthesis": 0.4977924944812362,
+ "fact_checking_or_attributed_qa": 0.5714285714285714,
+ "average": 0.5257109088292902,
+ "brainstorm_rank": 4,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 1,
+ "generation_rank": 2,
+ "rewrite_rank": 2,
+ "summarize_rank": 2,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 4,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 1
+ }
temperature=0.0/Llama-3.1-8B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-3.1-8B-Instruct",
+ "brainstorm": 0.4666110183639399,
+ "open_qa": 0.7941176470588235,
+ "closed_qa": 0.40594059405940597,
+ "extract": 0.32425742574257427,
+ "generation": 0.36333333333333334,
+ "rewrite": 0.3671428571428571,
+ "summarize": 0.32920792079207917,
+ "classify": 0.43034825870646765,
+ "reasoning_over_numerical_data": 0.29906542056074764,
+ "multi-document_synthesis": 0.3576158940397351,
+ "fact_checking_or_attributed_qa": 0.39285714285714285,
+ "average": 0.41186341024155515,
+ "brainstorm_rank": 7,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 1,
+ "extract_rank": 1,
+ "generation_rank": 6,
+ "rewrite_rank": 5,
+ "summarize_rank": 6,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 7,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 6
+ }
temperature=0.0/Mistral-7B-Instruct-v0.3.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mistralai/Mistral-7B-Instruct-v0.3",
+ "brainstorm": 0.2412353923205342,
+ "open_qa": 0.6078431372549019,
+ "closed_qa": 0.21287128712871287,
+ "extract": 0.1410891089108911,
+ "generation": 0.19083333333333333,
+ "rewrite": 0.185,
+ "summarize": 0.1683168316831683,
+ "classify": 0.27114427860696516,
+ "reasoning_over_numerical_data": 0.17289719626168226,
+ "multi-document_synthesis": 0.25496688741721857,
+ "fact_checking_or_attributed_qa": 0.4653679653679654,
+ "average": 0.2646877652986702,
+ "brainstorm_rank": 10,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 11,
+ "rewrite_rank": 11,
+ "summarize_rank": 6,
+ "classify_rank": 14,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 8,
+ "fact_checking_or_attributed_qa_rank": 9,
+ "average_rank": 11
+ }
temperature=0.0/Mistral-Large-Instruct-2407.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mistralai/Mistral-Large-Instruct-2407",
+ "brainstorm": 0.5450751252086812,
+ "open_qa": 0.5882352941176471,
+ "closed_qa": 0.35148514851485146,
+ "extract": 0.39603960396039606,
+ "generation": 0.5141666666666667,
+ "rewrite": 0.505,
+ "summarize": 0.452970297029703,
+ "classify": 0.44527363184079605,
+ "reasoning_over_numerical_data": 0.48504672897196266,
+ "multi-document_synthesis": 0.5905077262693157,
+ "fact_checking_or_attributed_qa": 0.2597402597402597,
+ "average": 0.46668549839275264,
+ "brainstorm_rank": 1,
+ "open_qa_rank": 24,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 1,
+ "rewrite_rank": 1,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 23,
+ "average_rank": 1
+ }
temperature=0.0/Mistral-Small-Instruct-2409.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mistralai/Mistral-Small-Instruct-2409",
+ "brainstorm": 0.4699499165275459,
+ "open_qa": 0.6764705882352942,
+ "closed_qa": 0.3217821782178218,
+ "extract": 0.36386138613861385,
+ "generation": 0.4241666666666667,
+ "rewrite": 0.4492857142857143,
+ "summarize": 0.400990099009901,
+ "classify": 0.39303482587064675,
+ "reasoning_over_numerical_data": 0.397196261682243,
+ "multi-document_synthesis": 0.5474613686534217,
+ "fact_checking_or_attributed_qa": 0.25757575757575757,
+ "average": 0.4274340693512388,
+ "brainstorm_rank": 5,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 5,
+ "rewrite_rank": 2,
+ "summarize_rank": 2,
+ "classify_rank": 8,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 3,
+ "fact_checking_or_attributed_qa_rank": 23,
+ "average_rank": 5
+ }
temperature=0.0/OLMo-7B-0724-Instruct-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/OLMo-7B-SFT",
+ "brainstorm": 0.08931552587646077,
+ "open_qa": 0.3627450980392157,
+ "closed_qa": 0.05693069306930693,
+ "extract": 0.05198019801980198,
+ "generation": 0.059166666666666666,
+ "rewrite": 0.051428571428571435,
+ "summarize": 0.05693069306930693,
+ "classify": 0.07960199004975124,
+ "reasoning_over_numerical_data": 0.04953271028037383,
+ "multi-document_synthesis": 0.0728476821192053,
+ "fact_checking_or_attributed_qa": 0.22727272727272727,
+ "average": 0.10525023235376256,
+ "brainstorm_rank": 17,
+ "open_qa_rank": 27,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 20,
+ "rewrite_rank": 20,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 16,
+ "fact_checking_or_attributed_qa_rank": 26,
+ "average_rank": 25
+ }
temperature=0.0/OLMo-7B-SFT-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/OLMo-7B-SFT",
+ "brainstorm": 0.011686143572621035,
+ "open_qa": 0.8039215686274509,
+ "closed_qa": 0.13366336633663367,
+ "extract": 0.06930693069306931,
+ "generation": 0.06666666666666667,
+ "rewrite": 0.04357142857142857,
+ "summarize": 0.01485148514851485,
+ "classify": 0.22388059701492535,
+ "reasoning_over_numerical_data": 0.04672897196261682,
+ "multi-document_synthesis": 0.024282560706401765,
+ "fact_checking_or_attributed_qa": 0.5,
+ "average": 0.17623270175457534,
+ "brainstorm_rank": 21,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 20,
+ "rewrite_rank": 23,
+ "summarize_rank": 15,
+ "classify_rank": 14,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 22,
+ "fact_checking_or_attributed_qa_rank": 5,
+ "average_rank": 19
+ }
temperature=0.0/Phi-3-medium-4k-instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "microsoft/Phi-3-medium-4k-instruct",
+ "brainstorm": 0.27712854757929883,
+ "open_qa": 0.8137254901960784,
+ "closed_qa": 0.349009900990099,
+ "extract": 0.24257425742574257,
+ "generation": 0.2625,
+ "rewrite": 0.2807142857142857,
+ "summarize": 0.2400990099009901,
+ "classify": 0.4527363184079602,
+ "reasoning_over_numerical_data": 0.3719626168224299,
+ "multi-document_synthesis": 0.17218543046357615,
+ "fact_checking_or_attributed_qa": 0.6017316017316018,
+ "average": 0.3694879508392784,
+ "brainstorm_rank": 10,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 9,
+ "rewrite_rank": 7,
+ "summarize_rank": 6,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 11,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 7
+ }
temperature=0.0/Qwen1.5-110B-Chat.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "Qwen/Qwen1.5-110B-Chat",
+ "brainstorm": 0.4774624373956594,
+ "open_qa": 0.803921568627451,
+ "closed_qa": 0.31435643564356436,
+ "extract": 0.30445544554455445,
+ "generation": 0.4391666666666667,
+ "rewrite": 0.38999999999999996,
+ "summarize": 0.3712871287128713,
+ "classify": 0.4950248756218905,
+ "reasoning_over_numerical_data": 0.3878504672897196,
+ "multi-document_synthesis": 0.5165562913907285,
+ "fact_checking_or_attributed_qa": 0.39826839826839827,
+ "average": 0.4453045195601368,
+ "brainstorm_rank": 5,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 2,
+ "rewrite_rank": 5,
+ "summarize_rank": 4,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 4,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 1
+ }
temperature=0.0/Qwen2-72B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "Qwen/Qwen2-72B-Instruct",
+ "brainstorm": 0.42404006677796324,
+ "open_qa": 0.7156862745098039,
+ "closed_qa": 0.24752475247524752,
+ "extract": 0.32425742574257427,
+ "generation": 0.31666666666666665,
+ "rewrite": 0.2907142857142857,
+ "summarize": 0.1534653465346535,
+ "classify": 0.40049751243781095,
+ "reasoning_over_numerical_data": 0.3841121495327103,
+ "multi-document_synthesis": 0.26379690949227375,
+ "fact_checking_or_attributed_qa": 0.47186147186147187,
+ "average": 0.36296571470413286,
+ "brainstorm_rank": 7,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 8,
+ "rewrite_rank": 7,
+ "summarize_rank": 6,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 8,
+ "fact_checking_or_attributed_qa_rank": 9,
+ "average_rank": 8
+ }
temperature=0.0/Qwen2.5-72B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "Qwen/Qwen2.5-72B-Instruct",
+ "brainstorm": 0.5141903171953255,
+ "open_qa": 0.7156862745098039,
+ "closed_qa": 0.30198019801980197,
+ "extract": 0.4084158415841584,
+ "generation": 0.4766666666666666,
+ "rewrite": 0.45999999999999996,
+ "summarize": 0.36386138613861385,
+ "classify": 0.4353233830845771,
+ "reasoning_over_numerical_data": 0.45607476635514016,
+ "multi-document_synthesis": 0.5750551876379691,
+ "fact_checking_or_attributed_qa": 0.23376623376623376,
+ "average": 0.4491836595416627,
+ "brainstorm_rank": 2,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 2,
+ "rewrite_rank": 2,
+ "summarize_rank": 4,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 26,
+ "average_rank": 1
+ }
temperature=0.0/WizardLM-13B-V1.2.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "WizardLMTeam/WizardLM-13B-V1.2",
+ "brainstorm": 0.1686143572621035,
+ "open_qa": 0.6323529411764706,
+ "closed_qa": 0.22772277227722773,
+ "extract": 0.17079207920792078,
+ "generation": 0.15833333333333333,
+ "rewrite": 0.14714285714285713,
+ "summarize": 0.07673267326732673,
+ "classify": 0.3358208955223881,
+ "reasoning_over_numerical_data": 0.09719626168224299,
+ "multi-document_synthesis": 0.11258278145695365,
+ "fact_checking_or_attributed_qa": 0.4393939393939394,
+ "average": 0.23333499015661488,
+ "brainstorm_rank": 12,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 11,
+ "rewrite_rank": 13,
+ "summarize_rank": 15,
+ "classify_rank": 9,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 13,
+ "fact_checking_or_attributed_qa_rank": 9,
+ "average_rank": 13
+ }
temperature=0.0/Yi-1.5-34B-Chat.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "01-ai/Yi-1.5-34B-Chat",
+ "brainstorm": 0.496661101836394,
+ "open_qa": 0.7156862745098039,
+ "closed_qa": 0.2524752475247525,
+ "extract": 0.2698019801980198,
+ "generation": 0.3616666666666667,
+ "rewrite": 0.30071428571428577,
+ "summarize": 0.21534653465346534,
+ "classify": 0.3582089552238806,
+ "reasoning_over_numerical_data": 0.3177570093457944,
+ "multi-document_synthesis": 0.4105960264900662,
+ "fact_checking_or_attributed_qa": 0.2878787878787879,
+ "average": 0.3624357154583561,
+ "brainstorm_rank": 2,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 6,
+ "rewrite_rank": 7,
+ "summarize_rank": 6,
+ "classify_rank": 9,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 6,
+ "fact_checking_or_attributed_qa_rank": 23,
+ "average_rank": 9
+ }
temperature=0.0/dolly-v2-12b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "databricks/dolly-v2-7b",
+ "brainstorm": 0.00333889816360601,
+ "open_qa": 0.7549019607843137,
+ "closed_qa": 0.12376237623762376,
+ "extract": 0.07425742574257425,
+ "generation": 0.028333333333333332,
+ "rewrite": 0.02,
+ "summarize": 0.0,
+ "classify": 0.06716417910447761,
+ "reasoning_over_numerical_data": 0.03271028037383177,
+ "multi-document_synthesis": 0.008830022075055188,
+ "fact_checking_or_attributed_qa": 0.3961038961038961,
+ "average": 0.1372183974471556,
+ "brainstorm_rank": 24,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 26,
+ "rewrite_rank": 27,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 25
+ }
temperature=0.0/dolly-v2-7b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "databricks/dolly-v2-12b",
+ "brainstorm": 0.001669449081803005,
+ "open_qa": 0.7058823529411765,
+ "closed_qa": 0.09405940594059406,
+ "extract": 0.04950495049504951,
+ "generation": 0.02,
+ "rewrite": 0.012857142857142857,
+ "summarize": 0.0,
+ "classify": 0.06218905472636816,
+ "reasoning_over_numerical_data": 0.03644859813084112,
+ "multi-document_synthesis": 0.002207505518763797,
+ "fact_checking_or_attributed_qa": 0.4199134199134199,
+ "average": 0.12770289814592353,
+ "brainstorm_rank": 24,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 28,
+ "rewrite_rank": 27,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 29,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 25
+ }
temperature=0.0/gpt4all-13b-snoozy.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "nomic-ai/gpt4all-13b-snoozy",
+ "brainstorm": 0.00667779632721202,
+ "open_qa": 0.9019607843137255,
+ "closed_qa": 0.037128712871287134,
+ "extract": 0.06188118811881188,
+ "generation": 0.06416666666666668,
+ "rewrite": 0.05714285714285714,
+ "summarize": 0.009900990099009901,
+ "classify": 0.11691542288557213,
+ "reasoning_over_numerical_data": 0.0514018691588785,
+ "multi-document_synthesis": 0.013245033112582781,
+ "fact_checking_or_attributed_qa": 0.3593073593073593,
+ "average": 0.15270260727308752,
+ "brainstorm_rank": 24,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 20,
+ "rewrite_rank": 20,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 21,
+ "average_rank": 22
+ }
temperature=0.0/koala-13B-HF.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "TheBloke/koala-13B-HF",
+ "brainstorm": 0.01001669449081803,
+ "open_qa": 0.7009803921568627,
+ "closed_qa": 0.15841584158415842,
+ "extract": 0.08415841584158415,
+ "generation": 0.043333333333333335,
+ "rewrite": 0.05,
+ "summarize": 0.009900990099009901,
+ "classify": 0.16417910447761194,
+ "reasoning_over_numerical_data": 0.052336448598130844,
+ "multi-document_synthesis": 0.02207505518763797,
+ "fact_checking_or_attributed_qa": 0.39826839826839827,
+ "average": 0.15396951582159504,
+ "brainstorm_rank": 21,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 24,
+ "rewrite_rank": 23,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 22,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 22
+ }
temperature=0.0/koala-7B-HF.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "TheBloke/koala-7B-HF",
+ "brainstorm": 0.004173622704507512,
+ "open_qa": 0.6568627450980392,
+ "closed_qa": 0.08168316831683169,
+ "extract": 0.07920792079207921,
+ "generation": 0.03333333333333333,
+ "rewrite": 0.03357142857142857,
+ "summarize": 0.0,
+ "classify": 0.09701492537313432,
+ "reasoning_over_numerical_data": 0.030841121495327105,
+ "multi-document_synthesis": 0.013245033112582781,
+ "fact_checking_or_attributed_qa": 0.3874458874458874,
+ "average": 0.12885265329483195,
+ "brainstorm_rank": 24,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 26,
+ "rewrite_rank": 26,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 25
+ }
temperature=0.0/mpt-7b-chat.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mosaicml/mpt-7b-chat",
+ "brainstorm": 0.005008347245409015,
+ "open_qa": 0.7303921568627451,
+ "closed_qa": 0.12128712871287128,
+ "extract": 0.037128712871287134,
+ "generation": 0.05,
+ "rewrite": 0.03857142857142857,
+ "summarize": 0.01485148514851485,
+ "classify": 0.21144278606965172,
+ "reasoning_over_numerical_data": 0.041121495327102804,
+ "multi-document_synthesis": 0.013245033112582781,
+ "fact_checking_or_attributed_qa": 0.474025974025974,
+ "average": 0.15791586799523338,
+ "brainstorm_rank": 24,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 24,
+ "rewrite_rank": 23,
+ "summarize_rank": 15,
+ "classify_rank": 14,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 9,
+ "average_rank": 22
+ }
temperature=0.0/oasst-sft-1-pythia-12b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "OpenAssistant/oasst-sft-1-pythia-12b",
+ "brainstorm": 0.001669449081803005,
+ "open_qa": 0.6225490196078431,
+ "closed_qa": 0.019801980198019802,
+ "extract": 0.01485148514851485,
+ "generation": 0.016666666666666666,
+ "rewrite": 0.005714285714285714,
+ "summarize": 0.0,
+ "classify": 0.03233830845771144,
+ "reasoning_over_numerical_data": 0.024299065420560748,
+ "multi-document_synthesis": 0.008830022075055188,
+ "fact_checking_or_attributed_qa": 0.23160173160173161,
+ "average": 0.08893836490656293,
+ "brainstorm_rank": 24,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 28,
+ "rewrite_rank": 27,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 24,
+ "fact_checking_or_attributed_qa_rank": 26,
+ "average_rank": 29
+ }
temperature=0.0/tulu-2-dpo-13b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/OLMo-7B-SFT",
+ "brainstorm": 0.09348914858096827,
+ "open_qa": 0.6862745098039216,
+ "closed_qa": 0.1782178217821782,
+ "extract": 0.13613861386138615,
+ "generation": 0.13333333333333333,
+ "rewrite": 0.15571428571428572,
+ "summarize": 0.09158415841584158,
+ "classify": 0.24875621890547261,
+ "reasoning_over_numerical_data": 0.10934579439252336,
+ "multi-document_synthesis": 0.10154525386313466,
+ "fact_checking_or_attributed_qa": 0.5021645021645021,
+ "average": 0.22150578552886796,
+ "brainstorm_rank": 17,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 16,
+ "rewrite_rank": 13,
+ "summarize_rank": 15,
+ "classify_rank": 14,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 13,
+ "fact_checking_or_attributed_qa_rank": 5,
+ "average_rank": 13
+ }
temperature=0.0/tulu-2-dpo-70b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/tulu-2-dpo-70b",
+ "brainstorm": 0.17529215358931555,
+ "open_qa": 0.6078431372549019,
+ "closed_qa": 0.26485148514851486,
+ "extract": 0.2103960396039604,
+ "generation": 0.22916666666666666,
+ "rewrite": 0.215,
+ "summarize": 0.1188118811881188,
+ "classify": 0.30845771144278605,
+ "reasoning_over_numerical_data": 0.23177570093457944,
+ "multi-document_synthesis": 0.18101545253863136,
+ "fact_checking_or_attributed_qa": 0.5562770562770563,
+ "average": 0.2817170258767756,
+ "brainstorm_rank": 12,
+ "open_qa_rank": 12,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 9,
+ "rewrite_rank": 10,
+ "summarize_rank": 14,
+ "classify_rank": 9,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 11,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 11
+ }
temperature=0.0/tulu-2-dpo-7b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/tulu-2-dpo-13b",
+ "brainstorm": 0.049248747913188645,
+ "open_qa": 0.5588235294117647,
+ "closed_qa": 0.14603960396039603,
+ "extract": 0.09900990099009901,
+ "generation": 0.1,
+ "rewrite": 0.09928571428571428,
+ "summarize": 0.06930693069306931,
+ "classify": 0.18407960199004975,
+ "reasoning_over_numerical_data": 0.05420560747663551,
+ "multi-document_synthesis": 0.059602649006622516,
+ "fact_checking_or_attributed_qa": 0.4653679653679654,
+ "average": 0.1713609319177732,
+ "brainstorm_rank": 19,
+ "open_qa_rank": 24,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 16,
+ "rewrite_rank": 18,
+ "summarize_rank": 15,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 9,
+ "average_rank": 21
+ }
temperature=0.0/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/tulu-v2.5-ppo-13b-uf-mean-70b-uf-rm",
+ "brainstorm": 0.3772954924874791,
+ "open_qa": 0.21568627450980393,
+ "closed_qa": 0.06435643564356436,
+ "extract": 0.10891089108910892,
+ "generation": 0.16416666666666666,
+ "rewrite": 0.155,
+ "summarize": 0.1608910891089109,
+ "classify": 0.12437810945273631,
+ "reasoning_over_numerical_data": 0.11682242990654206,
+ "multi-document_synthesis": 0.2185430463576159,
+ "fact_checking_or_attributed_qa": 0.21428571428571427,
+ "average": 0.17457601359164931,
+ "brainstorm_rank": 7,
+ "open_qa_rank": 29,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 11,
+ "rewrite_rank": 13,
+ "summarize_rank": 6,
+ "classify_rank": 20,
+ "reasoning_over_numerical_data_rank": 9,
+ "multi-document_synthesis_rank": 8,
+ "fact_checking_or_attributed_qa_rank": 26,
+ "average_rank": 20
+ }
temperature=0.0/vicuna-13b-v1.5.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "lmsys/vicuna-7b-v1.5",
+ "brainstorm": 0.04090150250417362,
+ "open_qa": 0.8137254901960784,
+ "closed_qa": 0.26485148514851486,
+ "extract": 0.13861386138613863,
+ "generation": 0.10166666666666667,
+ "rewrite": 0.11928571428571429,
+ "summarize": 0.07920792079207921,
+ "classify": 0.2562189054726368,
+ "reasoning_over_numerical_data": 0.10560747663551401,
+ "multi-document_synthesis": 0.05518763796909492,
+ "fact_checking_or_attributed_qa": 0.5584415584415584,
+ "average": 0.23033711086346997,
+ "brainstorm_rank": 19,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 16,
+ "rewrite_rank": 18,
+ "summarize_rank": 15,
+ "classify_rank": 14,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 13
+ }
temperature=0.0/vicuna-7b-v1.5.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "lmsys/vicuna-7b-v1.5",
+ "brainstorm": 0.02921535893155259,
+ "open_qa": 0.7549019607843137,
+ "closed_qa": 0.20297029702970298,
+ "extract": 0.10643564356435645,
+ "generation": 0.08166666666666667,
+ "rewrite": 0.07,
+ "summarize": 0.027227722772277228,
+ "classify": 0.24129353233830847,
+ "reasoning_over_numerical_data": 0.06728971962616823,
+ "multi-document_synthesis": 0.04194260485651214,
+ "fact_checking_or_attributed_qa": 0.5281385281385281,
+ "average": 0.19555291224621696,
+ "brainstorm_rank": 21,
+ "open_qa_rank": 3,
+ "closed_qa_rank": 3,
+ "extract_rank": 1,
+ "generation_rank": 20,
+ "rewrite_rank": 20,
+ "summarize_rank": 15,
+ "classify_rank": 14,
+ "reasoning_over_numerical_data_rank": 15,
+ "multi-document_synthesis_rank": 19,
+ "fact_checking_or_attributed_qa_rank": 5,
+ "average_rank": 17
+ }
temperature=1.0/Llama-2-13b-chat-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-2-13b-chat-hf",
+ "brainstorm": 0.21035058430717865,
+ "open_qa": 0.5931372549019608,
+ "closed_qa": 0.17574257425742573,
+ "extract": 0.13366336633663367,
+ "generation": 0.16666666666666666,
+ "rewrite": 0.13642857142857143,
+ "summarize": 0.18811881188118812,
+ "classify": 0.32338308457711445,
+ "reasoning_over_numerical_data": 0.12242990654205607,
+ "multi-document_synthesis": 0.12141280353200883,
+ "fact_checking_or_attributed_qa": 0.4090909090909091,
+ "average": 0.23458404850197392,
+ "brainstorm_rank": 13,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 11,
+ "rewrite_rank": 13,
+ "summarize_rank": 9,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 8,
+ "multi-document_synthesis_rank": 14,
+ "fact_checking_or_attributed_qa_rank": 7,
+ "average_rank": 13
+ }
temperature=1.0/Llama-2-70b-chat-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-2-70b-chat-hf",
+ "brainstorm": 0.21786310517529214,
+ "open_qa": 0.6617647058823529,
+ "closed_qa": 0.3118811881188119,
+ "extract": 0.17326732673267325,
+ "generation": 0.19749999999999998,
+ "rewrite": 0.18285714285714286,
+ "summarize": 0.1534653465346535,
+ "classify": 0.34328358208955223,
+ "reasoning_over_numerical_data": 0.17196261682242991,
+ "multi-document_synthesis": 0.16556291390728478,
+ "fact_checking_or_attributed_qa": 0.4880952380952381,
+ "average": 0.2788639242014029,
+ "brainstorm_rank": 13,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 11,
+ "rewrite_rank": 11,
+ "summarize_rank": 9,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 8,
+ "multi-document_synthesis_rank": 11,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 10
+ }
temperature=1.0/Llama-2-7b-chat-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-2-7b-chat-hf",
+ "brainstorm": 0.17111853088480802,
+ "open_qa": 0.47549019607843135,
+ "closed_qa": 0.2004950495049505,
+ "extract": 0.11881188118811882,
+ "generation": 0.1525,
+ "rewrite": 0.13071428571428573,
+ "summarize": 0.12128712871287128,
+ "classify": 0.1865671641791045,
+ "reasoning_over_numerical_data": 0.08785046728971962,
+ "multi-document_synthesis": 0.10596026490066225,
+ "fact_checking_or_attributed_qa": 0.354978354978355,
+ "average": 0.1914339384937552,
+ "brainstorm_rank": 13,
+ "open_qa_rank": 21,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 15,
+ "rewrite_rank": 13,
+ "summarize_rank": 9,
+ "classify_rank": 17,
+ "reasoning_over_numerical_data_rank": 8,
+ "multi-document_synthesis_rank": 14,
+ "fact_checking_or_attributed_qa_rank": 12,
+ "average_rank": 14
+ }
temperature=1.0/Llama-3.1-70B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-3.1-70B-Instruct",
+ "brainstorm": 0.4365609348914858,
+ "open_qa": 0.7696078431372548,
+ "closed_qa": 0.4084158415841584,
+ "extract": 0.3688118811881188,
+ "generation": 0.43083333333333335,
+ "rewrite": 0.4235714285714286,
+ "summarize": 0.4430693069306931,
+ "classify": 0.5323383084577115,
+ "reasoning_over_numerical_data": 0.45794392523364486,
+ "multi-document_synthesis": 0.48123620309050774,
+ "fact_checking_or_attributed_qa": 0.49783549783549785,
+ "average": 0.47729313675034857,
+ "brainstorm_rank": 7,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 1,
+ "generation_rank": 4,
+ "rewrite_rank": 3,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 4,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 1
+ }
temperature=1.0/Llama-3.1-8B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "meta-llama/Llama-3.1-8B-Instruct",
+ "brainstorm": 0.40150250417362265,
+ "open_qa": 0.6519607843137255,
+ "closed_qa": 0.30445544554455445,
+ "extract": 0.25495049504950495,
+ "generation": 0.32416666666666666,
+ "rewrite": 0.30142857142857143,
+ "summarize": 0.2995049504950495,
+ "classify": 0.3781094527363184,
+ "reasoning_over_numerical_data": 0.23457943925233646,
+ "multi-document_synthesis": 0.4105960264900662,
+ "fact_checking_or_attributed_qa": 0.31601731601731603,
+ "average": 0.3524792411061574,
+ "brainstorm_rank": 7,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 5,
+ "generation_rank": 8,
+ "rewrite_rank": 5,
+ "summarize_rank": 5,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 8,
+ "multi-document_synthesis_rank": 6,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 7
+ }
temperature=1.0/Mistral-7B-Instruct-v0.3.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mistralai/Mistral-7B-Instruct-v0.3",
+ "brainstorm": 0.29799666110183637,
+ "open_qa": 0.6176470588235294,
+ "closed_qa": 0.19554455445544555,
+ "extract": 0.13366336633663367,
+ "generation": 0.19416666666666665,
+ "rewrite": 0.16714285714285712,
+ "summarize": 0.16831683168316833,
+ "classify": 0.2263681592039801,
+ "reasoning_over_numerical_data": 0.18037383177570093,
+ "multi-document_synthesis": 0.2847682119205298,
+ "fact_checking_or_attributed_qa": 0.4025974025974026,
+ "average": 0.26078050924615914,
+ "brainstorm_rank": 9,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 11,
+ "rewrite_rank": 11,
+ "summarize_rank": 9,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 8,
+ "multi-document_synthesis_rank": 9,
+ "fact_checking_or_attributed_qa_rank": 7,
+ "average_rank": 11
+ }
temperature=1.0/Mistral-Large-Instruct-2407.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mistralai/Mistral-Large-Instruct-2407",
+ "brainstorm": 0.5484140233722872,
+ "open_qa": 0.6176470588235294,
+ "closed_qa": 0.28712871287128716,
+ "extract": 0.34653465346534656,
+ "generation": 0.5225,
+ "rewrite": 0.4985714285714286,
+ "summarize": 0.4108910891089109,
+ "classify": 0.4577114427860697,
+ "reasoning_over_numerical_data": 0.46074766355140184,
+ "multi-document_synthesis": 0.5916114790286976,
+ "fact_checking_or_attributed_qa": 0.24025974025974026,
+ "average": 0.4529106628944273,
+ "brainstorm_rank": 1,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 2,
+ "generation_rank": 1,
+ "rewrite_rank": 1,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 19,
+ "average_rank": 2
+ }
temperature=1.0/Mistral-Small-Instruct-2409.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "mistralai/Mistral-Small-Instruct-2409",
+ "brainstorm": 0.49248747913188645,
+ "open_qa": 0.6029411764705882,
+ "closed_qa": 0.30198019801980197,
+ "extract": 0.23514851485148514,
+ "generation": 0.435,
+ "rewrite": 0.40785714285714286,
+ "summarize": 0.36633663366336633,
+ "classify": 0.3706467661691542,
+ "reasoning_over_numerical_data": 0.40654205607476634,
+ "multi-document_synthesis": 0.5640176600441501,
+ "fact_checking_or_attributed_qa": 0.22510822510822512,
+ "average": 0.4007332593082333,
+ "brainstorm_rank": 4,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 2,
+ "rewrite_rank": 3,
+ "summarize_rank": 1,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 3,
+ "fact_checking_or_attributed_qa_rank": 19,
+ "average_rank": 4
+ }
temperature=1.0/OLMo-7B-0724-Instruct-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/OLMo-7B-SFT",
+ "brainstorm": 0.0651085141903172,
+ "open_qa": 0.17647058823529413,
+ "closed_qa": 0.039603960396039604,
+ "extract": 0.03712871287128713,
+ "generation": 0.04,
+ "rewrite": 0.032857142857142856,
+ "summarize": 0.04950495049504951,
+ "classify": 0.07960199004975124,
+ "reasoning_over_numerical_data": 0.04953271028037383,
+ "multi-document_synthesis": 0.0728476821192053,
+ "fact_checking_or_attributed_qa": 0.17316017316017315,
+ "average": 0.07416512951405764,
+ "brainstorm_rank": 20,
+ "open_qa_rank": 26,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 21,
+ "rewrite_rank": 21,
+ "summarize_rank": 20,
+ "classify_rank": 23,
+ "reasoning_over_numerical_data_rank": 17,
+ "multi-document_synthesis_rank": 18,
+ "fact_checking_or_attributed_qa_rank": 24,
+ "average_rank": 27
+ }
temperature=1.0/OLMo-7B-SFT-hf.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "allenai/OLMo-7B-SFT",
+ "brainstorm": 0.011686143572621035,
+ "open_qa": 0.7205882352941176,
+ "closed_qa": 0.13861386138613863,
+ "extract": 0.04455445544554455,
+ "generation": 0.04833333333333333,
+ "rewrite": 0.024285714285714285,
+ "summarize": 0.012376237623762377,
+ "classify": 0.17661691542288557,
+ "reasoning_over_numerical_data": 0.03831775700934579,
+ "multi-document_synthesis": 0.02869757174392936,
+ "fact_checking_or_attributed_qa": 0.4393939393939394,
+ "average": 0.15304219677375744,
+ "brainstorm_rank": 22,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 21,
+ "rewrite_rank": 21,
+ "summarize_rank": 22,
+ "classify_rank": 18,
+ "reasoning_over_numerical_data_rank": 17,
+ "multi-document_synthesis_rank": 22,
+ "fact_checking_or_attributed_qa_rank": 7,
+ "average_rank": 20
+ }
temperature=1.0/Phi-3-medium-4k-instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "microsoft/Phi-3-medium-4k-instruct",
+ "brainstorm": 0.31969949916527546,
+ "open_qa": 0.75,
+ "closed_qa": 0.2747524752475248,
+ "extract": 0.16336633663366337,
+ "generation": 0.25,
+ "rewrite": 0.20785714285714288,
+ "summarize": 0.11386138613861387,
+ "classify": 0.38557213930348255,
+ "reasoning_over_numerical_data": 0.34018691588785044,
+ "multi-document_synthesis": 0.2119205298013245,
+ "fact_checking_or_attributed_qa": 0.48917748917748916,
+ "average": 0.31876308311021523,
+ "brainstorm_rank": 9,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 9,
+ "rewrite_rank": 9,
+ "summarize_rank": 9,
+ "classify_rank": 4,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 11,
+ "fact_checking_or_attributed_qa_rank": 1,
+ "average_rank": 9
+ }
temperature=1.0/Qwen1.5-110B-Chat.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "Qwen/Qwen1.5-110B-Chat",
+ "brainstorm": 0.4524207011686143,
+ "open_qa": 0.7745098039215687,
+ "closed_qa": 0.27970297029702973,
+ "extract": 0.25495049504950495,
+ "generation": 0.3675,
+ "rewrite": 0.3214285714285714,
+ "summarize": 0.2995049504950495,
+ "classify": 0.4129353233830846,
+ "reasoning_over_numerical_data": 0.3747663551401869,
+ "multi-document_synthesis": 0.4900662251655629,
+ "fact_checking_or_attributed_qa": 0.3463203463203463,
+ "average": 0.39764597657904716,
+ "brainstorm_rank": 5,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 6,
+ "generation_rank": 5,
+ "rewrite_rank": 5,
+ "summarize_rank": 6,
+ "classify_rank": 4,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 4,
+ "fact_checking_or_attributed_qa_rank": 12,
+ "average_rank": 4
+ }
temperature=1.0/Qwen2-72B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "Qwen/Qwen2-72B-Instruct",
+ "brainstorm": 0.4540901502504174,
+ "open_qa": 0.7401960784313725,
+ "closed_qa": 0.25742574257425743,
+ "extract": 0.2747524752475248,
+ "generation": 0.34,
+ "rewrite": 0.3264285714285714,
+ "summarize": 0.2202970297029703,
+ "classify": 0.3930348258706468,
+ "reasoning_over_numerical_data": 0.3981308411214953,
+ "multi-document_synthesis": 0.34216335540838855,
+ "fact_checking_or_attributed_qa": 0.3874458874458874,
+ "average": 0.3758149961346847,
+ "brainstorm_rank": 5,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 1,
+ "extract_rank": 3,
+ "generation_rank": 5,
+ "rewrite_rank": 5,
+ "summarize_rank": 7,
+ "classify_rank": 4,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 8,
+ "fact_checking_or_attributed_qa_rank": 7,
+ "average_rank": 6
+ }
temperature=1.0/Qwen2.5-72B-Instruct.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "Qwen/Qwen2.5-72B-Instruct",
+ "brainstorm": 0.5233722871452421,
+ "open_qa": 0.642156862745098,
+ "closed_qa": 0.2920792079207921,
+ "extract": 0.3341584158415841,
+ "generation": 0.47,
+ "rewrite": 0.45285714285714285,
+ "summarize": 0.36386138613861385,
+ "classify": 0.42039800995024873,
+ "reasoning_over_numerical_data": 0.4542056074766355,
+ "multi-document_synthesis": 0.5739514348785872,
+ "fact_checking_or_attributed_qa": 0.22510822510822512,
+ "average": 0.432013507278379,
+ "brainstorm_rank": 1,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 3,
+ "generation_rank": 2,
+ "rewrite_rank": 2,
+ "summarize_rank": 1,
+ "classify_rank": 1,
+ "reasoning_over_numerical_data_rank": 1,
+ "multi-document_synthesis_rank": 1,
+ "fact_checking_or_attributed_qa_rank": 19,
+ "average_rank": 2
+ }
temperature=1.0/WizardLM-13B-V1.2.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "WizardLMTeam/WizardLM-13B-V1.2",
+ "brainstorm": 0.1894824707846411,
+ "open_qa": 0.5,
+ "closed_qa": 0.1188118811881188,
+ "extract": 0.08415841584158416,
+ "generation": 0.165,
+ "rewrite": 0.13714285714285712,
+ "summarize": 0.07178217821782179,
+ "classify": 0.21641791044776118,
+ "reasoning_over_numerical_data": 0.07757009345794391,
+ "multi-document_synthesis": 0.10375275938189846,
+ "fact_checking_or_attributed_qa": 0.30735930735930733,
+ "average": 0.17922526125653945,
+ "brainstorm_rank": 13,
+ "open_qa_rank": 21,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 11,
+ "rewrite_rank": 13,
+ "summarize_rank": 9,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 17,
+ "multi-document_synthesis_rank": 14,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 14
+ }
temperature=1.0/Yi-1.5-34B-Chat.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "01-ai/Yi-1.5-34B-Chat",
+ "brainstorm": 0.5091819699499165,
+ "open_qa": 0.5931372549019608,
+ "closed_qa": 0.24257425742574257,
+ "extract": 0.19306930693069307,
+ "generation": 0.36083333333333334,
+ "rewrite": 0.30428571428571427,
+ "summarize": 0.20792079207920794,
+ "classify": 0.291044776119403,
+ "reasoning_over_numerical_data": 0.2906542056074766,
+ "multi-document_synthesis": 0.3863134657836645,
+ "fact_checking_or_attributed_qa": 0.2683982683982684,
+ "average": 0.3315830313468528,
+ "brainstorm_rank": 1,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 5,
+ "rewrite_rank": 5,
+ "summarize_rank": 7,
+ "classify_rank": 7,
+ "reasoning_over_numerical_data_rank": 8,
+ "multi-document_synthesis_rank": 6,
+ "fact_checking_or_attributed_qa_rank": 19,
+ "average_rank": 8
+ }
temperature=1.0/dolly-v2-12b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "databricks/dolly-v2-7b",
+ "brainstorm": 0.0,
+ "open_qa": 0.029411764705882353,
+ "closed_qa": 0.0,
+ "extract": 0.0,
+ "generation": 0.006666666666666667,
+ "rewrite": 0.0014285714285714286,
+ "summarize": 0.0,
+ "classify": 0.004975124378109453,
+ "reasoning_over_numerical_data": 0.007476635514018692,
+ "multi-document_synthesis": 0.008830022075055188,
+ "fact_checking_or_attributed_qa": 0.017316017316017316,
+ "average": 0.006918618371301918,
+ "brainstorm_rank": 28,
+ "open_qa_rank": 28,
+ "closed_qa_rank": 27,
+ "extract_rank": 7,
+ "generation_rank": 27,
+ "rewrite_rank": 27,
+ "summarize_rank": 22,
+ "classify_rank": 23,
+ "reasoning_over_numerical_data_rank": 28,
+ "multi-document_synthesis_rank": 25,
+ "fact_checking_or_attributed_qa_rank": 28,
+ "average_rank": 29
+ }
temperature=1.0/dolly-v2-7b.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "databricks/dolly-v2-12b",
+ "brainstorm": 0.0,
+ "open_qa": 0.058823529411764705,
+ "closed_qa": 0.0,
+ "extract": 0.0,
+ "generation": 0.008333333333333333,
+ "rewrite": 0.0014285714285714286,
+ "summarize": 0.0,
+ "classify": 0.014925373134328358,
+ "reasoning_over_numerical_data": 0.019626168224299065,
+ "multi-document_synthesis": 0.008830022075055188,
+ "fact_checking_or_attributed_qa": 0.021645021645021644,
+ "average": 0.012146547204761248,
+ "brainstorm_rank": 28,
+ "open_qa_rank": 28,
+ "closed_qa_rank": 27,
+ "extract_rank": 7,
+ "generation_rank": 27,
+ "rewrite_rank": 27,
+ "summarize_rank": 22,
+ "classify_rank": 23,
+ "reasoning_over_numerical_data_rank": 28,
+ "multi-document_synthesis_rank": 25,
+ "fact_checking_or_attributed_qa_rank": 28,
+ "average_rank": 28
+ }
temperature=1.0/gpt4all-13b-snoozy.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "nomic-ai/gpt4all-13b-snoozy",
+ "brainstorm": 0.007512520868113523,
+ "open_qa": 0.7696078431372548,
+ "closed_qa": 0.01485148514851485,
+ "extract": 0.022277227722772276,
+ "generation": 0.043333333333333335,
+ "rewrite": 0.02857142857142857,
+ "summarize": 0.0024752475247524753,
+ "classify": 0.04477611940298507,
+ "reasoning_over_numerical_data": 0.04299065420560748,
+ "multi-document_synthesis": 0.01545253863134658,
+ "fact_checking_or_attributed_qa": 0.20995670995670995,
+ "average": 0.10925500986389264,
+ "brainstorm_rank": 22,
+ "open_qa_rank": 1,
+ "closed_qa_rank": 27,
+ "extract_rank": 7,
+ "generation_rank": 21,
+ "rewrite_rank": 21,
+ "summarize_rank": 22,
+ "classify_rank": 23,
+ "reasoning_over_numerical_data_rank": 17,
+ "multi-document_synthesis_rank": 25,
+ "fact_checking_or_attributed_qa_rank": 24,
+ "average_rank": 23
+ }
temperature=1.0/koala-13B-HF.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "TheBloke/koala-13B-HF",
+ "brainstorm": 0.009181969949916527,
+ "open_qa": 0.49019607843137253,
+ "closed_qa": 0.06435643564356436,
+ "extract": 0.027227722772277228,
+ "generation": 0.045,
+ "rewrite": 0.03142857142857143,
+ "summarize": 0.012376237623762377,
+ "classify": 0.09950248756218906,
+ "reasoning_over_numerical_data": 0.03177570093457944,
+ "multi-document_synthesis": 0.024282560706401765,
+ "fact_checking_or_attributed_qa": 0.3116883116883117,
+ "average": 0.10427418879463148,
+ "brainstorm_rank": 22,
+ "open_qa_rank": 21,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 21,
+ "rewrite_rank": 21,
+ "summarize_rank": 22,
+ "classify_rank": 18,
+ "reasoning_over_numerical_data_rank": 17,
+ "multi-document_synthesis_rank": 22,
+ "fact_checking_or_attributed_qa_rank": 15,
+ "average_rank": 24
+ }
temperature=1.0/koala-7B-HF.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "path": "TheBloke/koala-7B-HF",
+ "brainstorm": 0.00667779632721202,
+ "open_qa": 0.5980392156862745,
+ "closed_qa": 0.01485148514851485,
+ "extract": 0.022277227722772276,
+ "generation": 0.02666666666666667,
+ "rewrite": 0.02142857142857143,
+ "summarize": 0.0,
+ "classify": 0.03980099502487562,
+ "reasoning_over_numerical_data": 0.028037383177570093,
+ "multi-document_synthesis": 0.017660044150110375,
+ "fact_checking_or_attributed_qa": 0.2683982683982684,
+ "average": 0.09489433215734876,
+ "brainstorm_rank": 22,
+ "open_qa_rank": 10,
+ "closed_qa_rank": 1,
+ "extract_rank": 7,
+ "generation_rank": 26,
+ "rewrite_rank": 21,
+ "summarize_rank": 22,
+ "classify_rank": 23,
+ "reasoning_over_numerical_data_rank": 17,
+ "multi-document_synthesis_rank": 22,
+ "fact_checking_or_attributed_qa_rank": 19,
+ "average_rank": 25
+ }