{
  "path": "mistralai/Mistral-7B-Instruct-v0.3",
  "brainstorm": 0.298,
  "open_qa": 0.618,
  "closed_qa": 0.196,
  "extract": 0.134,
  "generation": 0.194,
  "rewrite": 0.167,
  "summarize": 0.168,
  "classify": 0.226,
  "reasoning_over_numerical_data": 0.18,
  "multi-document_synthesis": 0.285,
  "fact_checking_or_attributed_qa": 0.403,
  "average": 0.2425,
  "brainstorm_rank": 9,
  "open_qa_rank": 1,
  "closed_qa_rank": 6,
  "extract_rank": 12,
  "generation_rank": 9,
  "rewrite_rank": 9,
  "summarize_rank": 10,
  "classify_rank": 13,
  "reasoning_over_numerical_data_rank": 10,
  "multi-document_synthesis_rank": 9,
  "fact_checking_or_attributed_qa_rank": 9,
  "average_rank": 10,
  "brainstorm_confi": "+3.76 / -3.59",
  "open_qa_confi": "+9.80 / -9.80",
  "closed_qa_confi": "+5.20 / -4.95",
  "extract_confi": "+4.70 / -4.21",
  "generation_confi": "+3.08 / -3.09",
  "rewrite_confi": "+2.86 / -2.64",
  "summarize_confi": "+5.20 / -4.95",
  "classify_confi": "+5.97 / -5.47",
  "reasoning_over_numerical_data_confi": "+3.18 / -2.99",
  "multi-document_synthesis_confi": "+4.19 / -3.97",
  "fact_checking_or_attributed_qa_confi": "+4.55 / -4.76",
  "average_confi": "+1.3 / -1.3"
}