{
  "path": "01-ai/Yi-1.5-34B-Chat",
  "brainstorm": 0.509,
  "open_qa": 0.593,
  "closed_qa": 0.243,
  "extract": 0.193,
  "generation": 0.361,
  "rewrite": 0.304,
  "summarize": 0.208,
  "classify": 0.291,
  "reasoning_over_numerical_data": 0.291,
  "multi-document_synthesis": 0.386,
  "fact_checking_or_attributed_qa": 0.268,
  "average": 0.3377,
  "brainstorm_rank": 1,
  "open_qa_rank": 18,
  "closed_qa_rank": 6,
  "extract_rank": 5,
  "generation_rank": 3,
  "rewrite_rank": 5,
  "summarize_rank": 5,
  "classify_rank": 5,
  "reasoning_over_numerical_data_rank": 8,
  "multi-document_synthesis_rank": 7,
  "fact_checking_or_attributed_qa_rank": 16,
  "average_rank": 6,
  "brainstorm_confi": "+3.92 / -4.01",
  "open_qa_confi": "+9.80 / -9.80",
  "closed_qa_confi": "+5.69 / -5.45",
  "extract_confi": "+5.20 / -4.95",
  "generation_confi": "+3.83 / -3.83",
  "rewrite_confi": "+3.50 / -3.36",
  "summarize_confi": "+5.69 / -5.45",
  "classify_confi": "+5.72 / -5.97",
  "reasoning_over_numerical_data_confi": "+3.55 / -3.55",
  "multi-document_synthesis_confi": "+4.11 / -4.30",
  "fact_checking_or_attributed_qa_confi": "+4.11 / -3.90",
  "average_confi": "+1.4 / -1.4"
}