{
  "path": "allenai/Llama-3.1-Tulu-3-70B-SFT",
  "brainstorm": 0.028,
  "open_qa": 0.951,
  "closed_qa": 0.376,
  "extract": 0.282,
  "generation": 0.13,
  "rewrite": 0.117,
  "summarize": 0.04,
  "classify": 0.366,
  "reasoning_over_numerical_data": 0.353,
  "multi-document_synthesis": 0.105,
  "fact_checking_or_attributed_qa": 0.604,
  "average": 0.2358,
  "brainstorm_rank": 25,
  "open_qa_rank": 1,
  "closed_qa_rank": 2,
  "extract_rank": 7,
  "generation_rank": 19,
  "rewrite_rank": 19,
  "summarize_rank": 21,
  "classify_rank": 9,
  "reasoning_over_numerical_data_rank": 5,
  "multi-document_synthesis_rank": 20,
  "fact_checking_or_attributed_qa_rank": 1,
  "average_rank": 17,
  "brainstorm_confi": "+1.3 / -1.3",
  "open_qa_confi": "+3.4 / -4.4",
  "closed_qa_confi": "+6.2 / -6.2",
  "extract_confi": "+5.4 / -5.2",
  "generation_confi": "+2.7 / -2.7",
  "rewrite_confi": "+2.4 / -2.4",
  "summarize_confi": "+2.7 / -2.2",
  "classify_confi": "+6.7 / -6.2",
  "reasoning_over_numerical_data_confi": "+3.8 / -3.9",
  "multi-document_synthesis_confi": "+3.0 / -2.8",
  "fact_checking_or_attributed_qa_confi": "+4.3 / -4.5",
  "average_confi": "+1.28 / -1.21"
}