href_results / temperature=1.0 / Llama-3.1-8B-Instruct.json
{
"path": "meta-llama/Llama-3.1-8B-Instruct",
"brainstorm": 0.402,
"open_qa": 0.652,
"closed_qa": 0.304,
"extract": 0.255,
"generation": 0.324,
"rewrite": 0.301,
"summarize": 0.3,
"classify": 0.378,
"reasoning_over_numerical_data": 0.235,
"multi-document_synthesis": 0.411,
"fact_checking_or_attributed_qa": 0.316,
"average": 0.3334,
"brainstorm_rank": 5,
"open_qa_rank": 1,
"closed_qa_rank": 1,
"extract_rank": 5,
"generation_rank": 7,
"rewrite_rank": 5,
"summarize_rank": 5,
"classify_rank": 5,
"reasoning_over_numerical_data_rank": 8,
"multi-document_synthesis_rank": 4,
"fact_checking_or_attributed_qa_rank": 16,
"average_rank": 8,
"brainstorm_confi": "+3.92 / -4.01",
"open_qa_confi": "+9.31 / -9.31",
"closed_qa_confi": "+5.94 / -5.94",
"extract_confi": "+5.69 / -5.20",
"generation_confi": "+3.83 / -3.75",
"rewrite_confi": "+3.57 / -3.43",
"summarize_confi": "+5.94 / -6.19",
"classify_confi": "+6.22 / -5.97",
"reasoning_over_numerical_data_confi": "+3.27 / -3.36",
"multi-document_synthesis_confi": "+4.64 / -4.64",
"fact_checking_or_attributed_qa_confi": "+4.33 / -4.11",
"average_confi": "+1.4 / - 1.4"
}
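
A minimal sketch of how a results file in this format can be consumed, assuming it is saved locally as Llama-3.1-8B-Instruct.json; the filename and the parse_confi helper below are illustrative assumptions, not part of the HREF release:

import json

def parse_confi(confi: str) -> tuple[float, float]:
    # Hypothetical helper: split a "+x / -y" interval string into
    # (upper, lower) magnitudes, e.g. "+3.92 / -4.01" -> (3.92, 4.01).
    plus, minus = confi.split("/")
    return float(plus.strip().lstrip("+")), abs(float(minus.strip()))

with open("Llama-3.1-8B-Instruct.json") as f:
    results = json.load(f)

# Each scored category (e.g. "brainstorm") has companion "<key>_rank"
# and "<key>_confi" fields; "path" identifies the evaluated model.
for key, score in results.items():
    if key == "path" or key.endswith(("_rank", "_confi")):
        continue
    rank = results[f"{key}_rank"]
    upper, lower = parse_confi(results[f"{key}_confi"])
    print(f"{key}: {score:.3f} (rank {rank}, +{upper:.2f} / -{lower:.2f})")

Under these assumptions, the loop prints one line per category, ending with the overall average (0.333, rank 8, +1.40 / -1.40).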