href_results/temperature=1.0/WizardLM-13B-V1.2.json
{
"path": "WizardLMTeam/WizardLM-13B-V1.2",
"brainstorm": 0.189,
"open_qa": 0.5,
"closed_qa": 0.119,
"extract": 0.084,
"generation": 0.165,
"rewrite": 0.137,
"summarize": 0.072,
"classify": 0.216,
"reasoning_over_numerical_data": 0.078,
"multi-document_synthesis": 0.104,
"fact_checking_or_attributed_qa": 0.307,
"average": 0.1618,
"brainstorm_rank": 13,
"open_qa_rank": 18,
"closed_qa_rank": 14,
"extract_rank": 12,
"generation_rank": 13,
"rewrite_rank": 13,
"summarize_rank": 19,
"classify_rank": 13,
"reasoning_over_numerical_data_rank": 13,
"multi-document_synthesis_rank": 14,
"fact_checking_or_attributed_qa_rank": 16,
"average_rank": 15,
"brainstorm_confi": "+3.17 / -3.09",
"open_qa_confi": "+9.80 / -9.80",
"closed_qa_confi": "+4.21 / -4.21",
"extract_confi": "+3.72 / -3.47",
"generation_confi": "+3.00 / -3.00",
"rewrite_confi": "+2.64 / -2.50",
"summarize_confi": "+3.71 / -3.22",
"classify_confi": "+5.47 / -5.22",
"reasoning_over_numerical_data_confi": "+2.24 / -2.06",
"multi-document_synthesis_confi": "+2.87 / -2.87",
"fact_checking_or_attributed_qa_confi": "+4.33 / -4.11",
"average_confi": "+1.1 / - 1.1"
}
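
A minimal usage sketch, not part of the results file itself: the Python snippet below (the local path is an assumption; adjust it to wherever the href_results dump is checked out) loads this JSON and prints the per-category scores from strongest to weakest, skipping the model path string, the rank fields, and the confidence-interval strings.

```python
import json

# Assumed local path to this file within a checkout of the results dump.
path = "href_results/temperature=1.0/WizardLM-13B-V1.2.json"

with open(path) as f:
    results = json.load(f)

# Keep only the raw per-category scores: float values that are neither
# rank fields nor the precomputed overall average.
scores = {
    key: value
    for key, value in results.items()
    if isinstance(value, float)
    and not key.endswith("_rank")
    and key != "average"
}

# List categories from strongest to weakest.
for category, score in sorted(scores.items(), key=lambda kv: -kv[1]):
    print(f"{category:45s} {score:.3f}")
```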