{
"path": "lmsys/vicuna-7b-v1.5",
"brainstorm": 0.055,
"open_qa": 0.696,
"closed_qa": 0.131,
"extract": 0.077,
"generation": 0.09,
"rewrite": 0.081,
"summarize": 0.042,
"classify": 0.172,
"reasoning_over_numerical_data": 0.058,
"multi-document_synthesis": 0.064,
"fact_checking_or_attributed_qa": 0.468,
"average": 0.1352,
"brainstorm_rank": 20,
"open_qa_rank": 1,
"closed_qa_rank": 14,
"extract_rank": 12,
"generation_rank": 19,
"rewrite_rank": 19,
"summarize_rank": 19,
"classify_rank": 13,
"reasoning_over_numerical_data_rank": 19,
"multi-document_synthesis_rank": 19,
"fact_checking_or_attributed_qa_rank": 1,
"average_rank": 19,
"brainstorm_confi": "+1.84 / -1.75",
"open_qa_confi": "+8.82 / -9.80",
"closed_qa_confi": "+4.46 / -3.97",
"extract_confi": "+3.47 / -3.22",
"generation_confi": "+2.33 / -2.17",
"rewrite_confi": "+2.07 / -2.00",
"summarize_confi": "+2.72 / -2.48",
"classify_confi": "+5.22 / -4.98",
"reasoning_over_numerical_data_confi": "+2.06 / -1.87",
"multi-document_synthesis_confi": "+2.43 / -2.21",
"fact_checking_or_attributed_qa_confi": "+4.55 / -4.55",
"average_confi": "+1.0 / - 1.0"
}