href_results / temperature=1.0 /Phi-3-medium-4k-instruct.json
alrope's picture
Upload folder using huggingface_hub
f5869f0 verified
raw
history blame
1.26 kB
{
"path": "microsoft/Phi-3-medium-4k-instruct",
"brainstorm": 0.32,
"open_qa": 0.75,
"closed_qa": 0.275,
"extract": 0.163,
"generation": 0.25,
"rewrite": 0.208,
"summarize": 0.114,
"classify": 0.386,
"reasoning_over_numerical_data": 0.34,
"multi-document_synthesis": 0.212,
"fact_checking_or_attributed_qa": 0.489,
"average": 0.2951,
"brainstorm_rank": 9,
"open_qa_rank": 1,
"closed_qa_rank": 6,
"extract_rank": 5,
"generation_rank": 9,
"rewrite_rank": 9,
"summarize_rank": 10,
"classify_rank": 5,
"reasoning_over_numerical_data_rank": 6,
"multi-document_synthesis_rank": 9,
"fact_checking_or_attributed_qa_rank": 1,
"average_rank": 9,
"brainstorm_confi": "+3.76 / -3.76",
"open_qa_confi": "+7.84 / -8.33",
"closed_qa_confi": "+5.69 / -5.45",
"extract_confi": "+4.70 / -4.46",
"generation_confi": "+3.67 / -3.33",
"rewrite_confi": "+3.00 / -2.86",
"summarize_confi": "+4.46 / -4.21",
"classify_confi": "+6.47 / -6.47",
"reasoning_over_numerical_data_confi": "+3.64 / -3.64",
"multi-document_synthesis_confi": "+3.97 / -3.53",
"fact_checking_or_attributed_qa_confi": "+4.55 / -4.76",
  "average_confi": "+1.40 / -1.40"
}