{
"path": "OpenAssistant/oasst-sft-1-pythia-12b",
"brainstorm": 0.002,
"open_qa": 0.623,
"closed_qa": 0.02,
"extract": 0.015,
"generation": 0.017,
"rewrite": 0.006,
"summarize": 0.0,
"classify": 0.032,
"reasoning_over_numerical_data": 0.024,
"multi-document_synthesis": 0.009,
"fact_checking_or_attributed_qa": 0.232,
"average": 0.0507,
"brainstorm_rank": 36,
"open_qa_rank": 21,
"closed_qa_rank": 34,
"extract_rank": 36,
"generation_rank": 36,
"rewrite_rank": 35,
"summarize_rank": 34,
"classify_rank": 34,
"reasoning_over_numerical_data_rank": 36,
"multi-document_synthesis_rank": 31,
"fact_checking_or_attributed_qa_rank": 25,
"average_rank": 37,
"brainstorm_confi": "+0.3 / -0.2",
"open_qa_confi": "+9.3 / -9.3",
"closed_qa_confi": "+2.0 / -1.5",
"extract_confi": "+1.7 / -1.2",
"generation_confi": "+1.2 / -1.0",
"rewrite_confi": "+0.6 / -0.4",
"summarize_confi": "+0.0 / -0.0",
"classify_confi": "+2.5 / -2.2",
"reasoning_over_numerical_data_confi": "+1.3 / -1.1",
"multi-document_synthesis_confi": "+0.9 / -0.7",
"fact_checking_or_attributed_qa_confi": "+3.9 / -3.7",
"average_confi": "+0.67 / -0.62"
}