mt5-base-esquad-qag/eval/metric.first.answer.paragraph.questions_answers.lmqg_qag_esquad.default.json
{"validation": {"Bleu_1": 0.290410134310789, "Bleu_2": 0.165502229998739, "Bleu_3": 0.09848741138745379, "Bleu_4": 0.06646403182008429, "METEOR": 0.21098897650961085, "ROUGE_L": 0.25289328543314055, "BERTScore": 0.739177295160455, "MoverScore": 0.5240828055144467, "QAAlignedF1Score (BERTScore)": 0.815034062215664, "QAAlignedRecall (BERTScore)": 0.7926918455339421, "QAAlignedPrecision (BERTScore)": 0.8397426666823765, "QAAlignedF1Score (MoverScore)": 0.5631614868359288, "QAAlignedRecall (MoverScore)": 0.5433533140367633, "QAAlignedPrecision (MoverScore)": 0.586108974589693}, "test": {"Bleu_1": 0.07904094156389288, "Bleu_2": 0.03812578203969263, "Bleu_3": 0.02271639512237187, "Bleu_4": 0.015312801666172975, "METEOR": 0.17562136808176335, "ROUGE_L": 0.13300750909719405, "BERTScore": 0.6468604413092586, "MoverScore": 0.5034427086240993, "QAAlignedF1Score (BERTScore)": 0.7895520071597255, "QAAlignedRecall (BERTScore)": 0.7930640663081466, "QAAlignedPrecision (BERTScore)": 0.7865682015086454, "QAAlignedF1Score (MoverScore)": 0.5430042564379615, "QAAlignedRecall (MoverScore)": 0.544242489751627, "QAAlignedPrecision (MoverScore)": 0.5421227444914386}}