t5-small-tweetqa-qag/eval/metric.first.answer.paragraph.questions_answers.lmqg_qag_tweetqa.default.json
{"validation": {"Bleu_1": 0.3345070422535018, "Bleu_2": 0.21544505942559114, "Bleu_3": 0.14314309581452783, "Bleu_4": 0.09683893632934976, "METEOR": 0.30224984331474797, "ROUGE_L": 0.35365280171487196, "BERTScore": 0.8922636171556869, "MoverScore": 0.6050952366700484, "QAAlignedF1Score (BERTScore)": 0.9052095424849287, "QAAlignedF1Score (MoverScore)": 0.6315811777418272, "QAAlignedRecall (BERTScore)": 0.9005212476844444, "QAAlignedPrecision (BERTScore)": 0.9100775211589414, "QAAlignedRecall (MoverScore)": 0.6217389911113362, "QAAlignedPrecision (MoverScore)": 0.6424459558351244}, "test": {"Bleu_1": 0.355266164039276, "Bleu_2": 0.22935230967802653, "Bleu_3": 0.15106487659216425, "Bleu_4": 0.10080358110819482, "METEOR": 0.28019855592470416, "ROUGE_L": 0.34193464058970124, "BERTScore": 0.8964198049713776, "MoverScore": 0.6047135052650878, "QAAlignedF1Score (BERTScore)": 0.9142303181239072, "QAAlignedF1Score (MoverScore)": 0.6307767033392071, "QAAlignedRecall (BERTScore)": 0.9097622335862658, "QAAlignedPrecision (BERTScore)": 0.9188558514660737, "QAAlignedRecall (MoverScore)": 0.6215505466446392, "QAAlignedPrecision (MoverScore)": 0.6408362894345216}}