Commit: 88cd373
Parent(s): e7f98af
Round evaluation metrics for improved precision in scoring results
medvqa/submission_samples/gi-2025/submission_task1.py CHANGED
@@ -110,11 +110,12 @@ preds = [pred['answer'] for pred in predictions]
 bleu_result = bleu.compute(predictions=preds, references=references)
 rouge_result = rouge.compute(predictions=preds, references=references)
 meteor_result = meteor.compute(predictions=preds, references=references)
-bleu_score = bleu_result['bleu']
-rouge1_score = float(rouge_result['rouge1'])
-rouge2_score = float(rouge_result['rouge2'])
-rougeL_score = float(rouge_result['rougeL'])
-meteor_score = float(meteor_result['meteor'])
+bleu_score = round(bleu_result['bleu'], 2)
+rouge1_score = round(float(rouge_result['rouge1']), 2)
+rouge2_score = round(float(rouge_result['rouge2']), 2)
+rougeL_score = round(float(rouge_result['rougeL']), 2)
+meteor_score = round(float(meteor_result['meteor']), 2)
+
 public_scores = {
     'bleu': bleu_score,
     'rouge1': rouge1_score,
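For reference, a minimal sketch of what the new rounding step produces. The metric dicts below only mirror the keys returned by the BLEU/ROUGE/METEOR `compute()` calls in the script; the numeric values are illustrative stand-ins, not actual scores from the commit.

# Illustrative stand-ins for bleu_result / rouge_result / meteor_result;
# only the keys match the script above, the values are made up.
bleu_result = {'bleu': 0.41421356}
rouge_result = {'rouge1': 0.61803398, 'rouge2': 0.57735026, 'rougeL': 0.59460355}
meteor_result = {'meteor': 0.70710678}

# Same rounding as the patched script: every reported metric is cut to two decimal places.
public_scores = {
    'bleu': round(bleu_result['bleu'], 2),
    'rouge1': round(float(rouge_result['rouge1']), 2),
    'rouge2': round(float(rouge_result['rouge2']), 2),
    'rougeL': round(float(rouge_result['rougeL']), 2),
    'meteor': round(float(meteor_result['meteor']), 2),
}
print(public_scores)
# {'bleu': 0.41, 'rouge1': 0.62, 'rouge2': 0.58, 'rougeL': 0.59, 'meteor': 0.71}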